docker setup
This commit is contained in:
		
							
								
								
									
										13
									
								
								srcs/.venv/lib/python3.11/site-packages/pip/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								srcs/.venv/lib/python3.11/site-packages/pip/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,13 @@ | ||||
| from typing import List, Optional | ||||
|  | ||||
| __version__ = "23.2.1" | ||||
|  | ||||
|  | ||||
| def main(args: Optional[List[str]] = None) -> int: | ||||
|     """This is an internal API only meant for use by pip's own console scripts. | ||||
|  | ||||
|     For additional details, see https://github.com/pypa/pip/issues/7498. | ||||
|     """ | ||||
|     from pip._internal.utils.entrypoints import _wrapper | ||||
|  | ||||
|     return _wrapper(args) | ||||
							
								
								
									
										24
									
								
								srcs/.venv/lib/python3.11/site-packages/pip/__main__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										24
									
								
								srcs/.venv/lib/python3.11/site-packages/pip/__main__.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,24 @@ | ||||
| import os | ||||
| import sys | ||||
|  | ||||
| # Remove '' and current working directory from the first entry | ||||
| # of sys.path, if present to avoid using current directory | ||||
| # in pip commands check, freeze, install, list and show, | ||||
| # when invoked as python -m pip <command> | ||||
| if sys.path[0] in ("", os.getcwd()): | ||||
|     sys.path.pop(0) | ||||
|  | ||||
| # If we are running from a wheel, add the wheel to sys.path | ||||
| # This allows the usage python pip-*.whl/pip install pip-*.whl | ||||
| if __package__ == "": | ||||
|     # __file__ is pip-*.whl/pip/__main__.py | ||||
|     # first dirname call strips of '/__main__.py', second strips off '/pip' | ||||
|     # Resulting path is the name of the wheel itself | ||||
|     # Add that to sys.path so we can import pip | ||||
|     path = os.path.dirname(os.path.dirname(__file__)) | ||||
|     sys.path.insert(0, path) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     from pip._internal.cli.main import main as _main | ||||
|  | ||||
|     sys.exit(_main()) | ||||
"""Execute exactly this copy of pip, within a different environment.

This file is named as it is, to ensure that this module can't be imported via
an import statement.
"""

# /!\ This version compatibility check section must be Python 2 compatible. /!\

import sys

# Copied from setup.py
PYTHON_REQUIRES = (3, 7)


def version_str(version):  # type: ignore
    """Render a version tuple like ``(3, 7)`` as ``"3.7"``."""
    return ".".join(str(v) for v in version)


if sys.version_info[:2] < PYTHON_REQUIRES:
    raise SystemExit(
        "This version of pip does not support python {} (requires >={}).".format(
            version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
        )
    )

# From here on, we can use Python 3 features, but the syntax must remain
# Python 2 compatible.

import runpy  # noqa: E402
from importlib.machinery import PathFinder  # noqa: E402
from os.path import dirname  # noqa: E402

# Directory that contains the `pip` package (parent of this file's parent).
PIP_SOURCES_ROOT = dirname(dirname(__file__))


class PipImportRedirectingFinder:
    """Meta-path finder that forces ``import pip`` to resolve to this copy."""

    @classmethod
    def find_spec(cls, fullname, path=None, target=None):  # type: ignore
        # Only intercept the top-level "pip" package; defer everything else.
        if fullname != "pip":
            return None

        spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
        assert spec, (PIP_SOURCES_ROOT, fullname)
        return spec


sys.meta_path.insert(0, PipImportRedirectingFinder())

assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
runpy.run_module("pip", run_name="__main__", alter_sys=True)
from typing import List, Optional

import pip._internal.utils.inject_securetransport  # noqa
from pip._internal.utils import _log

# init_logging() must be called before any call to logging.getLogger()
# which happens at import of most modules.
_log.init_logging()


def main(args: Optional[List[str]] = None) -> int:
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.

    :param args: command-line arguments to forward; ``None`` means "use
        ``sys.argv``" (decided by the wrapped entry point).
    :return: the process exit code produced by pip's CLI wrapper.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
| @ -0,0 +1,311 @@ | ||||
| """Build Environment used for isolation during sdist building | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| import os | ||||
| import pathlib | ||||
| import site | ||||
| import sys | ||||
| import textwrap | ||||
| from collections import OrderedDict | ||||
| from types import TracebackType | ||||
| from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union | ||||
|  | ||||
| from pip._vendor.certifi import where | ||||
| from pip._vendor.packaging.requirements import Requirement | ||||
| from pip._vendor.packaging.version import Version | ||||
|  | ||||
| from pip import __file__ as pip_location | ||||
| from pip._internal.cli.spinners import open_spinner | ||||
| from pip._internal.locations import get_platlib, get_purelib, get_scheme | ||||
| from pip._internal.metadata import get_default_environment, get_environment | ||||
| from pip._internal.utils.subprocess import call_subprocess | ||||
| from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from pip._internal.index.package_finder import PackageFinder | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
    """Return ``(a, b)``, collapsing to ``(a,)`` when the two are equal."""
    return (a, b) if a != b else (a,)
|  | ||||
|  | ||||
class _Prefix:
    """Describes one install prefix inside the build environment.

    Exposes the prefix ``path``, whether requirements have been installed
    into it yet (``setup``), its scripts directory (``bin_dir``) and its
    de-duplicated purelib/platlib directories (``lib_dirs``).
    """

    def __init__(self, path: str) -> None:
        self.path = path
        # Flipped to True once install_requirements() has populated it.
        self.setup = False
        scheme = get_scheme("", prefix=path)
        self.bin_dir = scheme.scripts
        self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
|  | ||||
|  | ||||
def get_runnable_pip() -> str:
    """Get a file to pass to a Python executable, to run the currently-running pip.

    This is used to run a pip subprocess, for installing requirements into the build
    environment.

    :return: a filesystem path suitable as the first script argument to
        ``sys.executable``.
    """
    source = pathlib.Path(pip_location).resolve().parent

    if not source.is_dir():
        # This would happen if someone is using pip from inside a zip file. In that
        # case, we can use that directly.
        return str(source)

    return os.fsdecode(source / "__pip-runner__.py")
|  | ||||
|  | ||||
def _get_system_sitepackages() -> Set[str]:
    """Get system site packages

    Usually from site.getsitepackages,
    but fallback on `get_purelib()/get_platlib()` if unavailable
    (e.g. in a virtualenv created by virtualenv<20)

    Returns normalized set of strings.
    """
    if hasattr(site, "getsitepackages"):
        system_sites = site.getsitepackages()
    else:
        # virtualenv < 20 overwrites site.py without getsitepackages
        # fallback on get_purelib/get_platlib.
        # this is known to miss things, but shouldn't in the cases
        # where getsitepackages() has been removed (inside a virtualenv)
        system_sites = [get_purelib(), get_platlib()]
    # Normalize case so later membership tests are reliable on
    # case-insensitive filesystems.
    return {os.path.normcase(path) for path in system_sites}
|  | ||||
|  | ||||
class BuildEnvironment:
    """Creates and manages an isolated environment to install build deps"""

    def __init__(self) -> None:
        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)

        # "normal" then "overlay"; the overlay prefix wins on conflicts
        # because it is added first to the search paths below.
        self._prefixes = OrderedDict(
            (name, _Prefix(os.path.join(temp_dir.path, name)))
            for name in ("normal", "overlay")
        )

        self._bin_dirs: List[str] = []
        self._lib_dirs: List[str] = []
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = _get_system_sitepackages()

        self._site_dir = os.path.join(temp_dir.path, "site")
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(
            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
        ) as fp:
            fp.write(
                textwrap.dedent(
                    """
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth file are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                """
                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
            )

    def __enter__(self) -> None:
        # Snapshot the variables we are about to clobber so __exit__ can
        # restore them exactly (including "was unset").
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
        }

        path = self._bin_dirs[:]
        old_path = self._save_env["PATH"]
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update(
            {
                "PATH": os.pathsep.join(path),
                "PYTHONNOUSERSITE": "1",
                "PYTHONPATH": os.pathsep.join(pythonpath),
            }
        )

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Restore the saved environment; None means the variable was unset.
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def check_requirements(
        self, reqs: Iterable[str]
    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
        """Return 2 sets:
        - conflicting requirements: set of (installed, wanted) reqs tuples
        - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            env = (
                get_environment(self._lib_dirs)
                if hasattr(self, "_lib_dirs")
                else get_default_environment()
            )
            for req_str in reqs:
                req = Requirement(req_str)
                # We're explicitly evaluating with an empty extra value, since build
                # environments are not provided any mechanism to select specific extras.
                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
                    continue
                dist = env.get_distribution(req.name)
                if not dist:
                    missing.add(req_str)
                    continue
                # "===" (arbitrary equality) for non-PEP-440 versions.
                if isinstance(dist.version, Version):
                    installed_req_str = f"{req.name}=={dist.version}"
                else:
                    installed_req_str = f"{req.name}==={dist.version}"
                if not req.specifier.contains(dist.version, prereleases=True):
                    conflicting.add((installed_req_str, req_str))
                # FIXME: Consider direct URL?
        return conflicting, missing

    def install_requirements(
        self,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        """Install ``requirements`` into the named prefix via a pip subprocess.

        Each prefix may be set up at most once per environment.
        """
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        self._install_requirements(
            get_runnable_pip(),
            finder,
            requirements,
            prefix,
            kind=kind,
        )

    @staticmethod
    def _install_requirements(
        pip_runnable: str,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix: _Prefix,
        *,
        kind: str,
    ) -> None:
        # Build the pip subprocess command line, mirroring the finder's
        # index/format/trust configuration so resolution matches the parent.
        args: List[str] = [
            sys.executable,
            pip_runnable,
            "install",
            "--ignore-installed",
            "--no-user",
            "--prefix",
            prefix.path,
            "--no-warn-script-location",
        ]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append("-v")
        for format_control in ("no_binary", "only_binary"):
            formats = getattr(finder.format_control, format_control)
            args.extend(
                (
                    "--" + format_control.replace("_", "-"),
                    ",".join(sorted(formats or {":none:"})),
                )
            )

        index_urls = finder.index_urls
        if index_urls:
            args.extend(["-i", index_urls[0]])
            for extra_index in index_urls[1:]:
                args.extend(["--extra-index-url", extra_index])
        else:
            args.append("--no-index")
        for link in finder.find_links:
            args.extend(["--find-links", link])

        for host in finder.trusted_hosts:
            args.extend(["--trusted-host", host])
        if finder.allow_all_prereleases:
            args.append("--pre")
        if finder.prefer_binary:
            args.append("--prefer-binary")
        # "--" so requirement strings are never parsed as options.
        args.append("--")
        args.extend(requirements)
        extra_environ = {"_PIP_STANDALONE_CERT": where()}
        with open_spinner(f"Installing {kind}") as spinner:
            call_subprocess(
                args,
                command_desc=f"pip subprocess to install {kind}",
                spinner=spinner,
                extra_environ=extra_environ,
            )
|  | ||||
|  | ||||
class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment

    Used when build isolation is disabled: the context manager does nothing
    and installing requirements into it is a programming error.
    """

    def __init__(self) -> None:
        pass

    def __enter__(self) -> None:
        pass

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def cleanup(self) -> None:
        pass

    def install_requirements(
        self,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        raise NotImplementedError()
							
								
								
									
										292
									
								
								srcs/.venv/lib/python3.11/site-packages/pip/_internal/cache.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										292
									
								
								srcs/.venv/lib/python3.11/site-packages/pip/_internal/cache.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,292 @@ | ||||
| """Cache Management | ||||
| """ | ||||
|  | ||||
| import hashlib | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| from pathlib import Path | ||||
| from typing import Any, Dict, List, Optional | ||||
|  | ||||
| from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.exceptions import InvalidWheelFilename | ||||
| from pip._internal.models.direct_url import DirectUrl | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.models.wheel import Wheel | ||||
| from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds | ||||
| from pip._internal.utils.urls import path_to_url | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
| ORIGIN_JSON_NAME = "origin.json" | ||||
|  | ||||
|  | ||||
def _hash_dict(d: Dict[str, str]) -> str:
    """Return a stable sha224 of a dictionary.

    Keys are sorted and the JSON is rendered without whitespace so that the
    digest is independent of dict insertion order.
    """
    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    return hashlib.sha224(s.encode("ascii")).hexdigest()
|  | ||||
|  | ||||
class Cache:
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__()
        assert not cache_dir or os.path.isabs(cache_dir)
        # Empty string is normalized to None ("caching disabled").
        self.cache_dir = cache_dir or None

    def _get_cache_path_parts(self, link: Link) -> List[str]:
        """Get parts of path that must be os.path.joined with cache_dir"""

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Include interpreter name, major and minor version in cache key
        # to cope with ill-behaved sdists that build a different wheel
        # depending on the python version their setup.py is being run on,
        # and don't encode the difference in compatibility tags.
        # https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = _hash_dict(key_parts)

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
        """List (filename, directory) pairs cached for ``link``; [] if
        caching is disabled or inputs are empty."""
        can_not_cache = not self.cache_dir or not canonical_package_name or not link
        if can_not_cache:
            return []

        candidates = []
        path = self.get_path_for_link(link)
        if os.path.isdir(path):
            for candidate in os.listdir(path):
                candidates.append((candidate, path))
        return candidates

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached items in for link."""
        raise NotImplementedError()

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()
|  | ||||
|  | ||||
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)
        assert self.cache_dir
        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Return the best supported cached wheel for ``link``, or ``link``
        itself when nothing usable is cached."""
        candidates = []

        if not package_name:
            return link

        canonical_package_name = canonicalize_name(package_name)
        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                # Not a wheel filename at all; ignore stray files.
                continue
            if canonicalize_name(wheel.name) != canonical_package_name:
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    wheel_name,
                    link,
                    package_name,
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            candidates.append(
                (
                    wheel.support_index_min(supported_tags),
                    wheel_name,
                    wheel_dir,
                )
            )

        if not candidates:
            return link

        # Lowest support index == most specific/preferred tag match.
        _, wheel_name, wheel_dir = min(candidates)
        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
|  | ||||
|  | ||||
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory"""

    def __init__(self) -> None:
        # Globally-managed temp dir: cleaned up at process exit, not here.
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )

        super().__init__(self._temp_dir.path)
|  | ||||
|  | ||||
class CacheEntry:
    """A cache hit: the cached ``link``, whether it came from the persistent
    cache, and the download origin metadata if a valid origin file exists."""

    def __init__(
        self,
        link: Link,
        persistent: bool,
    ):
        self.link = link
        self.persistent = persistent
        self.origin: Optional[DirectUrl] = None
        origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
        if origin_direct_url_path.exists():
            try:
                self.origin = DirectUrl.from_json(
                    origin_direct_url_path.read_text(encoding="utf-8")
                )
            except Exception as e:
                # A corrupt origin file must not break cache usage; warn and
                # proceed with origin=None.
                logger.warning(
                    "Ignoring invalid cache entry origin file %s for %s (%s)",
                    origin_direct_url_path,
                    link.filename,
                    e,
                )
|  | ||||
|  | ||||
class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for graceful degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)
        self._wheel_cache = SimpleWheelCache(cache_dir)
        self._ephem_cache = EphemWheelCache()

    def get_path_for_link(self, link: Link) -> str:
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link: Link) -> str:
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
        if cache_entry is None:
            return link
        return cache_entry.link

    def get_cache_entry(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Optional[CacheEntry]:
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the persistent
        or ephemeral cache.
        """
        # Persistent cache first; identity check ("is not") distinguishes a
        # hit from the pass-through of the original link.
        retval = self._wheel_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=True)

        retval = self._ephem_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=False)

        return None

    @staticmethod
    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
        """Write ``origin.json`` for a cache entry, warning when it would
        overwrite a different or unreadable existing origin."""
        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
        if origin_path.exists():
            try:
                origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
            except Exception as e:
                logger.warning(
                    "Could not read origin file %s in cache entry (%s). "
                    "Will attempt to overwrite it.",
                    origin_path,
                    e,
                )
            else:
                # TODO: use DirectUrl.equivalent when
                # https://github.com/pypa/pip/pull/10564 is merged.
                if origin.url != download_info.url:
                    logger.warning(
                        "Origin URL %s in cache entry %s does not match download URL "
                        "%s. This is likely a pip bug or a cache corruption issue. "
                        "Will overwrite it with the new value.",
                        origin.url,
                        cache_dir,
                        download_info.url,
                    )
        origin_path.write_text(download_info.to_json(), encoding="utf-8")
| @ -0,0 +1,4 @@ | ||||
| """Subpackage containing all of pip's command line interface related code | ||||
| """ | ||||
|  | ||||
| # This file intentionally does not import submodules | ||||
| @ -0,0 +1,171 @@ | ||||
| """Logic that powers autocompletion installed by ``pip completion``. | ||||
| """ | ||||
|  | ||||
| import optparse | ||||
| import os | ||||
| import sys | ||||
| from itertools import chain | ||||
| from typing import Any, Iterable, List, Optional | ||||
|  | ||||
| from pip._internal.cli.main_parser import create_main_parser | ||||
| from pip._internal.commands import commands_dict, create_command | ||||
| from pip._internal.metadata import get_default_environment | ||||
|  | ||||
|  | ||||
def autocomplete() -> None:
    """Entry Point for completion of main and subcommand options.

    Reads completion state from the ``COMP_WORDS`` and ``COMP_CWORD``
    environment variables (set by the shell snippet that ``pip completion``
    installs), prints candidate completions to stdout, and exits the process
    whenever completion was active.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if "PIP_AUTO_COMPLETE" not in os.environ:
        return
    # cwords drops the leading "pip" token; cword is the 1-based index of
    # the word being completed within the full COMP_WORDS line.
    cwords = os.environ["COMP_WORDS"].split()[1:]
    cword = int(os.environ["COMP_CWORD"])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ""

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand: first word that names a known pip command, if any
    subcommand_name: Optional[str] = None
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == "help":
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = not current.startswith("-") and subcommand_name in [
            "show",
            "uninstall",
        ]
        if should_list_installed:
            env = get_default_environment()
            lc = current.lower()
            installed = [
                dist.canonical_name
                for dist in env.iter_installed_distributions(local_only=True)
                if dist.canonical_name.startswith(lc)
                and dist.canonical_name not in cwords[1:]
            ]
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        # special case: 'install' completes against filesystem paths
        # (local sdists/wheels) when not completing an option
        should_list_installables = (
            not current.startswith("-") and subcommand_name == "install"
        )
        if should_list_installables:
            for path in auto_complete_paths(current, "path"):
                print(path)
            sys.exit(1)

        subcommand = create_command(subcommand_name)

        # collect (option string, nargs) for every visible option spelling
        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords,
            cword,
            subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += "="
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith("-"):
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current, completion_type))

        print(" ".join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
|  | ||||
|  | ||||
def get_path_completion_type(
    cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only applies when the previous word is an option.
    if cword < 2:
        return None
    previous_word = cwords[cword - 2]
    if not previous_word.startswith("-"):
        return None
    # The option name is everything before an (optional) "=" sign.
    wanted = previous_word.split("=")[0]
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        # str(opt) renders as e.g. "-f/--file"; match either spelling.
        if wanted in str(opt).split("/"):
            metavar = opt.metavar
            if not metavar or any(
                part in ("path", "file", "dir") for part in metavar.split("/")
            ):
                return metavar
    return None
|  | ||||
|  | ||||
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
    """If ``completion_type`` is ``file`` or ``path``, yield all regular files
    and directories starting with ``current``; otherwise yield only the
    directories starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    prefix = os.path.normcase(filename)
    for entry in os.listdir(current_path):
        # only consider entries that extend what was typed so far
        if not os.path.normcase(entry).startswith(prefix):
            continue
        full = os.path.join(current_path, entry)
        candidate = os.path.normcase(os.path.join(directory, entry))
        if os.path.isdir(full):
            # directories get a trailing separator so completion can continue
            yield os.path.join(candidate, "")
        elif completion_type != "dir" and os.path.isfile(full):
            # regular files are only offered for <file>/<path> completion
            yield candidate
| @ -0,0 +1,236 @@ | ||||
| """Base Command class, and related routines""" | ||||
|  | ||||
| import functools | ||||
| import logging | ||||
| import logging.config | ||||
| import optparse | ||||
| import os | ||||
| import sys | ||||
| import traceback | ||||
| from optparse import Values | ||||
| from typing import Any, Callable, List, Optional, Tuple | ||||
|  | ||||
| from pip._vendor.rich import traceback as rich_traceback | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.command_context import CommandContextMixIn | ||||
| from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter | ||||
| from pip._internal.cli.status_codes import ( | ||||
|     ERROR, | ||||
|     PREVIOUS_BUILD_DIR_ERROR, | ||||
|     UNKNOWN_ERROR, | ||||
|     VIRTUALENV_NOT_FOUND, | ||||
| ) | ||||
| from pip._internal.exceptions import ( | ||||
|     BadCommand, | ||||
|     CommandError, | ||||
|     DiagnosticPipError, | ||||
|     InstallationError, | ||||
|     NetworkConnectionError, | ||||
|     PreviousBuildDirError, | ||||
|     UninstallationError, | ||||
| ) | ||||
| from pip._internal.utils.filesystem import check_path_owner | ||||
| from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging | ||||
| from pip._internal.utils.misc import get_prog, normalize_path | ||||
| from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry | ||||
| from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
|  | ||||
| __all__ = ["Command"] | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class Command(CommandContextMixIn):
    """Base class for all pip commands.

    Subclasses declare their options in :meth:`add_options` and implement
    :meth:`run`; :meth:`main` drives argument parsing, logging setup and
    top-level exception handling around the run.
    """

    # Usage string shown in --help; overridden by subclasses.
    usage: str = ""
    # When True, the command runs even if --require-virtualenv is set.
    ignore_require_venv: bool = False

    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
        """Create the command's option parser and register its option groups."""
        super().__init__()

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog=f"{get_prog()} {name}",
            formatter=UpdatingDefaultsHelpFormatter(),
            add_help_option=False,
            name=name,
            description=self.__doc__,
            isolated=isolated,
        )

        # Populated lazily in _main(); holds per-command tempdir policy.
        self.tempdir_registry: Optional[TempDirRegistry] = None

        # Commands should add options to this option group
        optgroup_name = f"{self.name.capitalize()} Options"
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

        self.add_options()

    def add_options(self) -> None:
        """Hook for subclasses to add command-specific options to ``cmd_opts``."""
        pass

    def handle_pip_version_check(self, options: Values) -> None:
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, "no_index")

    def run(self, options: Values, args: List[str]) -> int:
        """Execute the command; subclasses must override and return a status code."""
        raise NotImplementedError

    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args: List[str]) -> int:
        """Parse ``args`` and run the command, always shutting logging down."""
        try:
            with self.main_context():
                return self._main(args)
        finally:
            logging.shutdown()

    def _main(self, args: List[str]) -> int:
        """Shared pre-run setup and error handling, then dispatch to run()."""
        # We must initialize this before the tempdir manager, otherwise the
        # configuration would not be accessible by the time we clean up the
        # tempdir manager.
        self.tempdir_registry = self.enter_context(tempdir_registry())
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        # Warn when --use-feature names a feature that is now always on.
        always_enabled_features = set(options.features_enabled) & set(
            cmdoptions.ALWAYS_ENABLED_FEATURES
        )
        if always_enabled_features:
            logger.warning(
                "The following features are always enabled: %s. ",
                ", ".join(sorted(always_enabled_features)),
            )

        # Make sure that the --python argument isn't specified after the
        # subcommand. We can tell, because if --python was specified,
        # we should only reach this point if we're running in the created
        # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
        # variable set.
        if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
            logger.critical(
                "The --python option must be placed before the pip subcommand name"
            )
            sys.exit(ERROR)

        # TODO: Try to get these passing down from the command?
        #       without resorting to os.environ to hold these.
        #       This also affects isolated builds and it should.

        if options.no_input:
            os.environ["PIP_NO_INPUT"] = "1"

        if options.exists_action:
            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical("Could not find an activated virtualenv (required).")
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you should "
                    "use sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        def intercepts_unhandled_exc(
            run_func: Callable[..., int]
        ) -> Callable[..., int]:
            # Decorator mapping known exception types to exit codes so a
            # command failure never escapes as a raw traceback.
            @functools.wraps(run_func)
            def exc_logging_wrapper(*args: Any) -> int:
                try:
                    status = run_func(*args)
                    assert isinstance(status, int)
                    return status
                except DiagnosticPipError as exc:
                    logger.error("[present-rich] %s", exc)
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except PreviousBuildDirError as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return PREVIOUS_BUILD_DIR_ERROR
                except (
                    InstallationError,
                    UninstallationError,
                    BadCommand,
                    NetworkConnectionError,
                ) as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except CommandError as exc:
                    logger.critical("%s", exc)
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BrokenStdoutLoggingError:
                    # Bypass our logger and write any remaining messages to
                    # stderr because stdout no longer works.
                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
                    if level_number <= logging.DEBUG:
                        traceback.print_exc(file=sys.stderr)

                    return ERROR
                except KeyboardInterrupt:
                    logger.critical("Operation cancelled by user")
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BaseException:
                    logger.critical("Exception:", exc_info=True)

                    return UNKNOWN_ERROR

            return exc_logging_wrapper

        try:
            if not options.debug_mode:
                run = intercepts_unhandled_exc(self.run)
            else:
                # In debug mode, let exceptions propagate with rich tracebacks.
                run = self.run
                rich_traceback.install(show_locals=True)
            return run(options, args)
        finally:
            self.handle_pip_version_check(options)
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @ -0,0 +1,27 @@ | ||||
from contextlib import ExitStack, contextmanager
from typing import ContextManager, Generator, TypeVar

_T = TypeVar("_T", covariant=True)


class CommandContextMixIn:
    """Mix-in giving a command a single long-lived ``ExitStack``.

    Contexts registered via :meth:`enter_context` stay open until the
    surrounding :meth:`main_context` block exits.
    """

    def __init__(self) -> None:
        super().__init__()
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self) -> Generator[None, None, None]:
        # Nesting main_context() is a programming error.
        assert not self._in_main_context
        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            # Reset the flag even if the body raised.
            self._in_main_context = False

    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
        # Only valid while main_context() is active.
        assert self._in_main_context
        return self._main_context.enter_context(context_provider)
| @ -0,0 +1,79 @@ | ||||
| """Primary application entrypoint. | ||||
| """ | ||||
| import locale | ||||
| import logging | ||||
| import os | ||||
| import sys | ||||
| import warnings | ||||
| from typing import List, Optional | ||||
|  | ||||
| from pip._internal.cli.autocompletion import autocomplete | ||||
| from pip._internal.cli.main_parser import parse_command | ||||
| from pip._internal.commands import create_command | ||||
| from pip._internal.exceptions import PipError | ||||
| from pip._internal.utils import deprecation | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # Do not import and use main() directly! Using it directly is actively | ||||
| # discouraged by pip's maintainers. The name, location and behavior of | ||||
| # this function is subject to change, so calling it directly is not | ||||
| # portable across different pip versions. | ||||
|  | ||||
| # In addition, running pip in-process is unsupported and unsafe. This is | ||||
| # elaborated in detail at | ||||
| # https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. | ||||
| # That document also provides suggestions that should work for nearly | ||||
| # all users that are considering importing and using main() directly. | ||||
|  | ||||
| # However, we know that certain users will still want to invoke pip | ||||
| # in-process. If you understand and accept the implications of using pip | ||||
| # in an unsupported manner, the best approach is to use runpy to avoid | ||||
| # depending on the exact location of this entry point. | ||||
|  | ||||
| # The following example shows how to use runpy to invoke pip in that | ||||
| # case: | ||||
| # | ||||
| #     sys.argv = ["pip", your, args, here] | ||||
| #     runpy.run_module("pip", run_name="__main__") | ||||
| # | ||||
| # Note that this will exit the process after running, unlike a direct | ||||
| # call to main. As it is not safe to do any processing after calling | ||||
| # main, this should not be an issue in practice. | ||||
|  | ||||
|  | ||||
def main(args: Optional[List[str]] = None) -> int:
    """Primary entry point for pip's command line interface.

    :param args: command line arguments; defaults to ``sys.argv[1:]``.
    :return: the exit status code of the executed command.
    """
    if args is None:
        args = sys.argv[1:]

    # Suppress the pkg_resources deprecation warning
    # Note - we use a module of .*pkg_resources to cover
    # the normal case (pip._vendor.pkg_resources) and the
    # devendored case (a bare pkg_resources)
    warnings.filterwarnings(
        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
    )

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # May print shell completions and exit the process when completion
    # is active; otherwise returns immediately.
    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write(f"ERROR: {exc}")
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, "")
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))

    return command.main(cmd_args)
| @ -0,0 +1,134 @@ | ||||
| """A single place for constructing and exposing the main parser | ||||
| """ | ||||
|  | ||||
| import os | ||||
| import subprocess | ||||
| import sys | ||||
| from typing import List, Optional, Tuple | ||||
|  | ||||
| from pip._internal.build_env import get_runnable_pip | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter | ||||
| from pip._internal.commands import commands_dict, get_similar_commands | ||||
| from pip._internal.exceptions import CommandError | ||||
| from pip._internal.utils.misc import get_pip_version, get_prog | ||||
|  | ||||
| __all__ = ["create_main_parser", "parse_command"] | ||||
|  | ||||
|  | ||||
def create_main_parser() -> ConfigOptionParser:
    """Creates and returns the main parser for pip's CLI"""

    main_parser = ConfigOptionParser(
        usage="\n%prog <command> [options]",
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name="global",
        prog=get_prog(),
    )
    # General options come before the subcommand; everything after it is
    # left for the subcommand's own parser.
    main_parser.disable_interspersed_args()

    main_parser.version = get_pip_version()

    # add the general options
    main_parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, main_parser)
    )

    # so the help formatter knows this is the top-level parser
    main_parser.main = True  # type: ignore

    # Render one "<name>  <summary>" row per command for the description.
    command_rows = [
        f"{name:27} {command_info.summary}"
        for name, command_info in commands_dict.items()
    ]
    main_parser.description = "\n".join([""] + command_rows)

    return main_parser
|  | ||||
|  | ||||
def identify_python_interpreter(python: str) -> Optional[str]:
    """Resolve *python* to a concrete interpreter path, or None.

    A path to an existing file is returned as-is.  A directory is treated
    as a virtual environment root and searched for its interpreter.
    """
    if not os.path.exists(python):
        # Could not find the interpreter specified
        return None
    if not os.path.isdir(python):
        return python
    # bin/python for Unix, Scripts/python.exe for Windows
    # Try both in case of odd cases like cygwin.
    for relative_exe in ("bin/python", "Scripts/python.exe"):
        candidate = os.path.join(python, relative_exe)
        if os.path.exists(candidate):
            return candidate
    return None
|  | ||||
|  | ||||
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    """Split ``args`` into a subcommand name and its remaining arguments.

    Also handles the general options that must precede the subcommand:
    ``--python`` re-invokes pip under the given interpreter and exits;
    ``--version`` prints the version and exits; a bare ``pip`` or
    ``pip help`` prints the main help and exits.

    :raises CommandError: if the subcommand is unknown, or the ``--python``
        interpreter cannot be located or run.
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --python
    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
        # Re-invoke pip using the specified Python interpreter
        interpreter = identify_python_interpreter(general_options.python)
        if interpreter is None:
            raise CommandError(
                f"Could not locate Python interpreter {general_options.python}"
            )

        pip_cmd = [
            interpreter,
            get_runnable_pip(),
        ]
        pip_cmd.extend(args)

        # Set a flag so the child doesn't re-invoke itself, causing
        # an infinite loop.
        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
        returncode = 0
        try:
            proc = subprocess.run(pip_cmd)
            returncode = proc.returncode
        except (subprocess.SubprocessError, OSError) as exc:
            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
        sys.exit(returncode)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = [f'unknown command "{cmd_name}"']
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(" - ".join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
| @ -0,0 +1,294 @@ | ||||
| """Base option parser setup""" | ||||
|  | ||||
| import logging | ||||
| import optparse | ||||
| import shutil | ||||
| import sys | ||||
| import textwrap | ||||
| from contextlib import suppress | ||||
| from typing import Any, Dict, Generator, List, Tuple | ||||
|  | ||||
| from pip._internal.cli.status_codes import UNKNOWN_ERROR | ||||
| from pip._internal.configuration import Configuration, ConfigurationError | ||||
| from pip._internal.utils.misc import redact_auth_from_url, strtobool | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # help position must be aligned with __init__.parseopts.description
        kwargs["max_help_position"] = 30
        kwargs["indent_increment"] = 1
        kwargs["width"] = shutil.get_terminal_size()[0] - 2
        super().__init__(*args, **kwargs)

    def format_option_strings(self, option: optparse.Option) -> str:
        return self._format_option_strings(option)

    def _format_option_strings(
        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
    ) -> str:
        """
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep:  separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        # Separate short and long spellings with ", ".
        if len(pieces) > 1:
            pieces.insert(1, optsep)
        if option.takes_value():
            assert option.dest is not None
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt.format(metavar.lower()))
        return "".join(pieces)

    def format_heading(self, heading: str) -> str:
        # The default "Options" heading is just noise; drop it.
        return "" if heading == "Options" else heading + ":\n"

    def format_usage(self, usage: str) -> str:
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        indented = self.indent_lines(textwrap.dedent(usage), "  ")
        return f"\nUsage: {indented}\n"

    def format_description(self, description: str) -> str:
        # leave full control over description to us
        if not description:
            return ""
        # The main parser marks itself with a "main" attribute.
        label = "Commands" if hasattr(self.parser, "main") else "Description"
        # some doc strings have initial newlines and/or trailing whitespace;
        # normalize, then dedent and reindent.
        body = self.indent_lines(
            textwrap.dedent(description.lstrip("\n").rstrip()), "  "
        )
        return f"{label}:\n{body}\n"

    def format_epilog(self, epilog: str) -> str:
        # leave full control over epilog to us
        return epilog if epilog else ""

    def indent_lines(self, text: str, indent: str) -> str:
        return "\n".join(indent + line for line in text.split("\n"))
|  | ||||
|  | ||||
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.

    Also redact auth from url type options
    """

    def expand_default(self, option: optparse.Option) -> str:
        configured_default = None
        if self.parser is not None:
            assert isinstance(self.parser, ConfigOptionParser)
            # Pull in config-file / environment values before expansion.
            self.parser._update_defaults(self.parser.defaults)
            assert option.dest is not None
            configured_default = self.parser.defaults.get(option.dest)
        help_text = super().expand_default(option)

        if configured_default and option.metavar == "URL":
            if isinstance(configured_default, str):
                configured_default = [configured_default]
            elif not isinstance(configured_default, list):
                # If its not a list, we should abort and just return the help text
                configured_default = []

            for url in configured_default:
                help_text = help_text.replace(url, redact_auth_from_url(url))

        return help_text
|  | ||||
|  | ||||
class CustomOptionParser(optparse.OptionParser):
    def insert_option_group(
        self, idx: int, *args: Any, **kwargs: Any
    ) -> optparse.OptionGroup:
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends; move the new group into place.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self) -> List[optparse.Option]:
        """Get a list of all options, including those in option groups."""
        all_options = list(self.option_list)
        for group in self.option_groups:
            all_options.extend(group.option_list)
        return all_options
|  | ||||
|  | ||||
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(
        self,
        *args: Any,
        name: str,
        isolated: bool = False,
        **kwargs: Any,
    ) -> None:
        # `name` selects which config-file section (besides [global])
        # applies to this parser; `isolated` skips user config entirely.
        self.name = name
        self.config = Configuration(isolated)

        assert self.name
        super().__init__(*args, **kwargs)

    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
        """Validate/convert a configured value for `option`.

        Exits the process with status 3 on an invalid value, since this runs
        while loading defaults, before normal option error handling applies.
        """
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(
        self,
    ) -> Generator[Tuple[str, Any], None, None]:
        """Yield (key, value) configuration items in override order:
        [global] first, then this command's own section, then environment
        variables — so later sources win when applied sequentially.
        """
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items: Dict[str, List[Tuple[str, Any]]] = {
            name: [] for name in override_order
        }
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key,
                )
                continue

            # Keys are "section.option-name"; split only on the first dot.
            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        # NOTE: self.values must exist while the loop runs so that callback
        # options invoked below can store state on it; it is read back for
        # the late-evaluated options and cleared before returning.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option("--" + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            assert option.dest is not None

            if option.action in ("store_true", "store_false"):
                try:
                    val = strtobool(val)
                except ValueError:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please specify a boolean value like yes/no, "
                        "true/false or 1/0 instead.".format(val, key)
                    )
            elif option.action == "count":
                # Count options accept either a boolean word (treated as 0/1)
                # or a non-negative integer.
                with suppress(ValueError):
                    val = strtobool(val)
                with suppress(ValueError):
                    val = int(val)
                if not isinstance(val, int) or val < 0:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please instead specify either a non-negative integer "
                        "or a boolean value like yes/no or false/true "
                        "which is equivalent to 1/0.".format(val, key)
                    )
            elif option.action == "append":
                # List-valued config entries are whitespace-separated; check
                # each element individually.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == "callback":
                # The callback mutates self.values; read the final result
                # after the loop (late evaluation).
                assert option.callback is not None
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self) -> optparse.Values:
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        # Mirror optparse's own default processing: string defaults are run
        # through the option's value check/conversion.
        for option in self._get_all_options():
            assert option.dest is not None
            default = defaults.get(option.dest)
            if isinstance(default, str):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg: str) -> None:
        # Print usage to stderr and exit with pip's generic error code.
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, f"{msg}\n")
| @ -0,0 +1,68 @@ | ||||
| import functools | ||||
| from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple | ||||
|  | ||||
| from pip._vendor.rich.progress import ( | ||||
|     BarColumn, | ||||
|     DownloadColumn, | ||||
|     FileSizeColumn, | ||||
|     Progress, | ||||
|     ProgressColumn, | ||||
|     SpinnerColumn, | ||||
|     TextColumn, | ||||
|     TimeElapsedColumn, | ||||
|     TimeRemainingColumn, | ||||
|     TransferSpeedColumn, | ||||
| ) | ||||
|  | ||||
| from pip._internal.utils.logging import get_indentation | ||||
|  | ||||
| DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]] | ||||
|  | ||||
|  | ||||
def _rich_progress_bar(
    iterable: Iterable[bytes],
    *,
    bar_type: str,
    size: int,
) -> Generator[bytes, None, None]:
    """Yield chunks from *iterable* while rendering a rich progress display.

    A determinate bar is shown when *size* is known; otherwise a spinner
    with running totals is used.
    """
    assert bar_type == "on", "This should only be used in the default mode."

    if size:
        total: float = size
        columns: Tuple[ProgressColumn, ...] = (
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            TextColumn("eta"),
            TimeRemainingColumn(),
        )
    else:
        # Unknown length: no ETA possible, show elapsed time instead.
        total = float("inf")
        columns = (
            TextColumn("[progress.description]{task.description}"),
            SpinnerColumn("line", speed=1.5),
            FileSizeColumn(),
            TransferSpeedColumn(),
            TimeElapsedColumn(),
        )

    bar = Progress(*columns, refresh_per_second=30)
    # Indent the bar to line up with pip's current log indentation.
    task_id = bar.add_task(" " * (get_indentation() + 2), total=total)
    with bar:
        for chunk in iterable:
            yield chunk
            bar.update(task_id, advance=len(chunk))
|  | ||||
|  | ||||
def get_download_progress_renderer(
    *, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
    """Get an object that can be used to render the download progress.

    Returns a callable, that takes an iterable to "wrap".
    """
    if bar_type != "on":
        # No progress display requested: pass chunks through untouched.
        return iter
    return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
| @ -0,0 +1,508 @@ | ||||
| """Contains the Command base classes that depend on PipSession. | ||||
|  | ||||
| The classes in this module are in a separate module so the commands not | ||||
| needing download / PackageFinder capability don't unnecessarily import the | ||||
| PackageFinder machinery and all its vendored dependencies, etc. | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| import os | ||||
| import sys | ||||
| from functools import partial | ||||
| from optparse import Values | ||||
| from typing import TYPE_CHECKING, Any, List, Optional, Tuple | ||||
|  | ||||
| from pip._internal.cache import WheelCache | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.command_context import CommandContextMixIn | ||||
| from pip._internal.exceptions import CommandError, PreviousBuildDirError | ||||
| from pip._internal.index.collector import LinkCollector | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.models.selection_prefs import SelectionPreferences | ||||
| from pip._internal.models.target_python import TargetPython | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.operations.build.build_tracker import BuildTracker | ||||
| from pip._internal.operations.prepare import RequirementPreparer | ||||
| from pip._internal.req.constructors import ( | ||||
|     install_req_from_editable, | ||||
|     install_req_from_line, | ||||
|     install_req_from_parsed_requirement, | ||||
|     install_req_from_req_string, | ||||
| ) | ||||
| from pip._internal.req.req_file import parse_requirements | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
| from pip._internal.resolution.base import BaseResolver | ||||
| from pip._internal.self_outdated_check import pip_self_version_check | ||||
| from pip._internal.utils.temp_dir import ( | ||||
|     TempDirectory, | ||||
|     TempDirectoryTypeRegistry, | ||||
|     tempdir_kinds, | ||||
| ) | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from ssl import SSLContext | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
    """Build an SSLContext backed by the OS trust store.

    Returns None when the interpreter lacks ssl support; raises CommandError
    when the Python version is too old or truststore is not installed.
    """
    if sys.version_info < (3, 10):
        raise CommandError("The truststore feature is only available for Python 3.10+")

    try:
        import ssl
    except ImportError:
        # Python built without ssl: degrade to no custom context.
        logger.warning("Disabling truststore since ssl support is missing")
        return None

    try:
        import truststore
    except ImportError:
        raise CommandError(
            "To use the truststore feature, 'truststore' must be installed into "
            "pip's current environment."
        )
    else:
        return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|  | ||||
|  | ||||
class SessionCommandMixin(CommandContextMixIn):

    """
    A class mixin for command classes needing _build_session().
    """

    def __init__(self) -> None:
        super().__init__()
        # Created lazily by get_default_session().
        self._session: Optional[PipSession] = None

    @classmethod
    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
        """Return a list of index urls from user-provided options."""
        urls: List[str] = []
        if not getattr(options, "no_index", False):
            primary = getattr(options, "index_url", None)
            if primary:
                urls.append(primary)
        extra = getattr(options, "extra_index_urls", None)
        if extra:
            urls.extend(extra)
        # Return None rather than an empty list
        return urls or None

    def get_default_session(self, options: Values) -> PipSession:
        """Get a default-managed session."""
        if self._session is None:
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(
        self,
        options: Values,
        retries: Optional[int] = None,
        timeout: Optional[int] = None,
        fallback_to_certifi: bool = False,
    ) -> PipSession:
        """Construct a PipSession from command-line options.

        `retries` / `timeout` override the corresponding option values when
        given. With `fallback_to_certifi`, a failing truststore setup falls
        back to the bundled CAs instead of raising.
        """
        cache_dir = options.cache_dir
        assert not cache_dir or os.path.isabs(cache_dir)

        ssl_context = None
        if "truststore" in options.features_enabled:
            try:
                ssl_context = _create_truststore_ssl_context()
            except Exception:
                if not fallback_to_certifi:
                    raise
                ssl_context = None

        session = PipSession(
            cache=os.path.join(cache_dir, "http") if cache_dir else None,
            retries=options.retries if retries is None else retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
            ssl_context=ssl_context,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = options.timeout if timeout is None else timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {scheme: options.proxy for scheme in ("http", "https")}

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input
        session.auth.keyring_provider = options.keyring_provider

        return session
|  | ||||
|  | ||||
class IndexGroupCommand(Command, SessionCommandMixin):

    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options: Values) -> None:
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, "no_index")

        skip_check = options.disable_pip_version_check or options.no_index
        if skip_check:
            return

        # Use a short-lived session: no retries, timeout capped at 5s.
        # fallback_to_certifi is set to ensure the function does not fail when
        # truststore is specified in use-feature but cannot be loaded. That
        # usually raises a CommandError with a nice user-facing error, but
        # this function is not called in that try-except block.
        capped_timeout = min(5, options.timeout)
        session = self._build_session(
            options,
            retries=0,
            timeout=capped_timeout,
            fallback_to_certifi=True,
        )
        with session:
            pip_self_version_check(session, options)
|  | ||||
|  | ||||
# Temp directory kinds that should be kept (not deleted on exit) when the
# user passes --no-clean, or when a PreviousBuildDirError occurs.
KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]
|  | ||||
|  | ||||
def warn_if_run_as_root() -> None:
    """Output a warning for sudo users on Unix.

    In a virtual environment, sudo pip still writes to virtualenv.
    On Windows, users may run pip as Administrator without issues.
    This warning only applies to Unix root users outside of virtualenv.
    """
    # Not applicable inside a virtualenv, or where getuid doesn't exist.
    if running_under_virtualenv() or not hasattr(os, "getuid"):
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform == "win32" or sys.platform == "cygwin":
        return
    # Only warn for an actual root user.
    if os.getuid() != 0:
        return

    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager. "
        "It is recommended to use a virtual environment instead: "
        "https://pip.pypa.io/warnings/venv"
    )
|  | ||||
|  | ||||
def with_cleanup(func: Any) -> Any:
    """Decorator for common logic related to managing temporary
    directories.
    """

    def keep_tempdirs(registry: TempDirectoryTypeRegistry) -> None:
        # Mark every keepable tempdir kind as "do not delete on exit".
        for kind in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(kind, False)

    def wrapper(
        self: RequirementCommand, options: Values, args: List[Any]
    ) -> Optional[int]:
        assert self.tempdir_registry is not None
        if options.no_clean:
            keep_tempdirs(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            keep_tempdirs(self.tempdir_registry)
            raise

    return wrapper
|  | ||||
|  | ||||
| class RequirementCommand(IndexGroupCommand): | ||||
    def __init__(self, *args: Any, **kw: Any) -> None:
        """Initialize the command and register the shared --no-clean option."""
        super().__init__(*args, **kw)

        self.cmd_opts.add_option(cmdoptions.no_clean())
|  | ||||
|     @staticmethod | ||||
|     def determine_resolver_variant(options: Values) -> str: | ||||
|         """Determines which resolver should be used, based on the given options.""" | ||||
|         if "legacy-resolver" in options.deprecated_features_enabled: | ||||
|             return "legacy" | ||||
|  | ||||
|         return "2020-resolver" | ||||
|  | ||||
|     @classmethod | ||||
|     def make_requirement_preparer( | ||||
|         cls, | ||||
|         temp_build_dir: TempDirectory, | ||||
|         options: Values, | ||||
|         build_tracker: BuildTracker, | ||||
|         session: PipSession, | ||||
|         finder: PackageFinder, | ||||
|         use_user_site: bool, | ||||
|         download_dir: Optional[str] = None, | ||||
|         verbosity: int = 0, | ||||
|     ) -> RequirementPreparer: | ||||
|         """ | ||||
|         Create a RequirementPreparer instance for the given parameters. | ||||
|         """ | ||||
|         temp_build_dir_path = temp_build_dir.path | ||||
|         assert temp_build_dir_path is not None | ||||
|         legacy_resolver = False | ||||
|  | ||||
|         resolver_variant = cls.determine_resolver_variant(options) | ||||
|         if resolver_variant == "2020-resolver": | ||||
|             lazy_wheel = "fast-deps" in options.features_enabled | ||||
|             if lazy_wheel: | ||||
|                 logger.warning( | ||||
|                     "pip is using lazily downloaded wheels using HTTP " | ||||
|                     "range requests to obtain dependency information. " | ||||
|                     "This experimental feature is enabled through " | ||||
|                     "--use-feature=fast-deps and it is not ready for " | ||||
|                     "production." | ||||
|                 ) | ||||
|         else: | ||||
|             legacy_resolver = True | ||||
|             lazy_wheel = False | ||||
|             if "fast-deps" in options.features_enabled: | ||||
|                 logger.warning( | ||||
|                     "fast-deps has no effect when used with the legacy resolver." | ||||
|                 ) | ||||
|  | ||||
|         return RequirementPreparer( | ||||
|             build_dir=temp_build_dir_path, | ||||
|             src_dir=options.src_dir, | ||||
|             download_dir=download_dir, | ||||
|             build_isolation=options.build_isolation, | ||||
|             check_build_deps=options.check_build_deps, | ||||
|             build_tracker=build_tracker, | ||||
|             session=session, | ||||
|             progress_bar=options.progress_bar, | ||||
|             finder=finder, | ||||
|             require_hashes=options.require_hashes, | ||||
|             use_user_site=use_user_site, | ||||
|             lazy_wheel=lazy_wheel, | ||||
|             verbosity=verbosity, | ||||
|             legacy_resolver=legacy_resolver, | ||||
|         ) | ||||
|  | ||||
    @classmethod
    def make_resolver(
        cls,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        options: Values,
        wheel_cache: Optional[WheelCache] = None,
        use_user_site: bool = False,
        ignore_installed: bool = True,
        ignore_requires_python: bool = False,
        force_reinstall: bool = False,
        upgrade_strategy: str = "to-satisfy-only",
        use_pep517: Optional[bool] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> BaseResolver:
        """
        Create a Resolver instance for the given parameters.

        Picks the resolvelib-based ("2020") resolver unless the user opted
        into the deprecated legacy resolver; both receive identical
        construction arguments.
        """
        # Factory for turning requirement strings into InstallRequirements.
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if resolver_variant == "2020-resolver":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
            )
        import pip._internal.resolution.legacy.resolver

        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )
|  | ||||
|     def get_requirements( | ||||
|         self, | ||||
|         args: List[str], | ||||
|         options: Values, | ||||
|         finder: PackageFinder, | ||||
|         session: PipSession, | ||||
|     ) -> List[InstallRequirement]: | ||||
|         """ | ||||
|         Parse command-line arguments into the corresponding requirements. | ||||
|         """ | ||||
|         requirements: List[InstallRequirement] = [] | ||||
|         for filename in options.constraints: | ||||
|             for parsed_req in parse_requirements( | ||||
|                 filename, | ||||
|                 constraint=True, | ||||
|                 finder=finder, | ||||
|                 options=options, | ||||
|                 session=session, | ||||
|             ): | ||||
|                 req_to_add = install_req_from_parsed_requirement( | ||||
|                     parsed_req, | ||||
|                     isolated=options.isolated_mode, | ||||
|                     user_supplied=False, | ||||
|                 ) | ||||
|                 requirements.append(req_to_add) | ||||
|  | ||||
|         for req in args: | ||||
|             req_to_add = install_req_from_line( | ||||
|                 req, | ||||
|                 comes_from=None, | ||||
|                 isolated=options.isolated_mode, | ||||
|                 use_pep517=options.use_pep517, | ||||
|                 user_supplied=True, | ||||
|                 config_settings=getattr(options, "config_settings", None), | ||||
|             ) | ||||
|             requirements.append(req_to_add) | ||||
|  | ||||
|         for req in options.editables: | ||||
|             req_to_add = install_req_from_editable( | ||||
|                 req, | ||||
|                 user_supplied=True, | ||||
|                 isolated=options.isolated_mode, | ||||
|                 use_pep517=options.use_pep517, | ||||
|                 config_settings=getattr(options, "config_settings", None), | ||||
|             ) | ||||
|             requirements.append(req_to_add) | ||||
|  | ||||
|         # NOTE: options.require_hashes may be set if --require-hashes is True | ||||
|         for filename in options.requirements: | ||||
|             for parsed_req in parse_requirements( | ||||
|                 filename, finder=finder, options=options, session=session | ||||
|             ): | ||||
|                 req_to_add = install_req_from_parsed_requirement( | ||||
|                     parsed_req, | ||||
|                     isolated=options.isolated_mode, | ||||
|                     use_pep517=options.use_pep517, | ||||
|                     user_supplied=True, | ||||
|                     config_settings=parsed_req.options.get("config_settings") | ||||
|                     if parsed_req.options | ||||
|                     else None, | ||||
|                 ) | ||||
|                 requirements.append(req_to_add) | ||||
|  | ||||
|         # If any requirement has hash options, enable hash checking. | ||||
|         if any(req.has_hash_options for req in requirements): | ||||
|             options.require_hashes = True | ||||
|  | ||||
|         if not (args or options.editables or options.requirements): | ||||
|             opts = {"name": self.name} | ||||
|             if options.find_links: | ||||
|                 raise CommandError( | ||||
|                     "You must give at least one requirement to {name} " | ||||
|                     '(maybe you meant "pip {name} {links}"?)'.format( | ||||
|                         **dict(opts, links=" ".join(options.find_links)) | ||||
|                     ) | ||||
|                 ) | ||||
|             else: | ||||
|                 raise CommandError( | ||||
|                     "You must give at least one requirement to {name} " | ||||
|                     '(see "pip help {name}")'.format(**opts) | ||||
|                 ) | ||||
|  | ||||
|         return requirements | ||||
|  | ||||
|     @staticmethod | ||||
|     def trace_basic_info(finder: PackageFinder) -> None: | ||||
|         """ | ||||
|         Trace basic information about the provided objects. | ||||
|         """ | ||||
|         # Display where finder is looking for packages | ||||
|         search_scope = finder.search_scope | ||||
|         locations = search_scope.get_formatted_locations() | ||||
|         if locations: | ||||
|             logger.info(locations) | ||||
|  | ||||
|     def _build_package_finder( | ||||
|         self, | ||||
|         options: Values, | ||||
|         session: PipSession, | ||||
|         target_python: Optional[TargetPython] = None, | ||||
|         ignore_requires_python: Optional[bool] = None, | ||||
|     ) -> PackageFinder: | ||||
|         """ | ||||
|         Create a package finder appropriate to this requirement command. | ||||
|  | ||||
|         :param ignore_requires_python: Whether to ignore incompatible | ||||
|             "Requires-Python" values in links. Defaults to False. | ||||
|         """ | ||||
|         link_collector = LinkCollector.create(session, options=options) | ||||
|         selection_prefs = SelectionPreferences( | ||||
|             allow_yanked=True, | ||||
|             format_control=options.format_control, | ||||
|             allow_all_prereleases=options.pre, | ||||
|             prefer_binary=options.prefer_binary, | ||||
|             ignore_requires_python=ignore_requires_python, | ||||
|         ) | ||||
|  | ||||
|         return PackageFinder.create( | ||||
|             link_collector=link_collector, | ||||
|             selection_prefs=selection_prefs, | ||||
|             target_python=target_python, | ||||
|         ) | ||||
| @ -0,0 +1,159 @@ | ||||
| import contextlib | ||||
| import itertools | ||||
| import logging | ||||
| import sys | ||||
| import time | ||||
| from typing import IO, Generator, Optional | ||||
|  | ||||
| from pip._internal.utils.compat import WINDOWS | ||||
| from pip._internal.utils.logging import get_indentation | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class SpinnerInterface:
    """Minimal interface shared by the interactive and logging spinners."""

    def spin(self) -> None:
        """Advance the spinner by one tick."""
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        """Stop the spinner and show *final_status* as the outcome."""
        raise NotImplementedError()
|  | ||||
|  | ||||
class InteractiveSpinner(SpinnerInterface):
    """Spinner for a real terminal: animates a status character in place."""

    def __init__(
        self,
        message: str,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        self._file = sys.stdout if file is None else file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)
        # The static prefix is printed once; only the trailing status
        # character is redrawn afterwards.
        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Backspace over the previous status, blank it out with spaces,
        # backspace again, then draw the new status in its place.
        erase = "\b" * self._width
        self._file.write(erase + " " * self._width + erase)
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
|  | ||||
|  | ||||
| # Used for dumb terminals, non-interactive installs (no tty), etc. | ||||
| # We still print updates occasionally (once every 60 seconds by default) to | ||||
| # act as a keep-alive for systems like Travis-CI that take lack-of-output as | ||||
| # an indication that a task has frozen. | ||||
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner for dumb terminals / non-tty runs: emits periodic log lines
    instead of animating (acts as a keep-alive for CI systems)."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True
|  | ||||
|  | ||||
class RateLimiter:
    """Tracks whether at least a minimum wall-clock interval has elapsed
    since the last recorded update."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch timestamp of the last update; 0 means "never", so the very
        # first ready() call returns True.
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True if enough time has passed to allow another update."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Record that an update just happened."""
        self._last_update = time.time()
|  | ||||
|  | ||||
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    interactive = sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO
    spinner: SpinnerInterface = (
        InteractiveSpinner(message) if interactive else NonInteractiveSpinner(message)
    )
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
|  | ||||
|  | ||||
# ANSI (DECTCEM) escape sequences to hide and show the terminal cursor.
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"
|  | ||||
|  | ||||
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    # We also don't want to clutter the output with control characters if
    # we're writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    quiet = logger.getEffectiveLevel() > logging.INFO
    if WINDOWS or not file.isatty() or quiet:
        yield
        return
    file.write(HIDE_CURSOR)
    try:
        yield
    finally:
        # Always restore the cursor, even if the body raised.
        file.write(SHOW_CURSOR)
| @ -0,0 +1,6 @@ | ||||
# Process exit codes used throughout pip's CLI.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
| @ -0,0 +1,132 @@ | ||||
| """ | ||||
| Package containing all pip commands | ||||
| """ | ||||
|  | ||||
| import importlib | ||||
| from collections import namedtuple | ||||
| from typing import Any, Dict, Optional | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
|  | ||||
# Lightweight record describing a command: the module to import it from,
# the class name to instantiate, and a one-line summary for help output.
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
|  | ||||
| # This dictionary does a bunch of heavy lifting for help output: | ||||
| # - Enables avoiding additional (costly) imports for presenting `--help`. | ||||
| # - The ordering matters for help display. | ||||
| # | ||||
| # Even though the module path starts with the same "pip._internal.commands" | ||||
| # prefix, the full path makes testing easier (specifically when modifying | ||||
| # `commands_dict` in test setup / teardown). | ||||
commands_dict: Dict[str, CommandInfo] = {
    "install": CommandInfo(
        "pip._internal.commands.install",
        "InstallCommand",
        "Install packages.",
    ),
    "download": CommandInfo(
        "pip._internal.commands.download",
        "DownloadCommand",
        "Download packages.",
    ),
    "uninstall": CommandInfo(
        "pip._internal.commands.uninstall",
        "UninstallCommand",
        "Uninstall packages.",
    ),
    "freeze": CommandInfo(
        "pip._internal.commands.freeze",
        "FreezeCommand",
        "Output installed packages in requirements format.",
    ),
    "inspect": CommandInfo(
        "pip._internal.commands.inspect",
        "InspectCommand",
        "Inspect the python environment.",
    ),
    "list": CommandInfo(
        "pip._internal.commands.list",
        "ListCommand",
        "List installed packages.",
    ),
    "show": CommandInfo(
        "pip._internal.commands.show",
        "ShowCommand",
        "Show information about installed packages.",
    ),
    "check": CommandInfo(
        "pip._internal.commands.check",
        "CheckCommand",
        "Verify installed packages have compatible dependencies.",
    ),
    "config": CommandInfo(
        "pip._internal.commands.configuration",
        "ConfigurationCommand",
        "Manage local and global configuration.",
    ),
    "search": CommandInfo(
        "pip._internal.commands.search",
        "SearchCommand",
        "Search PyPI for packages.",
    ),
    "cache": CommandInfo(
        "pip._internal.commands.cache",
        "CacheCommand",
        "Inspect and manage pip's wheel cache.",
    ),
    "index": CommandInfo(
        "pip._internal.commands.index",
        "IndexCommand",
        "Inspect information available from package indexes.",
    ),
    "wheel": CommandInfo(
        "pip._internal.commands.wheel",
        "WheelCommand",
        "Build wheels from your requirements.",
    ),
    "hash": CommandInfo(
        "pip._internal.commands.hash",
        "HashCommand",
        "Compute hashes of package archives.",
    ),
    "completion": CommandInfo(
        "pip._internal.commands.completion",
        "CompletionCommand",
        "A helper command used for command completion.",
    ),
    "debug": CommandInfo(
        "pip._internal.commands.debug",
        "DebugCommand",
        "Show information useful for debugging.",
    ),
    # Insertion order is the help-display order (see the note above this
    # dict), so "help" appears at the end of the listing.
    "help": CommandInfo(
        "pip._internal.commands.help",
        "HelpCommand",
        "Show help for commands.",
    ),
}
|  | ||||
|  | ||||
def create_command(name: str, **kwargs: Any) -> Command:
    """
    Create an instance of the Command class with the given name.
    """
    # Import lazily: command modules are costly, and most invocations only
    # ever need one of them.
    info = commands_dict[name]
    module = importlib.import_module(info.module_path)
    command_class = getattr(module, info.class_name)
    return command_class(name=name, summary=info.summary, **kwargs)
|  | ||||
|  | ||||
def get_similar_commands(name: str) -> Optional[str]:
    """Command name auto-correct."""
    from difflib import get_close_matches

    # Case-insensitive fuzzy match against the known command names.
    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else None
| @ -0,0 +1,222 @@ | ||||
| import os | ||||
| import textwrap | ||||
| from optparse import Values | ||||
| from typing import Any, List | ||||
|  | ||||
| import pip._internal.utils.filesystem as filesystem | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.exceptions import CommandError, PipError | ||||
| from pip._internal.utils.logging import getLogger | ||||
|  | ||||
| logger = getLogger(__name__) | ||||
|  | ||||
|  | ||||
class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    - dir: Show the cache directory.
    - info: Show information about the cache.
    - list: List filenames of packages stored in the cache.
    - remove: Remove one or more package from the cache.
    - purge: Remove all items from the cache.

    ``<pattern>`` can be a glob expression or a package name.
    """

    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>] [--format=[human, abspath]]
        %prog remove <pattern>
        %prog purge
    """

    def add_options(self) -> None:
        # Only `pip cache list` consumes this; other subcommands ignore it.
        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Dispatch table: the first positional argument picks the handler.
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        # Cache disabled (--no-cache-dir / config): nothing to operate on.
        if not options.cache_dir:
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        # `pip cache dir` takes no extra arguments.
        if args:
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        # `pip cache info` takes no extra arguments.
        if args:
            raise CommandError("Too many arguments")

        # Gather counts and on-disk sizes for the two cache areas:
        # "http" (index/download cache) and "wheels" (locally built wheels).
        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, "*"))

        http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_directory_size(http_cache_location)
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = (
            textwrap.dedent(
                """
                    Package index page cache location: {http_cache_location}
                    Package index page cache size: {http_cache_size}
                    Number of HTTP files: {num_http_files}
                    Locally built wheels location: {wheels_cache_location}
                    Locally built wheels size: {wheels_cache_size}
                    Number of locally built wheels: {package_count}
                """
            )
            .format(
                http_cache_location=http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        # Accepts at most one optional glob pattern; defaults to everything.
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        # Human-readable listing: basename plus formatted size, sorted.
        if not files:
            logger.info("No locally built wheels cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        # Machine-friendly listing: one absolute path per line, sorted;
        # deliberately prints nothing when there are no matches.
        if not files:
            return

        results = []
        for filename in files:
            results.append(filename)

        logger.info("\n".join(sorted(results)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        # Requires exactly one pattern argument.
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += ' for pattern "{}"'.format(args[0])

        if not files:
            logger.warning(no_matching_msg)

        for filename in files:
            os.unlink(filename)
            logger.verbose("Removed %s", filename)
        logger.info("Files removed: %s", len(files))

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        # `pip cache purge` takes no extra arguments; it is equivalent to
        # `pip cache remove "*"` (which also clears the http cache).
        if args:
            raise CommandError("Too many arguments")

        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        # Path of a named subdirectory inside the configured cache dir.
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        # All files in the HTTP (index/download) cache area.
        http_dir = self._cache_dir(options, "http")
        return filesystem.find_files(http_dir, "*")

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
| @ -0,0 +1,54 @@ | ||||
| import logging | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.operations.check import ( | ||||
|     check_package_set, | ||||
|     create_package_set_from_installed, | ||||
|     warn_legacy_versions_and_specifiers, | ||||
| ) | ||||
| from pip._internal.utils.misc import write_output | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[str]) -> int:
        # Build the dependency view of everything installed; distributions
        # that could not be parsed at all are reported via parsing_probs.
        package_set, parsing_probs = create_package_set_from_installed()
        warn_legacy_versions_and_specifiers(package_set)
        missing, conflicting = check_package_set(package_set)

        # Report dependencies that are declared but not installed.
        for project_name, absent_deps in missing.items():
            version = package_set[project_name].version
            for dependency in absent_deps:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name,
                    version,
                    dependency[0],
                )

        # Report installed dependencies whose versions violate a requirement.
        for project_name, bad_deps in conflicting.items():
            version = package_set[project_name].version
            for dep_name, dep_version, req in bad_deps:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )

        if missing or conflicting or parsing_probs:
            return ERROR
        write_output("No broken requirements found.")
        return SUCCESS
| @ -0,0 +1,121 @@ | ||||
| import sys | ||||
| import textwrap | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.utils.misc import get_prog | ||||
|  | ||||
# Wrapper emitted around every shell script; the begin/end marker comments
# let users (and tools) locate the pip-generated section in a shell rc file.
BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""

# Per-shell completion scripts. Each one is str.format()-ed with
# prog=<pip program name>, which is why literal braces are doubled ({{ }}).
COMPLETION_SCRIPTS = {
    "bash": """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
    "zsh": """
        #compdef -P pip[0-9.]#
        compadd $( COMP_WORDS="$words[*]" \\
                   COMP_CWORD=$((CURRENT-1)) \\
                   PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
    """,
    "fish": """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\  -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
    "powershell": """
        if ((Test-Path Function:\\TabExpansion) -and -not `
            (Test-Path Function:\\_pip_completeBackup)) {{
            Rename-Item Function:\\TabExpansion _pip_completeBackup
        }}
        function TabExpansion($line, $lastWord) {{
            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
            if ($lastBlock.StartsWith("{prog} ")) {{
                $Env:COMP_WORDS=$lastBlock
                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
                $Env:PIP_AUTO_COMPLETE=1
                (& {prog}).Split()
                Remove-Item Env:COMP_WORDS
                Remove-Item Env:COMP_CWORD
                Remove-Item Env:PIP_AUTO_COMPLETE
            }}
            elseif (Test-Path Function:\\_pip_completeBackup) {{
                # Fall back on existing tab expansion
                _pip_completeBackup $line $lastWord
            }}
        }}
    """,
}
|  | ||||
|  | ||||
class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    ignore_require_venv = True

    def add_options(self) -> None:
        # One store-const flag per supported shell; they all write into the
        # single `shell` destination, so the last flag given wins.
        for shell_name, short_flag in (
            ("bash", "-b"),
            ("zsh", "-z"),
            ("fish", "-f"),
            ("powershell", "-p"),
        ):
            self.cmd_opts.add_option(
                "--" + shell_name,
                short_flag,
                action="store_const",
                const=shell_name,
                dest="shell",
                help=f"Emit completion code for {shell_name}",
            )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
        else:
            shell_options = ["--" + shell for shell in sorted(shells)]
            sys.stderr.write(
                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
            )
            # NOTE(review): this error path still exits with SUCCESS;
            # that matches the original behavior, so it is preserved here.
        return SUCCESS
| @ -0,0 +1,282 @@ | ||||
| import logging | ||||
| import os | ||||
| import subprocess | ||||
| from optparse import Values | ||||
| from typing import Any, List, Optional | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.configuration import ( | ||||
|     Configuration, | ||||
|     Kind, | ||||
|     get_configuration_files, | ||||
|     kinds, | ||||
| ) | ||||
| from pip._internal.exceptions import PipError | ||||
| from pip._internal.utils.logging import indent_log | ||||
| from pip._internal.utils.misc import get_prog, write_output | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class ConfigurationCommand(Command):
    """
    Manage local and global configuration.

    Subcommands:

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with command.option
    - set: Set the command.option=value
    - unset: Unset the value associated with command.option
    - debug: List the configuration files and values defined under them

    Configuration keys should be dot separated command and option name,
    with the special prefix "global" affecting any command. For example,
    "pip config set global.index-url https://example.org/" would configure
    the index url for all commands, but "pip config set download.timeout 10"
    would configure a 10 second timeout only for "pip download" commands.

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    # Configuration may be inspected/edited outside of any virtualenv.
    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get command.option
        %prog [<file-option>] set command.option value
        %prog [<file-option>] unset command.option
        %prog [<file-option>] debug
    """

    def add_options(self) -> None:
        """Register --editor plus the mutually exclusive file-selector flags."""
        self.cmd_opts.add_option(
            "--editor",
            dest="editor",
            action="store",
            default=None,
            help=(
                "Editor to use to edit the file. Uses VISUAL or EDITOR "
                "environment variables if not provided."
            ),
        )

        self.cmd_opts.add_option(
            "--global",
            dest="global_file",
            action="store_true",
            default=False,
            help="Use the system-wide configuration file only",
        )

        self.cmd_opts.add_option(
            "--user",
            dest="user_file",
            action="store_true",
            default=False,
            help="Use the user configuration file only",
        )

        self.cmd_opts.add_option(
            "--site",
            dest="site_file",
            action="store_true",
            default=False,
            help="Use the current environment configuration file only",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch ``args[0]`` to the matching subcommand handler.

        Loads the configuration (restricted to a single file for the
        modifying actions) before invoking the handler. PipErrors raised
        by the handlers are logged here and converted into ERROR.
        """
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
        """Pick the single configuration file variant to operate upon.

        Returns None when no selector flag was passed and the action does
        not require a specific file (so every file may be loaded).
        Raises PipError when more than one of --user/--global/--site is set.
        """
        file_options = [
            key
            for key, value in (
                (kinds.USER, options.user_file),
                (kinds.GLOBAL, options.global_file),
                (kinds.SITE, options.site_file),
            )
            if value
        ]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global) to perform."
        )

    def list_values(self, options: Values, args: List[str]) -> None:
        """Handler for "list": print every active key=value pair."""
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options: Values, args: List[str]) -> None:
        """Handler for "get": print the value stored under one key."""
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options: Values, args: List[str]) -> None:
        """Handler for "set": store key=value, then persist to disk."""
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options: Values, args: List[str]) -> None:
        """Handler for "unset": remove a key, then persist to disk."""
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def list_config_values(self, options: Values, args: List[str]) -> None:
        """List config key-value pairs across different config files"""
        self._get_n_args(args, "debug", n=0)

        self.print_env_var_values()
        # Iterate over config files and print if they exist, and the
        # key-value pairs present in them if they do
        for variant, files in sorted(self.configuration.iter_config_files()):
            write_output("%s:", variant)
            for fname in files:
                with indent_log():
                    file_exists = os.path.exists(fname)
                    write_output("%s, exists: %r", fname, file_exists)
                    if file_exists:
                        self.print_config_file_values(variant)

    def print_config_file_values(self, variant: Kind) -> None:
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self) -> None:
        """Get key-values pairs present as environment variables"""
        write_output("%s:", "env_var")
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                # Reconstruct the PIP_<OPTION> spelling the user set.
                env_var = f"PIP_{key.upper()}"
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options: Values, args: List[str]) -> None:
        """Handler for "edit": open the chosen config file in an editor."""
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")
        elif '"' in fname:
            # This shouldn't happen, unless we see a username like that.
            # If that happens, we'd appreciate a pull request fixing this.
            raise PipError(
                f'Can not open an editor for a file name containing "\n{fname}'
            )

        try:
            # NOTE(review): shell=True with an unquoted editor value lets
            # the editor string carry its own arguments (e.g. "code --wait");
            # the file name itself is quoted, and names containing a double
            # quote were rejected above.
            subprocess.check_call(f'{editor} "{fname}"', shell=True)
        except FileNotFoundError as e:
            if not e.filename:
                e.filename = editor
            raise
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}".format(e.returncode)
            )

    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
        """Helper to make sure the command got the right number of arguments"""
        if len(args) != n:
            msg = (
                "Got unexpected number of arguments, expected {}. "
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self) -> None:
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.exception(
                "Unable to save configuration. Please report this as a bug."
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options: Values) -> str:
        """Return the editor command: --editor flag, then $VISUAL, then $EDITOR."""
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
| @ -0,0 +1,199 @@ | ||||
| import importlib.resources | ||||
| import locale | ||||
| import logging | ||||
| import os | ||||
| import sys | ||||
| from optparse import Values | ||||
| from types import ModuleType | ||||
| from typing import Any, Dict, List, Optional | ||||
|  | ||||
| import pip._vendor | ||||
| from pip._vendor.certifi import where | ||||
| from pip._vendor.packaging.version import parse as parse_version | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.cmdoptions import make_target_python | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.configuration import Configuration | ||||
| from pip._internal.metadata import get_environment | ||||
| from pip._internal.utils.logging import indent_log | ||||
| from pip._internal.utils.misc import get_pip_version | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def show_value(name: str, value: Any) -> None:
    """Log a single ``<name>: <value>`` line at INFO level."""
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("%s: %s", name, value)
|  | ||||
|  | ||||
def show_sys_implementation() -> None:
    """Log the name of the running Python implementation, indented."""
    logger.info("sys.implementation:")
    with indent_log():
        show_value("name", sys.implementation.name)
|  | ||||
|  | ||||
def create_vendor_txt_map() -> Dict[str, str]:
    """Parse pip._vendor/vendor.txt into a {module name: pinned version} map.

    Lines without an "==" pin (blanks, comments, section markers) are
    skipped, and anything after the first space on a pinning line
    (trailing comments, environment markers) is dropped.
    """
    # importlib.resources.open_text() is deprecated since Python 3.11;
    # use the files() traversable API with an explicit utf-8 encoding
    # (matching open_text()'s default) instead.
    vendor_txt = (
        importlib.resources.files("pip._vendor")
        .joinpath("vendor.txt")
        .read_text(encoding="utf-8")
    )

    # Purge non version specifying lines.
    # Also, remove any space prefix or suffixes (including comments).
    lines = [
        line.strip().split(" ", 1)[0]
        for line in vendor_txt.splitlines()
        if "==" in line
    ]

    # Transform into "module" -> version dict.
    return dict(line.split("==", 1) for line in lines)
|  | ||||
|  | ||||
def get_module_from_module_name(module_name: str) -> ModuleType:
    """Import and return a vendored module given its vendor.txt name."""
    # Module name can be uppercase in vendor.txt for some reason...
    normalized = module_name.lower().replace("-", "_")
    # PATCH: setuptools is actually only pkg_resources.
    if normalized == "setuptools":
        normalized = "pkg_resources"

    __import__(f"pip._vendor.{normalized}", globals(), locals(), level=0)
    return getattr(pip._vendor, normalized)
|  | ||||
|  | ||||
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
    """Return the version of a vendored module, or None if undeterminable."""
    module = get_module_from_module_name(module_name)
    version = getattr(module, "__version__", None)
    if version:
        return version

    # Try to find version in debundled module info.
    assert module.__file__ is not None
    env = get_environment([os.path.dirname(module.__file__)])
    dist = env.get_distribution(module_name)
    return str(dist.version) if dist else None
|  | ||||
|  | ||||
def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
    """Log the actual version and print extra info if there is
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        actual_version = get_vendor_version_from_module(module_name)
        extra_message = ""

        if not actual_version:
            # Could not determine the running version: fall back to the pin.
            actual_version = expected_version
            extra_message = (
                " (Unable to locate actual module version, using"
                " vendor.txt specified version)"
            )
        elif parse_version(actual_version) != parse_version(expected_version):
            # Running version disagrees with the vendor.txt pin.
            extra_message = (
                " (CONFLICT: vendor.txt suggests version should"
                " be {})".format(expected_version)
            )

        logger.info("%s==%s%s", module_name, actual_version, extra_message)
|  | ||||
|  | ||||
def show_vendor_versions() -> None:
    """Log each vendored library with its detected version, indented."""
    logger.info("vendored library versions:")
    with indent_log():
        show_actual_vendor_versions(create_vendor_txt_map())
|  | ||||
|  | ||||
def show_tags(options: Values) -> None:
    """Log the compatible wheel tags for the (possibly overridden) target."""
    tag_limit = 10

    target_python = make_target_python(options)
    tags = target_python.get_tags()

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = f" (target: {formatted_target})" if formatted_target else ""

    logger.info("Compatible tags: {}{}".format(len(tags), suffix))

    # Without --verbose, truncate long tag lists to the first few entries.
    tags_limited = options.verbose < 1 and len(tags) > tag_limit
    if tags_limited:
        tags = tags[:tag_limit]

    with indent_log():
        for tag in tags:
            logger.info(str(tag))

        if tags_limited:
            logger.info(
                (
                    "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
                ).format(tag_limit=tag_limit)
            )
|  | ||||
|  | ||||
def ca_bundle_info(config: Configuration) -> str:
    """Summarize which configuration levels define settings.

    Returns "Not specified" when the config is empty, "global" when no
    command-specific level overrides the global one, and otherwise the
    comma-joined set of levels (minus "global").
    """
    levels = {key.split(".")[0] for key, _ in config.items()}

    if not levels:
        return "Not specified"

    levels_that_override_global = ["install", "wheel", "download"]
    if not any(level in levels_that_override_global for level in levels):
        return "global"

    levels.discard("global")
    return ", ".join(levels)
|  | ||||
|  | ||||
class DebugCommand(Command):
    """
    Display debug information.
    """

    usage = """
      %prog <options>"""
    # Debug output should be obtainable even outside any virtualenv.
    ignore_require_venv = True

    def add_options(self) -> None:
        # Target-python selectors (--platform, --python-version, ...) are
        # consumed later by show_tags().
        cmdoptions.add_target_python_options(self.cmd_opts)
        self.parser.insert_option_group(0, self.cmd_opts)
        # Load the config eagerly so run() can pass it to ca_bundle_info().
        self.parser.config.load()

    def run(self, options: Values, args: List[str]) -> int:
        """Log interpreter, encoding, CA-bundle, vendored-library and tag info."""
        logger.warning(
            "This command is only meant for debugging. "
            "Do not use this with automation for parsing and getting these "
            "details, since the output and options of this command may "
            "change without notice."
        )
        # Interpreter and encoding details.
        show_value("pip version", get_pip_version())
        show_value("sys.version", sys.version)
        show_value("sys.executable", sys.executable)
        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
        show_value(
            "locale.getpreferredencoding",
            locale.getpreferredencoding(),
        )
        show_value("sys.platform", sys.platform)
        show_sys_implementation()

        # Certificate / CA-bundle related settings.
        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

        show_vendor_versions()

        show_tags(options)

        return SUCCESS
| @ -0,0 +1,147 @@ | ||||
| import logging | ||||
| import os | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.cmdoptions import make_target_python | ||||
| from pip._internal.cli.req_command import RequirementCommand, with_cleanup | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.operations.build.build_tracker import get_build_tracker | ||||
| from pip._internal.req.req_install import check_legacy_setup_py_options | ||||
| from pip._internal.utils.misc import ensure_dir, normalize_path, write_output | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        """Register shared requirement options, -d/--dest, target-python
        selectors and the package-index option group."""
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        # Destination defaults to the current working directory.
        self.cmd_opts.add_option(
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help="Download packages into <dir>.",
        )

        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve the requested requirements and save their files to
        options.download_dir, without installing anything."""
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        # Make sure the destination directory exists before downloading.
        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        # The finder locates candidate links for the (possibly cross-)target.
        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        build_tracker = self.enter_context(get_build_tracker())

        # Scratch build space; cleaned up automatically unless --no-clean.
        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Save every resolved requirement that is not already satisfied
        # by an installed distribution.
        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        if downloaded:
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS
| @ -0,0 +1,108 @@ | ||||
| import sys | ||||
| from optparse import Values | ||||
| from typing import AbstractSet, List | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.operations.freeze import freeze | ||||
| from pip._internal.utils.compat import stdlib_pkgs | ||||
|  | ||||
|  | ||||
def _should_suppress_build_backends() -> bool:
    """Return True when build-backend packages should be hidden from output.

    True exactly on interpreters older than Python 3.12.
    """
    major, minor = sys.version_info[:2]
    return (major, minor) < (3, 12)
|  | ||||
|  | ||||
def _dev_pkgs() -> AbstractSet[str]:
    """Package names skipped from freeze output by default (see --all)."""
    if _should_suppress_build_backends():
        # Pre-3.12: also hide the classic build-backend packages.
        return {"pip", "setuptools", "distribute", "wheel"}
    return {"pip"}
|  | ||||
|  | ||||
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """

    usage = """
      %prog [options]"""
    # Route all log output to stderr so stdout carries only the
    # requirement lines.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def add_options(self) -> None:
        """Register freeze options (-r, -l, --user, --path, --all, ...)."""
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Use the order in the given requirements file and its "
                "comments when generating output. This option can be "
                "used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            dest="local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not output "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--all",
            dest="freeze_all",
            action="store_true",
            help=(
                "Do not skip these packages in the output:"
                " {}".format(", ".join(_dev_pkgs()))
            ),
        )
        self.cmd_opts.add_option(
            "--exclude-editable",
            dest="exclude_editable",
            action="store_true",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Write one requirement line per installed package to stdout."""
        # Always skip stdlib-shipped packages; additionally skip pip and
        # the build backends unless --all was given, plus any --exclude's.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(_dev_pkgs())

        if options.excludes:
            skip.update(options.excludes)

        cmdoptions.check_list_path_option(options)

        for line in freeze(
            requirement=options.requirements,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            sys.stdout.write(line + "\n")
        return SUCCESS
| @ -0,0 +1,59 @@ | ||||
| import hashlib | ||||
| import logging | ||||
| import sys | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES | ||||
| from pip._internal.utils.misc import read_chunks, write_output | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = "%prog [options] <file> ..."
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-a",
            "--algorithm",
            dest="algorithm",
            action="store",
            choices=STRONG_HASHES,
            default=FAVORITE_HASH,
            # Identical text to str.format over the same join.
            help=f"The hash algorithm to use: one of {', '.join(STRONG_HASHES)}",
        )
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # With no files there is nothing to hash: show usage on stderr, fail.
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algo = options.algorithm
        for archive_path in args:
            write_output(
                "%s:\n--hash=%s:%s",
                archive_path,
                algo,
                _hash_of_file(archive_path, algo),
            )
        return SUCCESS
|  | ||||
|  | ||||
| def _hash_of_file(path: str, algorithm: str) -> str: | ||||
|     """Return the hash digest of a file.""" | ||||
|     with open(path, "rb") as archive: | ||||
|         hash = hashlib.new(algorithm) | ||||
|         for chunk in read_chunks(archive): | ||||
|             hash.update(chunk) | ||||
|     return hash.hexdigest() | ||||
| @ -0,0 +1,41 @@ | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.exceptions import CommandError | ||||
|  | ||||
|  | ||||
class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options: Values, args: List[str]) -> int:
        # Imported lazily — presumably to avoid a circular import with the
        # commands registry; confirm before moving to module level.
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        # 'pip help' with no args is handled by pip.__init__.parseopt()
        if not args:
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            parts = [f'unknown command "{cmd_name}"']
            if guess:
                parts.append(f'maybe you meant "{guess}"')

            raise CommandError(" - ".join(parts))

        create_command(cmd_name).parser.print_help()

        return SUCCESS
| @ -0,0 +1,139 @@ | ||||
| import logging | ||||
| from optparse import Values | ||||
| from typing import Any, Iterable, List, Optional, Union | ||||
|  | ||||
| from pip._vendor.packaging.version import LegacyVersion, Version | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.req_command import IndexGroupCommand | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.commands.search import print_dist_installation_info | ||||
| from pip._internal.exceptions import CommandError, DistributionNotFound, PipError | ||||
| from pip._internal.index.collector import LinkCollector | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.models.selection_prefs import SelectionPreferences | ||||
| from pip._internal.models.target_python import TargetPython | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.utils.misc import write_output | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class IndexCommand(IndexGroupCommand):
    """
    Inspect information available from package indexes.
    """

    ignore_require_venv = True
    usage = """
        %prog versions <package>
    """

    def add_options(self) -> None:
        """Register the options shared by all ``pip index`` actions."""
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch ``args[0]`` to the matching action handler.

        Returns ERROR when no (known) action is given or when the handler
        raises a ``PipError``; SUCCESS otherwise.
        """
        # Action name -> bound handler; "versions" is the only action so far.
        handlers = {
            "versions": self.get_available_package_versions,
        }

        logger.warning(
            "pip index is currently an experimental command. "
            "It may be removed/changed in a future release "
            "without prior warning."
        )

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to the index command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )

    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
        """Print every index-available version of the one package in *args*.

        :raises CommandError: if not exactly one package argument was given.
        :raises DistributionNotFound: if no matching version exists.
        """
        if len(args) != 1:
            raise CommandError("You need to specify exactly one argument")

        target_python = cmdoptions.make_target_python(options)
        query = args[0]

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )

            # Lazy generator over candidate versions found on the index.
            versions: Iterable[Union[LegacyVersion, Version]] = (
                candidate.version for candidate in finder.find_all_candidates(query)
            )

            if not options.pre:
                # Remove prereleases
                versions = (
                    version for version in versions if not version.is_prerelease
                )
            # Materialize (and deduplicate) while the session is still open:
            # the generators above lazily consume finder results.
            versions = set(versions)

            if not versions:
                raise DistributionNotFound(
                    "No matching distribution found for {}".format(query)
                )

            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
            latest = formatted_versions[0]

        # Output happens after the session closes; only strings are needed here.
        write_output("{} ({})".format(query, latest))
        write_output("Available versions: {}".format(", ".join(formatted_versions)))
        print_dist_installation_info(query, latest)
| @ -0,0 +1,92 @@ | ||||
| import logging | ||||
| from optparse import Values | ||||
| from typing import Any, Dict, List | ||||
|  | ||||
| from pip._vendor.packaging.markers import default_environment | ||||
| from pip._vendor.rich import print_json | ||||
|  | ||||
| from pip import __version__ | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.req_command import Command | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.metadata import BaseDistribution, get_environment | ||||
| from pip._internal.utils.compat import stdlib_pkgs | ||||
| from pip._internal.utils.urls import path_to_url | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class InspectCommand(Command):
    """
    Inspect the content of a Python environment and produce a report in JSON format.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        """Register the --local / --user / --path filtering options."""
        self.cmd_opts.add_option(
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Collect installed distributions and print the JSON report to stdout."""
        cmdoptions.check_list_path_option(options)
        dists = get_environment(options.path).iter_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            skip=set(stdlib_pkgs),
        )
        output = {
            "version": "1",
            "pip_version": __version__,
            "installed": [self._dist_to_dict(dist) for dist in dists],
            "environment": default_environment(),
            # TODO tags? scheme?
        }
        print_json(data=output)
        return SUCCESS

    def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
        """Serialize one installed distribution into a report entry."""
        res: Dict[str, Any] = {
            "metadata": dist.metadata_dict,
            "metadata_location": dist.info_location,
        }
        # direct_url. Note that we don't have download_info (as in the installation
        # report) since it is not recorded in installed metadata.
        direct_url = dist.direct_url
        if direct_url is not None:
            res["direct_url"] = direct_url.to_dict()
        else:
            # Emulate direct_url for legacy editable installs.
            editable_project_location = dist.editable_project_location
            if editable_project_location is not None:
                res["direct_url"] = {
                    "url": path_to_url(editable_project_location),
                    "dir_info": {
                        "editable": True,
                    },
                }
        # installer
        installer = dist.installer
        # Fixed: previously tested `dist.installer` again, re-evaluating the
        # property instead of reusing the value captured above.
        if installer:
            res["installer"] = installer
        # requested
        if dist.installed_with_dist_info:
            res["requested"] = dist.requested
        return res
| @ -0,0 +1,778 @@ | ||||
| import errno | ||||
| import json | ||||
| import operator | ||||
| import os | ||||
| import shutil | ||||
| import site | ||||
| from optparse import SUPPRESS_HELP, Values | ||||
| from typing import List, Optional | ||||
|  | ||||
| from pip._vendor.rich import print_json | ||||
|  | ||||
| from pip._internal.cache import WheelCache | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.cmdoptions import make_target_python | ||||
| from pip._internal.cli.req_command import ( | ||||
|     RequirementCommand, | ||||
|     warn_if_run_as_root, | ||||
|     with_cleanup, | ||||
| ) | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.exceptions import CommandError, InstallationError | ||||
| from pip._internal.locations import get_scheme | ||||
| from pip._internal.metadata import get_environment | ||||
| from pip._internal.models.installation_report import InstallationReport | ||||
| from pip._internal.operations.build.build_tracker import get_build_tracker | ||||
| from pip._internal.operations.check import ConflictDetails, check_install_conflicts | ||||
| from pip._internal.req import install_given_reqs | ||||
| from pip._internal.req.req_install import ( | ||||
|     InstallRequirement, | ||||
|     check_legacy_setup_py_options, | ||||
| ) | ||||
| from pip._internal.utils.compat import WINDOWS | ||||
| from pip._internal.utils.filesystem import test_writable_dir | ||||
| from pip._internal.utils.logging import getLogger | ||||
| from pip._internal.utils.misc import ( | ||||
|     check_externally_managed, | ||||
|     ensure_dir, | ||||
|     get_pip_version, | ||||
|     protect_pip_from_modification_on_windows, | ||||
|     write_output, | ||||
| ) | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
| from pip._internal.utils.virtualenv import ( | ||||
|     running_under_virtualenv, | ||||
|     virtualenv_no_global, | ||||
| ) | ||||
| from pip._internal.wheel_builder import build, should_build_for_install_command | ||||
|  | ||||
| logger = getLogger(__name__) | ||||
|  | ||||
|  | ||||
| class InstallCommand(RequirementCommand): | ||||
|     """ | ||||
|     Install packages from: | ||||
|  | ||||
|     - PyPI (and other indexes) using requirement specifiers. | ||||
|     - VCS project urls. | ||||
|     - Local project directories. | ||||
|     - Local or remote source archives. | ||||
|  | ||||
|     pip also supports installing from "requirements files", which provide | ||||
|     an easy way to specify a whole environment to be installed. | ||||
|     """ | ||||
|  | ||||
|     usage = """ | ||||
|       %prog [options] <requirement specifier> [package-index-options] ... | ||||
|       %prog [options] -r <requirements file> [package-index-options] ... | ||||
|       %prog [options] [-e] <vcs project url> ... | ||||
|       %prog [options] [-e] <local project path> ... | ||||
|       %prog [options] <archive url/path> ...""" | ||||
|  | ||||
    def add_options(self) -> None:
        """Register all ``pip install`` options and the shared index group."""
        # Requirement-selection options shared with other commands.
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.pre())

        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(
            "--dry-run",
            action="store_true",
            dest="dry_run",
            default=False,
            help=(
                "Don't actually install anything, just print what would be. "
                "Can be used in combination with --ignore-installed "
                "to 'resolve' the requirements."
            ),
        )
        self.cmd_opts.add_option(
            "-t",
            "--target",
            dest="target_dir",
            metavar="dir",
            default=None,
            help=(
                "Install packages into <dir>. "
                "By default this will not replace existing files/folders in "
                "<dir>. Use --upgrade to replace existing packages in <dir> "
                "with new versions."
            ),
        )
        cmdoptions.add_target_python_options(self.cmd_opts)

        # Installation-destination options (--user/--root/--prefix).
        self.cmd_opts.add_option(
            "--user",
            dest="use_user_site",
            action="store_true",
            help=(
                "Install to the Python user install directory for your "
                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                "Windows. (See the Python documentation for site.USER_BASE "
                "for full details.)"
            ),
        )
        # Hidden negation of --user (SUPPRESS_HELP keeps it out of --help).
        self.cmd_opts.add_option(
            "--no-user",
            dest="use_user_site",
            action="store_false",
            help=SUPPRESS_HELP,
        )
        self.cmd_opts.add_option(
            "--root",
            dest="root_path",
            metavar="dir",
            default=None,
            help="Install everything relative to this alternate root directory.",
        )
        self.cmd_opts.add_option(
            "--prefix",
            dest="prefix_path",
            metavar="dir",
            default=None,
            help=(
                "Installation prefix where lib, bin and other top-level "
                "folders are placed. Note that the resulting installation may "
                "contain scripts and other resources which reference the "
                "Python interpreter of pip, and not that of ``--prefix``. "
                "See also the ``--python`` option if the intention is to "
                "install packages into another (possibly pip-free) "
                "environment."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.src())

        # Upgrade behavior.
        self.cmd_opts.add_option(
            "-U",
            "--upgrade",
            dest="upgrade",
            action="store_true",
            help=(
                "Upgrade all specified packages to the newest available "
                "version. The handling of dependencies depends on the "
                "upgrade-strategy used."
            ),
        )

        self.cmd_opts.add_option(
            "--upgrade-strategy",
            dest="upgrade_strategy",
            default="only-if-needed",
            choices=["only-if-needed", "eager"],
            help=(
                "Determines how dependency upgrading should be handled "
                "[default: %default]. "
                '"eager" - dependencies are upgraded regardless of '
                "whether the currently installed version satisfies the "
                "requirements of the upgraded package(s). "
                '"only-if-needed" -  are upgraded only when they do not '
                "satisfy the requirements of the upgraded package(s)."
            ),
        )

        self.cmd_opts.add_option(
            "--force-reinstall",
            dest="force_reinstall",
            action="store_true",
            help="Reinstall all packages even if they are already up-to-date.",
        )

        self.cmd_opts.add_option(
            "-I",
            "--ignore-installed",
            dest="ignore_installed",
            action="store_true",
            help=(
                "Ignore the installed packages, overwriting them. "
                "This can break your system if the existing package "
                "is of a different version or was installed "
                "with a different package manager!"
            ),
        )

        # Build-system and environment-policy options.
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.global_options())

        # --compile defaults to True; --no-compile flips the same dest.
        self.cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        self.cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.root_user_action())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

        # NOTE(review): --report is added after cmd_opts was inserted into
        # the parser; optparse stores the group by reference, so the option
        # still appears in help output — confirm the ordering is intentional.
        self.cmd_opts.add_option(
            "--report",
            dest="json_report_file",
            metavar="file",
            default=None,
            help=(
                "Generate a JSON file describing what pip did to install "
                "the provided requirements. "
                "Can be used in combination with --dry-run and --ignore-installed "
                "to 'resolve' the requirements. "
                "When - is used as file name it writes to stdout. "
                "When writing to stdout, please combine with the --quiet option "
                "to avoid mixing pip logging output with JSON output."
            ),
        )
|  | ||||
|     @with_cleanup | ||||
|     def run(self, options: Values, args: List[str]) -> int: | ||||
|         if options.use_user_site and options.target_dir is not None: | ||||
|             raise CommandError("Can not combine '--user' and '--target'") | ||||
|  | ||||
|         # Check whether the environment we're installing into is externally | ||||
|         # managed, as specified in PEP 668. Specifying --root, --target, or | ||||
|         # --prefix disables the check, since there's no reliable way to locate | ||||
|         # the EXTERNALLY-MANAGED file for those cases. An exception is also | ||||
|         # made specifically for "--dry-run --report" for convenience. | ||||
|         installing_into_current_environment = ( | ||||
|             not (options.dry_run and options.json_report_file) | ||||
|             and options.root_path is None | ||||
|             and options.target_dir is None | ||||
|             and options.prefix_path is None | ||||
|         ) | ||||
|         if ( | ||||
|             installing_into_current_environment | ||||
|             and not options.override_externally_managed | ||||
|         ): | ||||
|             check_externally_managed() | ||||
|  | ||||
|         upgrade_strategy = "to-satisfy-only" | ||||
|         if options.upgrade: | ||||
|             upgrade_strategy = options.upgrade_strategy | ||||
|  | ||||
|         cmdoptions.check_dist_restriction(options, check_target=True) | ||||
|  | ||||
|         logger.verbose("Using %s", get_pip_version()) | ||||
|         options.use_user_site = decide_user_install( | ||||
|             options.use_user_site, | ||||
|             prefix_path=options.prefix_path, | ||||
|             target_dir=options.target_dir, | ||||
|             root_path=options.root_path, | ||||
|             isolated_mode=options.isolated_mode, | ||||
|         ) | ||||
|  | ||||
|         target_temp_dir: Optional[TempDirectory] = None | ||||
|         target_temp_dir_path: Optional[str] = None | ||||
|         if options.target_dir: | ||||
|             options.ignore_installed = True | ||||
|             options.target_dir = os.path.abspath(options.target_dir) | ||||
|             if ( | ||||
|                 # fmt: off | ||||
|                 os.path.exists(options.target_dir) and | ||||
|                 not os.path.isdir(options.target_dir) | ||||
|                 # fmt: on | ||||
|             ): | ||||
|                 raise CommandError( | ||||
|                     "Target path exists but is not a directory, will not continue." | ||||
|                 ) | ||||
|  | ||||
|             # Create a target directory for using with the target option | ||||
|             target_temp_dir = TempDirectory(kind="target") | ||||
|             target_temp_dir_path = target_temp_dir.path | ||||
|             self.enter_context(target_temp_dir) | ||||
|  | ||||
|         global_options = options.global_options or [] | ||||
|  | ||||
|         session = self.get_default_session(options) | ||||
|  | ||||
|         target_python = make_target_python(options) | ||||
|         finder = self._build_package_finder( | ||||
|             options=options, | ||||
|             session=session, | ||||
|             target_python=target_python, | ||||
|             ignore_requires_python=options.ignore_requires_python, | ||||
|         ) | ||||
|         build_tracker = self.enter_context(get_build_tracker()) | ||||
|  | ||||
|         directory = TempDirectory( | ||||
|             delete=not options.no_clean, | ||||
|             kind="install", | ||||
|             globally_managed=True, | ||||
|         ) | ||||
|  | ||||
|         try: | ||||
|             reqs = self.get_requirements(args, options, finder, session) | ||||
|             check_legacy_setup_py_options(options, reqs) | ||||
|  | ||||
|             wheel_cache = WheelCache(options.cache_dir) | ||||
|  | ||||
|             # Only when installing is it permitted to use PEP 660. | ||||
|             # In other circumstances (pip wheel, pip download) we generate | ||||
|             # regular (i.e. non editable) metadata and wheels. | ||||
|             for req in reqs: | ||||
|                 req.permit_editable_wheels = True | ||||
|  | ||||
|             preparer = self.make_requirement_preparer( | ||||
|                 temp_build_dir=directory, | ||||
|                 options=options, | ||||
|                 build_tracker=build_tracker, | ||||
|                 session=session, | ||||
|                 finder=finder, | ||||
|                 use_user_site=options.use_user_site, | ||||
|                 verbosity=self.verbosity, | ||||
|             ) | ||||
|             resolver = self.make_resolver( | ||||
|                 preparer=preparer, | ||||
|                 finder=finder, | ||||
|                 options=options, | ||||
|                 wheel_cache=wheel_cache, | ||||
|                 use_user_site=options.use_user_site, | ||||
|                 ignore_installed=options.ignore_installed, | ||||
|                 ignore_requires_python=options.ignore_requires_python, | ||||
|                 force_reinstall=options.force_reinstall, | ||||
|                 upgrade_strategy=upgrade_strategy, | ||||
|                 use_pep517=options.use_pep517, | ||||
|             ) | ||||
|  | ||||
|             self.trace_basic_info(finder) | ||||
|  | ||||
|             requirement_set = resolver.resolve( | ||||
|                 reqs, check_supported_wheels=not options.target_dir | ||||
|             ) | ||||
|  | ||||
|             if options.json_report_file: | ||||
|                 report = InstallationReport(requirement_set.requirements_to_install) | ||||
|                 if options.json_report_file == "-": | ||||
|                     print_json(data=report.to_dict()) | ||||
|                 else: | ||||
|                     with open(options.json_report_file, "w", encoding="utf-8") as f: | ||||
|                         json.dump(report.to_dict(), f, indent=2, ensure_ascii=False) | ||||
|  | ||||
|             if options.dry_run: | ||||
|                 # In non dry-run mode, the legacy versions and specifiers check | ||||
|                 # will be done as part of conflict detection. | ||||
|                 requirement_set.warn_legacy_versions_and_specifiers() | ||||
|                 would_install_items = sorted( | ||||
|                     (r.metadata["name"], r.metadata["version"]) | ||||
|                     for r in requirement_set.requirements_to_install | ||||
|                 ) | ||||
|                 if would_install_items: | ||||
|                     write_output( | ||||
|                         "Would install %s", | ||||
|                         " ".join("-".join(item) for item in would_install_items), | ||||
|                     ) | ||||
|                 return SUCCESS | ||||
|  | ||||
|             try: | ||||
|                 pip_req = requirement_set.get_requirement("pip") | ||||
|             except KeyError: | ||||
|                 modifying_pip = False | ||||
|             else: | ||||
|                 # If we're not replacing an already installed pip, | ||||
|                 # we're not modifying it. | ||||
|                 modifying_pip = pip_req.satisfied_by is None | ||||
|             protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) | ||||
|  | ||||
|             reqs_to_build = [ | ||||
|                 r | ||||
|                 for r in requirement_set.requirements.values() | ||||
|                 if should_build_for_install_command(r) | ||||
|             ] | ||||
|  | ||||
|             _, build_failures = build( | ||||
|                 reqs_to_build, | ||||
|                 wheel_cache=wheel_cache, | ||||
|                 verify=True, | ||||
|                 build_options=[], | ||||
|                 global_options=global_options, | ||||
|             ) | ||||
|  | ||||
|             if build_failures: | ||||
|                 raise InstallationError( | ||||
|                     "Could not build wheels for {}, which is required to " | ||||
|                     "install pyproject.toml-based projects".format( | ||||
|                         ", ".join(r.name for r in build_failures)  # type: ignore | ||||
|                     ) | ||||
|                 ) | ||||
|  | ||||
|             to_install = resolver.get_installation_order(requirement_set) | ||||
|  | ||||
|             # Check for conflicts in the package set we're installing. | ||||
|             conflicts: Optional[ConflictDetails] = None | ||||
|             should_warn_about_conflicts = ( | ||||
|                 not options.ignore_dependencies and options.warn_about_conflicts | ||||
|             ) | ||||
|             if should_warn_about_conflicts: | ||||
|                 conflicts = self._determine_conflicts(to_install) | ||||
|  | ||||
|             # Don't warn about script install locations if | ||||
|             # --target or --prefix has been specified | ||||
|             warn_script_location = options.warn_script_location | ||||
|             if options.target_dir or options.prefix_path: | ||||
|                 warn_script_location = False | ||||
|  | ||||
|             installed = install_given_reqs( | ||||
|                 to_install, | ||||
|                 global_options, | ||||
|                 root=options.root_path, | ||||
|                 home=target_temp_dir_path, | ||||
|                 prefix=options.prefix_path, | ||||
|                 warn_script_location=warn_script_location, | ||||
|                 use_user_site=options.use_user_site, | ||||
|                 pycompile=options.compile, | ||||
|             ) | ||||
|  | ||||
|             lib_locations = get_lib_location_guesses( | ||||
|                 user=options.use_user_site, | ||||
|                 home=target_temp_dir_path, | ||||
|                 root=options.root_path, | ||||
|                 prefix=options.prefix_path, | ||||
|                 isolated=options.isolated_mode, | ||||
|             ) | ||||
|             env = get_environment(lib_locations) | ||||
|  | ||||
|             installed.sort(key=operator.attrgetter("name")) | ||||
|             items = [] | ||||
|             for result in installed: | ||||
|                 item = result.name | ||||
|                 try: | ||||
|                     installed_dist = env.get_distribution(item) | ||||
|                     if installed_dist is not None: | ||||
|                         item = f"{item}-{installed_dist.version}" | ||||
|                 except Exception: | ||||
|                     pass | ||||
|                 items.append(item) | ||||
|  | ||||
|             if conflicts is not None: | ||||
|                 self._warn_about_conflicts( | ||||
|                     conflicts, | ||||
|                     resolver_variant=self.determine_resolver_variant(options), | ||||
|                 ) | ||||
|  | ||||
|             installed_desc = " ".join(items) | ||||
|             if installed_desc: | ||||
|                 write_output( | ||||
|                     "Successfully installed %s", | ||||
|                     installed_desc, | ||||
|                 ) | ||||
|         except OSError as error: | ||||
|             show_traceback = self.verbosity >= 1 | ||||
|  | ||||
|             message = create_os_error_message( | ||||
|                 error, | ||||
|                 show_traceback, | ||||
|                 options.use_user_site, | ||||
|             ) | ||||
|             logger.error(message, exc_info=show_traceback)  # noqa | ||||
|  | ||||
|             return ERROR | ||||
|  | ||||
|         if options.target_dir: | ||||
|             assert target_temp_dir | ||||
|             self._handle_target_dir( | ||||
|                 options.target_dir, target_temp_dir, options.upgrade | ||||
|             ) | ||||
|         if options.root_user_action == "warn": | ||||
|             warn_if_run_as_root() | ||||
|         return SUCCESS | ||||
|  | ||||
|     def _handle_target_dir( | ||||
|         self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool | ||||
|     ) -> None: | ||||
|         ensure_dir(target_dir) | ||||
|  | ||||
|         # Checking both purelib and platlib directories for installed | ||||
|         # packages to be moved to target directory | ||||
|         lib_dir_list = [] | ||||
|  | ||||
|         # Checking both purelib and platlib directories for installed | ||||
|         # packages to be moved to target directory | ||||
|         scheme = get_scheme("", home=target_temp_dir.path) | ||||
|         purelib_dir = scheme.purelib | ||||
|         platlib_dir = scheme.platlib | ||||
|         data_dir = scheme.data | ||||
|  | ||||
|         if os.path.exists(purelib_dir): | ||||
|             lib_dir_list.append(purelib_dir) | ||||
|         if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: | ||||
|             lib_dir_list.append(platlib_dir) | ||||
|         if os.path.exists(data_dir): | ||||
|             lib_dir_list.append(data_dir) | ||||
|  | ||||
|         for lib_dir in lib_dir_list: | ||||
|             for item in os.listdir(lib_dir): | ||||
|                 if lib_dir == data_dir: | ||||
|                     ddir = os.path.join(data_dir, item) | ||||
|                     if any(s.startswith(ddir) for s in lib_dir_list[:-1]): | ||||
|                         continue | ||||
|                 target_item_dir = os.path.join(target_dir, item) | ||||
|                 if os.path.exists(target_item_dir): | ||||
|                     if not upgrade: | ||||
|                         logger.warning( | ||||
|                             "Target directory %s already exists. Specify " | ||||
|                             "--upgrade to force replacement.", | ||||
|                             target_item_dir, | ||||
|                         ) | ||||
|                         continue | ||||
|                     if os.path.islink(target_item_dir): | ||||
|                         logger.warning( | ||||
|                             "Target directory %s already exists and is " | ||||
|                             "a link. pip will not automatically replace " | ||||
|                             "links, please remove if replacement is " | ||||
|                             "desired.", | ||||
|                             target_item_dir, | ||||
|                         ) | ||||
|                         continue | ||||
|                     if os.path.isdir(target_item_dir): | ||||
|                         shutil.rmtree(target_item_dir) | ||||
|                     else: | ||||
|                         os.remove(target_item_dir) | ||||
|  | ||||
|                 shutil.move(os.path.join(lib_dir, item), target_item_dir) | ||||
|  | ||||
|     def _determine_conflicts( | ||||
|         self, to_install: List[InstallRequirement] | ||||
|     ) -> Optional[ConflictDetails]: | ||||
|         try: | ||||
|             return check_install_conflicts(to_install) | ||||
|         except Exception: | ||||
|             logger.exception( | ||||
|                 "Error while checking for conflicts. Please file an issue on " | ||||
|                 "pip's issue tracker: https://github.com/pypa/pip/issues/new" | ||||
|             ) | ||||
|             return None | ||||
|  | ||||
|     def _warn_about_conflicts( | ||||
|         self, conflict_details: ConflictDetails, resolver_variant: str | ||||
|     ) -> None: | ||||
|         package_set, (missing, conflicting) = conflict_details | ||||
|         if not missing and not conflicting: | ||||
|             return | ||||
|  | ||||
|         parts: List[str] = [] | ||||
|         if resolver_variant == "legacy": | ||||
|             parts.append( | ||||
|                 "pip's legacy dependency resolver does not consider dependency " | ||||
|                 "conflicts when selecting packages. This behaviour is the " | ||||
|                 "source of the following dependency conflicts." | ||||
|             ) | ||||
|         else: | ||||
|             assert resolver_variant == "2020-resolver" | ||||
|             parts.append( | ||||
|                 "pip's dependency resolver does not currently take into account " | ||||
|                 "all the packages that are installed. This behaviour is the " | ||||
|                 "source of the following dependency conflicts." | ||||
|             ) | ||||
|  | ||||
|         # NOTE: There is some duplication here, with commands/check.py | ||||
|         for project_name in missing: | ||||
|             version = package_set[project_name][0] | ||||
|             for dependency in missing[project_name]: | ||||
|                 message = ( | ||||
|                     "{name} {version} requires {requirement}, " | ||||
|                     "which is not installed." | ||||
|                 ).format( | ||||
|                     name=project_name, | ||||
|                     version=version, | ||||
|                     requirement=dependency[1], | ||||
|                 ) | ||||
|                 parts.append(message) | ||||
|  | ||||
|         for project_name in conflicting: | ||||
|             version = package_set[project_name][0] | ||||
|             for dep_name, dep_version, req in conflicting[project_name]: | ||||
|                 message = ( | ||||
|                     "{name} {version} requires {requirement}, but {you} have " | ||||
|                     "{dep_name} {dep_version} which is incompatible." | ||||
|                 ).format( | ||||
|                     name=project_name, | ||||
|                     version=version, | ||||
|                     requirement=req, | ||||
|                     dep_name=dep_name, | ||||
|                     dep_version=dep_version, | ||||
|                     you=("you" if resolver_variant == "2020-resolver" else "you'll"), | ||||
|                 ) | ||||
|                 parts.append(message) | ||||
|  | ||||
|         logger.critical("\n".join(parts)) | ||||
|  | ||||
|  | ||||
def get_lib_location_guesses(
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> List[str]:
    """Return the purelib and platlib paths of the scheme these args select."""
    scheme = get_scheme(
        "", user=user, home=home, root=root, isolated=isolated, prefix=prefix
    )
    return [scheme.purelib, scheme.platlib]
|  | ||||
|  | ||||
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
    """Report whether every candidate site-packages directory is writable."""
    candidates = set(get_lib_location_guesses(root=root, isolated=isolated))
    return all(test_writable_dir(path) for path in candidates)
|  | ||||
|  | ||||
def decide_user_install(
    use_user_site: Optional[bool],
    prefix_path: Optional[str] = None,
    target_dir: Optional[str] = None,
    root_path: Optional[str] = None,
    isolated_mode: bool = False,
) -> bool:
    """Decide whether this install should target the user site-packages.

    An explicit False answer short-circuits immediately; an explicit True is
    validated against incompatible options and raises on conflict; None (no
    preference stated) falls back to environment-driven heuristics supplied
    by the remaining arguments.
    """
    # use_user_site may arrive as an int rather than a bool (e.g. config
    # written by tox), so test falsiness instead of identity with False.
    if use_user_site is not None and not use_user_site:
        logger.debug("Non-user install by explicit request")
        return False

    if use_user_site:
        if prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        logger.debug("User install by explicit request")
        return True

    # No explicit preference was given either way.
    assert use_user_site is None

    # --prefix and --target are incompatible with a user install.
    if prefix_path or target_dir:
        logger.debug("Non-user install due to --prefix or --target option")
        return False

    # The interpreter may have user site-packages disabled entirely.
    if not site.ENABLE_USER_SITE:
        logger.debug("Non-user install because user site-packages disabled")
        return False

    # Prefer the global site-packages whenever we can write to it.
    if site_packages_writable(root=root_path, isolated=isolated_mode):
        logger.debug("Non-user install because site-packages writeable")
        return False

    logger.info(
        "Defaulting to user installation because normal site-packages "
        "is not writeable"
    )
    return True
|  | ||||
|  | ||||
def create_os_error_message(
    error: OSError, show_traceback: bool, using_user_site: bool
) -> str:
    """Build the user-facing message for an OSError raised during install.

    The message always opens with a fixed sentence; the error text itself is
    inlined only when no traceback will be shown. Permission and Windows
    long-path hints are appended when the errno calls for them.
    """
    msg = "Could not install packages due to an OSError"
    if show_traceback:
        # The traceback will carry the details, so just end the sentence.
        msg += "."
    else:
        msg += ": " + str(error)
    # Separate the error indication from any helper message that follows.
    pieces: List[str] = [msg + "\n"]

    # Suggest useful actions to the user:
    #  (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"
        if not running_under_virtualenv() and not using_user_site:
            pieces.append(user_option_part + " or " + permissions_part.lower())
        else:
            pieces.append(permissions_part)
        pieces.append(".\n")

    # Suggest enabling Long Paths when a Windows path exceeds 260 chars.
    long_path_suspected = (
        WINDOWS
        and error.errno == errno.ENOENT
        and error.filename
        and len(error.filename) > 260
    )
    if long_path_suspected:
        pieces.append(
            "HINT: This error might have occurred since "
            "this system does not have Windows Long Path "
            "support enabled. You can find information on "
            "how to enable this at "
            "https://pip.pypa.io/warnings/enable-long-paths\n"
        )

    return "".join(pieces).strip() + "\n"
| @ -0,0 +1,368 @@ | ||||
| import json | ||||
| import logging | ||||
| from optparse import Values | ||||
| from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.req_command import IndexGroupCommand | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.exceptions import CommandError | ||||
| from pip._internal.index.collector import LinkCollector | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.metadata import BaseDistribution, get_environment | ||||
| from pip._internal.models.selection_prefs import SelectionPreferences | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.utils.compat import stdlib_pkgs | ||||
| from pip._internal.utils.misc import tabulate, write_output | ||||
|  | ||||
# Typing-only scaffolding: TYPE_CHECKING is False at runtime, so none of
# the names below exist outside static analysis.
if TYPE_CHECKING:
    from pip._internal.metadata.base import DistributionVersion

    class _DistWithLatestInfo(BaseDistribution):
        """Give the distribution object a couple of extra fields.

        These will be populated during ``get_outdated()``. This is dirty but
        makes the rest of the code much cleaner.
        """

        # Best version found on the index for this distribution.
        latest_version: DistributionVersion
        # "wheel" or "sdist", depending on the best candidate's link.
        latest_filetype: str

    _ProcessedDists = Sequence[_DistWithLatestInfo]


logger = logging.getLogger(__name__)
|  | ||||
|  | ||||
class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """

    # NOTE(review): flag consumed by the CLI base classes (defined outside
    # this file); presumably allows running without an active virtualenv.
    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        """Register all ``pip list`` options on the command's option parser."""
        self.cmd_opts.add_option(
            "-o",
            "--outdated",
            action="store_true",
            default=False,
            help="List outdated packages",
        )
        self.cmd_opts.add_option(
            "-u",
            "--uptodate",
            action="store_true",
            default=False,
            help="List uptodate packages",
        )
        self.cmd_opts.add_option(
            "-e",
            "--editable",
            action="store_true",
            default=False,
            help="List editable projects.",
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="columns",
            choices=("columns", "freeze", "json"),
            help=(
                "Select the output format among: columns (default), freeze, or json. "
                "The 'freeze' format cannot be used with the --outdated option."
            ),
        )

        self.cmd_opts.add_option(
            "--not-required",
            action="store_true",
            dest="not_required",
            help="List packages that are not dependencies of installed packages.",
        )

        # --exclude-editable / --include-editable share one destination;
        # editables are included by default.
        self.cmd_opts.add_option(
            "--exclude-editable",
            action="store_false",
            dest="include_editable",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(
            "--include-editable",
            action="store_true",
            dest="include_editable",
            help="Include editable package from output.",
            default=True,
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())
        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def _build_package_finder(
        self, options: Values, session: PipSession
    ) -> PackageFinder:
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )

    def run(self, options: Values, args: List[str]) -> int:
        """Validate options, collect installed distributions, filter, print."""
        if options.outdated and options.uptodate:
            raise CommandError("Options --outdated and --uptodate cannot be combined.")

        if options.outdated and options.list_format == "freeze":
            raise CommandError(
                "List format 'freeze' cannot be used with the --outdated option."
            )

        cmdoptions.check_list_path_option(options)

        # Skip names in stdlib_pkgs plus any --exclude values (canonicalized).
        skip = set(stdlib_pkgs)
        if options.excludes:
            skip.update(canonicalize_name(n) for n in options.excludes)

        packages: "_ProcessedDists" = [
            cast("_DistWithLatestInfo", d)
            for d in get_environment(options.path).iter_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable,
                include_editables=options.include_editable,
                skip=skip,
            )
        ]

        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)
        return SUCCESS

    def get_outdated(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        """Return only packages whose latest available version is newer."""
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.version
        ]

    def get_uptodate(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        """Return only packages already at their latest available version."""
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.version
        ]

    def get_not_required(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        """Return packages that no other installed package depends on."""
        dep_keys = {
            canonicalize_name(dep.name)
            for dist in packages
            for dep in (dist.iter_dependencies() or ())
        }

        # Create a set to remove duplicate packages, and cast it to a list
        # to keep the return type consistent with get_outdated and
        # get_uptodate
        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})

    def iter_packages_latest_infos(
        self, packages: "_ProcessedDists", options: Values
    ) -> Generator["_DistWithLatestInfo", None, None]:
        """Yield each package annotated with its best remote candidate.

        Packages for which no remote candidate is found are dropped.
        """
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(
                dist: "_DistWithLatestInfo",
            ) -> Optional["_DistWithLatestInfo"]:
                all_candidates = finder.find_all_candidates(dist.canonical_name)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [
                        candidate
                        for candidate in all_candidates
                        if not candidate.version.is_prerelease
                    ]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.canonical_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = "wheel"
                else:
                    typ = "sdist"
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            for dist in map(latest_info, packages):
                if dist is not None:
                    yield dist

    def output_package_listing(
        self, packages: "_ProcessedDists", options: Values
    ) -> None:
        """Print *packages* in the format selected via --format."""
        packages = sorted(
            packages,
            key=lambda dist: dist.canonical_name,
        )
        if options.list_format == "columns" and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == "freeze":
            for dist in packages:
                if options.verbose >= 1:
                    write_output(
                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
                    )
                else:
                    write_output("%s==%s", dist.raw_name, dist.version)
        elif options.list_format == "json":
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(
        self, data: List[List[str]], header: List[str]
    ) -> None:
        """Write tabulated rows with a header and a dashed separator line."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))

        for val in pkg_strings:
            write_output(val)
|  | ||||
|  | ||||
def format_for_columns(
    pkgs: "_ProcessedDists", options: Values
) -> Tuple[List[List[str]], List[str]]:
    """
    Convert the package data into something usable
    by output_package_listing_columns.

    Returns a ``(rows, header)`` pair where every row has exactly one cell
    per header column, in header order.
    """
    header = ["Package", "Version"]

    running_outdated = options.outdated
    if running_outdated:
        header.extend(["Latest", "Type"])

    has_editables = any(x.editable for x in pkgs)
    if has_editables:
        header.append("Editable project location")

    # "Location" and "Installer" are gated on the same verbosity level;
    # evaluate the condition once instead of four separate times.
    verbose = options.verbose >= 1
    if verbose:
        header.extend(["Location", "Installer"])

    data = []
    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.raw_name, str(proj.version)]

        if running_outdated:
            row.append(str(proj.latest_version))
            row.append(proj.latest_filetype)

        if has_editables:
            row.append(proj.editable_project_location or "")

        if verbose:
            row.append(proj.location or "")
            row.append(proj.installer)

        data.append(row)

    return data, header
|  | ||||
|  | ||||
def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
    """Serialize *packages* to the JSON document used by ``--format=json``."""
    entries = []
    for dist in packages:
        entry = {
            "name": dist.raw_name,
            "version": str(dist.version),
        }
        if options.verbose >= 1:
            entry["location"] = dist.location or ""
            entry["installer"] = dist.installer
        if options.outdated:
            entry["latest_version"] = str(dist.latest_version)
            entry["latest_filetype"] = dist.latest_filetype
        editable_location = dist.editable_project_location
        if editable_location:
            entry["editable_project_location"] = editable_location
        entries.append(entry)
    return json.dumps(entries)
| @ -0,0 +1,174 @@ | ||||
| import logging | ||||
| import shutil | ||||
| import sys | ||||
| import textwrap | ||||
| import xmlrpc.client | ||||
| from collections import OrderedDict | ||||
| from optparse import Values | ||||
| from typing import TYPE_CHECKING, Dict, List, Optional | ||||
|  | ||||
| from pip._vendor.packaging.version import parse as parse_version | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.req_command import SessionCommandMixin | ||||
| from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS | ||||
| from pip._internal.exceptions import CommandError | ||||
| from pip._internal.metadata import get_default_environment | ||||
| from pip._internal.models.index import PyPI | ||||
| from pip._internal.network.xmlrpc import PipXmlrpcTransport | ||||
| from pip._internal.utils.logging import indent_log | ||||
| from pip._internal.utils.misc import write_output | ||||
|  | ||||
if TYPE_CHECKING:
    from typing import TypedDict

    # Shape of one aggregated search result produced by transform_hits():
    # a package name, its summary, and every version the index reported.
    class TransformedHit(TypedDict):
        name: str
        summary: str
        versions: List[str]
|  | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    # Searching does not require being inside a virtualenv, even when
    # PIP_REQUIRE_VIRTUALENV is set.
    ignore_require_venv = True

    def add_options(self) -> None:
        # Single command-specific option: which index to query.
        self.cmd_opts.add_option(
            "-i",
            "--index",
            dest="index",
            metavar="URL",
            default=PyPI.pypi_url,
            help="Base URL of Python Package Index (default %default)",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # args are the raw query terms; at least one is required.
        if not args:
            raise CommandError("Missing required argument (search query).")
        query = args
        pypi_hits = self.search(query, options)
        # Collapse the per-version hit list into one entry per package.
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            # Only wrap summaries when writing to an interactive terminal.
            terminal_width = shutil.get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
        """Query the index's XML-RPC search endpoint and return the raw hits.

        Raises CommandError if the server reports an XML-RPC fault.
        """
        index_url = options.index

        session = self.get_default_session(options)

        # Route the XML-RPC call through pip's own session (proxies, auth,
        # retries) instead of xmlrpc's default transport.
        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            hits = pypi.search({"name": query, "summary": query}, "or")
        except xmlrpc.client.Fault as fault:
            message = "XMLRPC request failed [code: {code}]\n{string}".format(
                code=fault.faultCode,
                string=fault.faultString,
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits
|  | ||||
|  | ||||
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    Order of first appearance is preserved; for duplicate names the summary
    of the highest seen version wins.
    """
    packages: Dict[str, "TransformedHit"] = OrderedDict()
    for hit in hits:
        name = hit["name"]
        summary = hit["summary"]
        version = hit["version"]

        # Plain membership test; calling .keys() is redundant.
        if name not in packages:
            packages[name] = {
                "name": name,
                "summary": summary,
                "versions": [version],
            }
        else:
            packages[name]["versions"].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]["versions"]):
                packages[name]["summary"] = summary

    return list(packages.values())
|  | ||||
|  | ||||
def print_dist_installation_info(name: str, latest: str) -> None:
    """If *name* is installed locally, report its version and, when it is not
    the latest, how the latest relates to it."""
    env = get_default_environment()
    dist = env.get_distribution(name)
    if dist is None:
        return

    with indent_log():
        if dist.version == latest:
            write_output("INSTALLED: %s (latest)", dist.version)
            return
        write_output("INSTALLED: %s", dist.version)
        if parse_version(latest).pre:
            # Pre-releases are skipped by default, so tell the user how
            # to opt in.
            write_output(
                "LATEST:    %s (pre-release; install"
                " with `pip install --pre`)",
                latest,
            )
        else:
            write_output("LATEST:    %s", latest)
|  | ||||
|  | ||||
def print_results(
    hits: List["TransformedHit"],
    name_column_width: Optional[int] = None,
    terminal_width: Optional[int] = None,
) -> None:
    """Pretty-print search *hits*, one package per line, wrapping summaries
    to the terminal width when one is given."""
    if not hits:
        return
    if name_column_width is None:
        # Width of the widest "name (version)" cell, plus padding.
        widest = max(
            len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
            for hit in hits
        )
        name_column_width = widest + 4

    for hit in hits:
        name = hit["name"]
        summary = hit["summary"] or ""
        latest = highest_version(hit.get("versions", ["-"]))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                wrapped = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 3)).join(wrapped)

        name_latest = f"{name} ({latest})"
        line = f"{name_latest:{name_column_width}} - {summary}"
        try:
            write_output(line)
            print_dist_installation_info(name, latest)
        except UnicodeEncodeError:
            # Best effort: skip entries the output encoding cannot represent.
            pass
|  | ||||
|  | ||||
def highest_version(versions: List[str]) -> str:
    """Return the maximum of *versions*, compared as parsed versions rather
    than lexicographically (so "10.0" > "9.0")."""
    return max(versions, key=parse_version)
| @ -0,0 +1,189 @@ | ||||
| import logging | ||||
| from optparse import Values | ||||
| from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.status_codes import ERROR, SUCCESS | ||||
| from pip._internal.metadata import BaseDistribution, get_default_environment | ||||
| from pip._internal.utils.misc import write_output | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    # Inspecting installed packages works outside a virtualenv even when
    # PIP_REQUIRE_VIRTUALENV is set.
    ignore_require_venv = True

    def add_options(self) -> None:
        # Single command-specific flag: also list each package's files.
        self.cmd_opts.add_option(
            "-f",
            "--files",
            dest="files",
            action="store_true",
            default=False,
            help="Show the full list of installed files for each package.",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR
        query = args

        # search_packages_info is a generator; print_results returns whether
        # anything was printed, i.e. whether any queried package was found.
        results = search_packages_info(query)
        if not print_results(
            results, list_files=options.files, verbose=options.verbose
        ):
            return ERROR
        return SUCCESS
|  | ||||
|  | ||||
class _PackageInfo(NamedTuple):
    # Display-ready snapshot of one installed distribution, produced by
    # search_packages_info() and consumed by print_results().
    name: str
    version: str
    location: str
    editable_project_location: Optional[str]  # None unless installed editable
    requires: List[str]  # names this package depends on
    required_by: List[str]  # installed packages depending on this one
    installer: str
    metadata_version: str
    classifiers: List[str]
    summary: str
    homepage: str
    project_urls: List[str]
    author: str
    author_email: str
    license: str
    entry_points: List[str]  # raw lines of entry_points.txt
    files: Optional[List[str]]  # None when no RECORD/installed-files.txt
|  | ||||
|  | ||||
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    env = get_default_environment()

    # Map canonical name -> distribution for every installed package.
    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    # Warn about (but do not fail on) queried names that are not installed,
    # reported using the user's original spelling.
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning("Package(s) not found: %s", ", ".join(missing))

    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
        # Reverse-dependency scan: every installed distribution whose
        # declared dependencies include current_dist.
        return (
            dist.metadata["Name"] or "UNKNOWN"
            for dist in installed.values()
            if current_dist.canonical_name
            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
        )

    for query_name in query_names:
        try:
            dist = installed[query_name]
        except KeyError:
            # Already reported via the "not found" warning above.
            continue

        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
        required_by = sorted(_get_requiring_packages(dist), key=str.lower)

        try:
            entry_points_text = dist.read_text("entry_points.txt")
            entry_points = entry_points_text.splitlines(keepends=False)
        except FileNotFoundError:
            entry_points = []

        files_iter = dist.iter_declared_entries()
        if files_iter is None:
            # Distinguish "unknown" (None: no RECORD/installed-files.txt)
            # from "no files" ([]).
            files: Optional[List[str]] = None
        else:
            files = sorted(files_iter)

        metadata = dist.metadata

        yield _PackageInfo(
            name=dist.raw_name,
            version=str(dist.version),
            location=dist.location or "",
            editable_project_location=dist.editable_project_location,
            requires=requires,
            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=metadata.get("Home-page", ""),
            project_urls=metadata.get_all("Project-URL", []),
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
            entry_points=entry_points,
            files=files,
        )
|  | ||||
|  | ||||
def print_results(
    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
    """
    Print the information from installed distributions found.

    Returns True if at least one distribution was printed.
    """
    printed_any = False
    for dist in distributions:
        if printed_any:
            # Separate consecutive package reports.
            write_output("---")
        printed_any = True

        write_output("Name: %s", dist.name)
        write_output("Version: %s", dist.version)
        write_output("Summary: %s", dist.summary)
        write_output("Home-page: %s", dist.homepage)
        write_output("Author: %s", dist.author)
        write_output("Author-email: %s", dist.author_email)
        write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
        if dist.editable_project_location is not None:
            write_output(
                "Editable project location: %s", dist.editable_project_location
            )
        write_output("Requires: %s", ", ".join(dist.requires))
        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
            write_output("Installer: %s", dist.installer)
            write_output("Classifiers:")
            for classifier in dist.classifiers:
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output("  %s", entry.strip())
            write_output("Project-URLs:")
            for project_url in dist.project_urls:
                write_output("  %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
                write_output("Cannot locate RECORD or installed-files.txt")
            else:
                for file_line in dist.files:
                    write_output("  %s", file_line.strip())
    return printed_any
| @ -0,0 +1,113 @@ | ||||
| import logging | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.base_command import Command | ||||
| from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.exceptions import InstallationError | ||||
| from pip._internal.req import parse_requirements | ||||
| from pip._internal.req.constructors import ( | ||||
|     install_req_from_line, | ||||
|     install_req_from_parsed_requirement, | ||||
| ) | ||||
| from pip._internal.utils.misc import ( | ||||
|     check_externally_managed, | ||||
|     protect_pip_from_modification_on_windows, | ||||
| ) | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class UninstallCommand(Command, SessionCommandMixin):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """

    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",  # may be given multiple times
            default=[],
            metavar="file",
            help=(
                "Uninstall all the packages listed in the given requirements "
                "file.  This option can be used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-y",
            "--yes",
            dest="yes",
            action="store_true",
            help="Don't ask for confirmation of uninstall deletions.",
        )
        # Shared options reused across pip commands.
        self.cmd_opts.add_option(cmdoptions.root_user_action())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        session = self.get_default_session(options)

        # Collect requirements keyed by canonical name; a later mention of
        # the same project replaces an earlier one.
        reqs_to_uninstall = {}
        for name in args:
            req = install_req_from_line(
                name,
                isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
            else:
                # Unnamed requirements (e.g. bare URLs/paths) cannot be
                # uninstalled; warn and skip rather than fail.
                logger.warning(
                    "Invalid requirement: %r ignored -"
                    " the uninstall command expects named"
                    " requirements.",
                    name,
                )
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, options=options, session=session
            ):
                req = install_req_from_parsed_requirement(
                    parsed_req, isolated=options.isolated_mode
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                f"You must give at least one requirement to {self.name} (see "
                f'"pip help {self.name}")'
            )

        # Refuse to act on externally managed environments unless the user
        # explicitly overrode the check.
        if not options.override_externally_managed:
            check_externally_managed()

        # Uninstalling pip itself on Windows needs special handling.
        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes,
                verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                # Make the removal permanent (it is staged until committed).
                uninstall_pathset.commit()
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
| @ -0,0 +1,183 @@ | ||||
| import logging | ||||
| import os | ||||
| import shutil | ||||
| from optparse import Values | ||||
| from typing import List | ||||
|  | ||||
| from pip._internal.cache import WheelCache | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.cli.req_command import RequirementCommand, with_cleanup | ||||
| from pip._internal.cli.status_codes import SUCCESS | ||||
| from pip._internal.exceptions import CommandError | ||||
| from pip._internal.operations.build.build_tracker import get_build_tracker | ||||
| from pip._internal.req.req_install import ( | ||||
|     InstallRequirement, | ||||
|     check_legacy_setup_py_options, | ||||
| ) | ||||
| from pip._internal.utils.misc import ensure_dir, normalize_path | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
| from pip._internal.wheel_builder import build, should_build_for_wheel_command | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    'pip wheel' uses the build system interface as described here:
    https://pip.pypa.io/en/stable/reference/build-system/

    """

    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-w",
            "--wheel-dir",
            dest="wheel_dir",
            metavar="dir",
            default=os.curdir,
            help=(
                "Build wheels into <dir>, where the default is the "
                "current working directory."
            ),
        )
        # Shared resolver/build options reused across pip commands.
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.progress_bar())

        self.cmd_opts.add_option(
            "--no-verify",
            dest="no_verify",
            action="store_true",
            default=False,
            help="Don't verify if built wheel is valid.",
        )

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.build_options())
        self.cmd_opts.add_option(cmdoptions.global_options())

        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.require_hashes())

        # Index-related options (e.g. --index-url) come as a separate group.
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        session = self.get_default_session(options)

        finder = self._build_package_finder(options, session)

        # The output directory must exist before wheels are copied into it.
        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)

        build_tracker = self.enter_context(get_build_tracker())

        # Temporary build directory; cleaned up automatically unless
        # --no-clean was given.
        directory = TempDirectory(
            delete=not options.no_clean,
            kind="wheel",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        wheel_cache = WheelCache(options.cache_dir)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.wheel_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            wheel_cache=wheel_cache,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Requirements already resolved to wheels are just saved; everything
        # else that qualifies is queued for building.
        reqs_to_build: List[InstallRequirement] = []
        for req in requirement_set.requirements.values():
            if req.is_wheel:
                preparer.save_linked_requirement(req)
            elif should_build_for_wheel_command(req):
                reqs_to_build.append(req)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,
            wheel_cache=wheel_cache,
            verify=(not options.no_verify),
            build_options=options.build_options or [],
            global_options=options.global_options or [],
        )
        for req in build_successes:
            assert req.link and req.link.is_wheel
            assert req.local_file_path
            # copy from cache to target directory
            try:
                shutil.copy(req.local_file_path, options.wheel_dir)
            except OSError as e:
                # A successfully built wheel that cannot be copied out still
                # counts as a failure for the overall exit status.
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name,
                    e,
                )
                build_failures.append(req)
        if len(build_failures) != 0:
            raise CommandError("Failed to build one or more wheels")

        return SUCCESS
| @ -0,0 +1,381 @@ | ||||
| """Configuration management setup | ||||
|  | ||||
| Some terminology: | ||||
| - name | ||||
|   As written in config files. | ||||
| - value | ||||
|   Value associated with a name | ||||
| - key | ||||
|   Name combined with it's section (section.name) | ||||
| - variant | ||||
|   A single word describing where the configuration key-value pair came from | ||||
| """ | ||||
|  | ||||
| import configparser | ||||
| import locale | ||||
| import os | ||||
| import sys | ||||
| from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple | ||||
|  | ||||
| from pip._internal.exceptions import ( | ||||
|     ConfigurationError, | ||||
|     ConfigurationFileCouldNotBeLoaded, | ||||
| ) | ||||
| from pip._internal.utils import appdirs | ||||
| from pip._internal.utils.compat import WINDOWS | ||||
| from pip._internal.utils.logging import getLogger | ||||
| from pip._internal.utils.misc import ensure_dir, enum | ||||
|  | ||||
| RawConfigParser = configparser.RawConfigParser  # Shorthand | ||||
| Kind = NewType("Kind", str) | ||||
|  | ||||
| CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf" | ||||
| ENV_NAMES_IGNORED = "version", "help" | ||||
|  | ||||
| # The kinds of configurations there are. | ||||
| kinds = enum( | ||||
|     USER="user",  # User Specific | ||||
|     GLOBAL="global",  # System Wide | ||||
|     SITE="site",  # [Virtual] Environment Specific | ||||
|     ENV="env",  # from PIP_CONFIG_FILE | ||||
|     ENV_VAR="env-var",  # from Environment Variables | ||||
| ) | ||||
| OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR | ||||
| VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE | ||||
|  | ||||
| logger = getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # NOTE: Maybe use the optionx attribute to normalize keynames. | ||||
| def _normalize_name(name: str) -> str: | ||||
|     """Make a name consistent regardless of source (environment or file)""" | ||||
|     name = name.lower().replace("_", "-") | ||||
|     if name.startswith("--"): | ||||
|         name = name[2:]  # only prefer long opts | ||||
|     return name | ||||
|  | ||||
|  | ||||
| def _disassemble_key(name: str) -> List[str]: | ||||
|     if "." not in name: | ||||
|         error_message = ( | ||||
|             "Key does not contain dot separated section and key. " | ||||
|             "Perhaps you wanted to use 'global.{}' instead?" | ||||
|         ).format(name) | ||||
|         raise ConfigurationError(error_message) | ||||
|     return name.split(".", 1) | ||||
|  | ||||
|  | ||||
def get_configuration_files() -> Dict[Kind, List[str]]:
    """Return the configuration file paths pip considers, keyed by variant."""
    global_config_files = [
        os.path.join(base, CONFIG_BASENAME)
        for base in appdirs.site_config_dirs("pip")
    ]

    # The legacy per-user location (~/.pip or ~\pip) predates appdirs;
    # both it and the appdirs location are still honoured.
    legacy_config_file = os.path.join(
        os.path.expanduser("~"),
        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: global_config_files,
        kinds.SITE: [os.path.join(sys.prefix, CONFIG_BASENAME)],
        kinds.USER: [legacy_config_file, new_config_file],
    }
|  | ||||
|  | ||||
class Configuration:
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """

    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        """
        :param isolated: when True, environment variables are not loaded.
        :param load_only: restrict loading and editing to a single file
            variant; must be one of USER, GLOBAL or SITE (or None for all).
        """
        super().__init__()

        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Because we keep track of where we got the data from
        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
        }
        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
        }
        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

    def load(self) -> None:
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self) -> Optional[str]:
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, "Need to be specified a file to be editing"

        # NOTE(review): _get_parser_to_modify raises ConfigurationError (not
        # IndexError) when no parsers exist, so this except clause looks
        # unreachable — verify whether callers rely on the None return.
        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key: str) -> Any:
        """Get a value from the configuration.

        :raises ConfigurationError: if the key is missing or malformed.
        """
        orig_key = key
        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
            # disassembling triggers a more useful error message than simply
            # "No such key" in the case that the key isn't in the form command.option
            _disassemble_key(key)
            raise ConfigurationError(f"No such key - {orig_key}")

    def set_value(self, key: str, value: Any) -> None:
        """Modify a value in the configuration."""
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        # NOTE(review): per _get_parser_to_modify's signature the parser is
        # never None; this guard appears purely defensive.
        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        # Keep the in-memory view in sync with the parser we just changed.
        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration.

        :raises ConfigurationError: if the key is not currently set in the
            ``load_only`` variant.
        """
        orig_key = key
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (
                parser.has_section(section) and parser.remove_option(section, name)
            ):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]

    def save(self) -> None:
        """Save the current in-memory state.

        Only parsers previously touched via set_value/unset_value are written.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure directory's permission(need to be writeable)
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )

    #
    # Private routines
    #

    def _ensure_have_load_only(self) -> None:
        # Editing operations require a specific variant to target.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self) -> Dict[str, Any]:
        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        #       are not needed here.
        retval = {}

        # Later variants in OVERRIDE_ORDER win (env vars beat files, etc.).
        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self) -> None:
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        # PIP_CONFIG_FILE=os.devnull is the documented way to disable all
        # configuration file loading.
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
        # Parse one file and fold its sections into this variant's dict.
        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname: str) -> RawConfigParser:
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            locale_encoding = locale.getpreferredencoding(False)
            try:
                parser.read(fname, encoding=locale_encoding)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason=f"contains invalid {locale_encoding} characters",
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self) -> None:
        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

    def _normalized_keys(
        self, section: str, items: Iterable[Tuple[str, Any]]
    ) -> Dict[str, Any]:
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
                name = key[4:].lower()
                # PIP_VERSION / PIP_HELP are flags, not configuration values.
                if name not in ENV_NAMES_IGNORED:
                    yield name, val

    # XXX: This is patched in the tests.
    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get("PIP_CONFIG_FILE", None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        config_files = get_configuration_files()

        # at the base we have any global configuration
        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # finally virtualenv configuration first trumping others
        yield kinds.SITE, config_files[kinds.SITE]

    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
        """Get values present in a config file"""
        return self._config[variant]

    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
        # Record the parser for save(); avoid duplicate entries.
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._dictionary!r})"
| @ -0,0 +1,21 @@ | ||||
| from pip._internal.distributions.base import AbstractDistribution | ||||
| from pip._internal.distributions.sdist import SourceDistribution | ||||
| from pip._internal.distributions.wheel import WheelDistribution | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
|  | ||||
|  | ||||
def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    """Returns a Distribution for the given InstallRequirement"""
    # Editable requirements always go through the (legacy) source path, even
    # when they point at a wheel; non-editable wheels get their own class;
    # everything else is treated as a source distribution.
    if install_req.is_wheel and not install_req.editable:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
| @ -0,0 +1,39 @@ | ||||
| import abc | ||||
|  | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.metadata.base import BaseDistribution | ||||
| from pip._internal.req import InstallRequirement | ||||
|  | ||||
|  | ||||
class AbstractDistribution(metaclass=abc.ABCMeta):
    """Base class for anything pip can treat as an installable artifact.

    Every concrete subclass must be able to:

     - determine the requirement name (without it, the non-upgrade case
       cannot be handled correctly);

     - determine setup/build requirements without installing additional
       packages (for the same reason as run-time dependencies);

     - create a Distribution object exposing the above metadata.
    """

    def __init__(self, req: InstallRequirement) -> None:
        super().__init__()
        # The requirement this distribution object was created for.
        self.req = req

    @abc.abstractmethod
    def get_metadata_distribution(self) -> BaseDistribution:
        """Return a Distribution carrying this artifact's metadata."""
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Perform whatever work is needed before metadata can be read."""
        raise NotImplementedError()
| @ -0,0 +1,23 @@ | ||||
| from pip._internal.distributions.base import AbstractDistribution | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.metadata import BaseDistribution | ||||
|  | ||||
|  | ||||
class InstalledDistribution(AbstractDistribution):
    """Represents an already-installed package.

    No preparation is needed: all required information was computed when
    the package was installed.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        dist = self.req.satisfied_by
        assert dist is not None, "not actually installed"
        return dist

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Nothing to prepare for an installed package.
        pass
| @ -0,0 +1,150 @@ | ||||
| import logging | ||||
| from typing import Iterable, Set, Tuple | ||||
|  | ||||
| from pip._internal.build_env import BuildEnvironment | ||||
| from pip._internal.distributions.base import AbstractDistribution | ||||
| from pip._internal.exceptions import InstallationError | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.metadata import BaseDistribution | ||||
| from pip._internal.utils.subprocess import runner_with_spinner_message | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        """Return the distribution produced by a prior metadata preparation."""
        return self.req.get_dist()

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Set up build isolation (if requested), check build dependencies,
        and generate the package metadata.

        :raises InstallationError: on conflicting or missing build deps.
        """
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            # Setup an isolated environment and install the build backend static
            # requirements in it.
            self._prepare_build_backend(finder)
            # Check that if the requirement is editable, it either supports PEP 660 or
            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
            # to setup the build backend to verify it supports build_editable, nor can
            # it be done later, because we want to avoid installing build requirements
            # needlessly. Doing it here also works around setuptools generating
            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
            # without setup.py nor setup.cfg.
            self.req.isolated_editable_sanity_check()
            # Install the dynamic build requirements.
            self._install_build_reqs(finder)
        # Check if the current environment provides build dependencies
        should_check_deps = self.req.use_pep517 and check_build_deps
        if should_check_deps:
            pyproject_requires = self.req.pyproject_requires
            assert pyproject_requires is not None
            conflicting, missing = self.req.build_env.check_requirements(
                pyproject_requires
            )
            if conflicting:
                self._raise_conflicts("the backend dependencies", conflicting)
            if missing:
                self._raise_missing_reqs(missing)
        self.req.prepare_metadata()

    def _prepare_build_backend(self, finder: PackageFinder) -> None:
        """Create the isolated build environment and install the static
        (pyproject.toml-declared) build requirements into it."""
        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            # Missing legacy setup.py-path requirements only warn (best
            # effort); conflicts above are fatal.
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )

    def _get_build_requires_wheel(self) -> Iterable[str]:
        """Ask the PEP 517 backend for extra wheel-build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message("Getting requirements to build wheel")
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_wheel()

    def _get_build_requires_editable(self) -> Iterable[str]:
        """Ask the PEP 660 backend for extra editable-build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build editable"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_editable()

    def _install_build_reqs(self, finder: PackageFinder) -> None:
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        if (
            self.req.editable
            and self.req.permit_editable_wheels
            and self.req.supports_pyproject_editable()
        ):
            build_reqs = self._get_build_requires_editable()
        else:
            build_reqs = self._get_build_requires_wheel()
        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
        if conflicting:
            self._raise_conflicts("the backend dependencies", conflicting)
        # Only the requirements not already satisfied are installed.
        self.req.build_env.install_requirements(
            finder, missing, "normal", kind="backend dependencies"
        )

    def _raise_conflicts(
        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
    ) -> None:
        """Raise InstallationError describing conflicting build deps."""
        format_string = (
            "Some build dependencies for {requirement} "
            "conflict with {conflicting_with}: {description}."
        )
        error_message = format_string.format(
            requirement=self.req,
            conflicting_with=conflicting_with,
            description=", ".join(
                f"{installed} is incompatible with {wanted}"
                for installed, wanted in sorted(conflicting_reqs)
            ),
        )
        raise InstallationError(error_message)

    def _raise_missing_reqs(self, missing: Set[str]) -> None:
        """Raise InstallationError describing missing build deps."""
        format_string = (
            "Some build dependencies for {requirement} are missing: {missing}."
        )
        error_message = format_string.format(
            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
        )
        raise InstallationError(error_message)
| @ -0,0 +1,34 @@ | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.distributions.base import AbstractDistribution | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.metadata import ( | ||||
|     BaseDistribution, | ||||
|     FilesystemWheel, | ||||
|     get_wheel_distribution, | ||||
| ) | ||||
|  | ||||
|  | ||||
class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    Wheels can be unpacked directly, so no preparation step is required.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        """Load the metadata from the wheel file into memory and return a
        Distribution backed by it, independent of the wheel file or
        requirement afterwards.
        """
        path = self.req.local_file_path
        assert path, "Set as part of preparation during download"
        name = self.req.name
        assert name, "Wheels are never unnamed"
        return get_wheel_distribution(FilesystemWheel(path), canonicalize_name(name))

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Nothing to prepare: metadata is read straight from the wheel.
        pass
| @ -0,0 +1,733 @@ | ||||
| """Exceptions used throughout package. | ||||
|  | ||||
| This module MUST NOT try to import from anything within `pip._internal` to | ||||
| operate. This is expected to be importable from any/all files within the | ||||
| subpackage and, thus, should not depend on them. | ||||
| """ | ||||
|  | ||||
| import configparser | ||||
| import contextlib | ||||
| import locale | ||||
| import logging | ||||
| import pathlib | ||||
| import re | ||||
| import sys | ||||
| from itertools import chain, groupby, repeat | ||||
| from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union | ||||
|  | ||||
| from pip._vendor.requests.models import Request, Response | ||||
| from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult | ||||
| from pip._vendor.rich.markup import escape | ||||
| from pip._vendor.rich.text import Text | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from hashlib import _Hash | ||||
|     from typing import Literal | ||||
|  | ||||
|     from pip._internal.metadata import BaseDistribution | ||||
|     from pip._internal.req.req_install import InstallRequirement | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # | ||||
| # Scaffolding | ||||
| # | ||||
| def _is_kebab_case(s: str) -> bool: | ||||
|     return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None | ||||
|  | ||||
|  | ||||
def _prefix_with_indent(
    s: Union[Text, str],
    console: Console,
    *,
    prefix: str,
    indent: str,
) -> Text:
    """Render *s* with *prefix* before its first line and *indent* before
    every subsequent line.

    ``overflow="ignore"`` keeps rich from wrapping or truncating the
    prefix/indent markers themselves.
    """
    if isinstance(s, Text):
        text = s
    else:
        text = console.render_str(s)

    # split(allow_blank=True) preserves empty lines so blank lines in the
    # message still receive the indent.
    return console.render_str(prefix, overflow="ignore") + console.render_str(
        f"\n{indent}", overflow="ignore"
    ).join(text.split(allow_blank=True))
|  | ||||
|  | ||||
class PipError(Exception):
    """Base class for all of pip's own exceptions."""
|  | ||||
|  | ||||
| class DiagnosticPipError(PipError): | ||||
|     """An error, that presents diagnostic information to the user. | ||||
|  | ||||
|     This contains a bunch of logic, to enable pretty presentation of our error | ||||
|     messages. Each error gets a unique reference. Each error can also include | ||||
|     additional context, a hint and/or a note -- which are presented with the | ||||
|     main error message in a consistent style. | ||||
|  | ||||
|     This is adapted from the error output styling in `sphinx-theme-builder`. | ||||
|     """ | ||||
|  | ||||
|     reference: str | ||||
|  | ||||
    def __init__(
        self,
        *,
        kind: 'Literal["error", "warning"]' = "error",
        reference: Optional[str] = None,
        message: Union[str, Text],
        context: Optional[Union[str, Text]],
        hint_stmt: Optional[Union[str, Text]],
        note_stmt: Optional[Union[str, Text]] = None,
        link: Optional[str] = None,
    ) -> None:
        """
        :param kind: severity used for presentation ("error" or "warning").
        :param reference: kebab-case identifier for this error; may instead be
            supplied as a ``reference`` class attribute on the subclass.
        :param message: the main error message.
        :param context: extra detail shown under the message, or None.
        :param hint_stmt: a hint for the user, or None.
        :param note_stmt: an additional note, or None.
        :param link: a URL with more information, or None.
        """
        # Ensure a proper reference is provided.
        if reference is None:
            assert hasattr(self, "reference"), "error reference not provided!"
            reference = self.reference
        assert _is_kebab_case(reference), "error reference must be kebab-case!"

        self.kind = kind
        self.reference = reference

        self.message = message
        self.context = context

        self.note_stmt = note_stmt
        self.hint_stmt = hint_stmt

        self.link = link

        # The base Exception message is a compact identifier; the rich
        # presentation happens elsewhere (__rich_console__).
        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
|  | ||||
|     def __repr__(self) -> str: | ||||
|         return ( | ||||
|             f"<{self.__class__.__name__}(" | ||||
|             f"reference={self.reference!r}, " | ||||
|             f"message={self.message!r}, " | ||||
|             f"context={self.context!r}, " | ||||
|             f"note_stmt={self.note_stmt!r}, " | ||||
|             f"hint_stmt={self.hint_stmt!r}" | ||||
|             ")>" | ||||
|         ) | ||||
|  | ||||
|     def __rich_console__( | ||||
|         self, | ||||
|         console: Console, | ||||
|         options: ConsoleOptions, | ||||
|     ) -> RenderResult: | ||||
|         colour = "red" if self.kind == "error" else "yellow" | ||||
|  | ||||
|         yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]" | ||||
|         yield "" | ||||
|  | ||||
|         if not options.ascii_only: | ||||
|             # Present the main message, with relevant context indented. | ||||
|             if self.context is not None: | ||||
|                 yield _prefix_with_indent( | ||||
|                     self.message, | ||||
|                     console, | ||||
|                     prefix=f"[{colour}]×[/] ", | ||||
|                     indent=f"[{colour}]│[/] ", | ||||
|                 ) | ||||
|                 yield _prefix_with_indent( | ||||
|                     self.context, | ||||
|                     console, | ||||
|                     prefix=f"[{colour}]╰─>[/] ", | ||||
|                     indent=f"[{colour}]   [/] ", | ||||
|                 ) | ||||
|             else: | ||||
|                 yield _prefix_with_indent( | ||||
|                     self.message, | ||||
|                     console, | ||||
|                     prefix="[red]×[/] ", | ||||
|                     indent="  ", | ||||
|                 ) | ||||
|         else: | ||||
|             yield self.message | ||||
|             if self.context is not None: | ||||
|                 yield "" | ||||
|                 yield self.context | ||||
|  | ||||
|         if self.note_stmt is not None or self.hint_stmt is not None: | ||||
|             yield "" | ||||
|  | ||||
|         if self.note_stmt is not None: | ||||
|             yield _prefix_with_indent( | ||||
|                 self.note_stmt, | ||||
|                 console, | ||||
|                 prefix="[magenta bold]note[/]: ", | ||||
|                 indent="      ", | ||||
|             ) | ||||
|         if self.hint_stmt is not None: | ||||
|             yield _prefix_with_indent( | ||||
|                 self.hint_stmt, | ||||
|                 console, | ||||
|                 prefix="[cyan bold]hint[/]: ", | ||||
|                 indent="      ", | ||||
|             ) | ||||
|  | ||||
|         if self.link is not None: | ||||
|             yield "" | ||||
|             yield f"Link: {self.link}" | ||||
|  | ||||
|  | ||||
| # | ||||
| # Actual Errors | ||||
| # | ||||
class ConfigurationError(PipError):
    """Raised for general problems with pip's configuration."""
|  | ||||
|  | ||||
class InstallationError(PipError):
    """Raised for general errors encountered during installation."""
|  | ||||
|  | ||||
class UninstallationError(PipError):
    """Raised for general errors encountered during uninstallation."""
|  | ||||
|  | ||||
class MissingPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml declares `[build-system]` without the
    mandatory `build-system.requires` key."""

    reference = "missing-pyproject-build-system-requires"

    def __init__(self, *, package: str) -> None:
        context = Text(
            "This package has an invalid pyproject.toml file.\n"
            "The [build-system] table is missing the mandatory `requires` key."
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=context,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|  | ||||
|  | ||||
class InvalidPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has an invalid `build-system.requires`."""

    reference = "invalid-pyproject-build-system-requires"

    def __init__(self, *, package: str, reason: str) -> None:
        context = Text(
            "This package has an invalid `build-system.requires` key in "
            f"pyproject.toml.\n{reason}"
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=context,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|  | ||||
|  | ||||
class NoneMetadataError(PipError):
    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".

    This signifies an inconsistency: the Distribution claims to have the
    metadata file (if not, ``FileNotFoundError`` is the right error), yet
    cannot actually produce its content — possibly due to permission errors.
    """

    def __init__(
        self,
        dist: "BaseDistribution",
        metadata_name: str,
    ) -> None:
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self) -> str:
        # Mention `dist` rather than just the name: its stringification
        # includes extra detail such as the version and location.
        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
|  | ||||
|  | ||||
class UserInstallationInvalid(InstallationError):
    """Raised when ``--user`` is requested but the environment has no user site."""

    def __str__(self) -> str:
        return "User base directory is not specified"
|  | ||||
|  | ||||
class InvalidSchemeCombination(InstallationError):
    """Raised when mutually exclusive installation-scheme options are combined.

    ``args`` holds the conflicting option names; the last one is reported
    separately from the rest.
    """

    def __str__(self) -> str:
        earlier = [str(a) for a in self.args[:-1]]
        return f"Cannot set {', '.join(earlier)} and {self.args[-1]} together"
|  | ||||
|  | ||||
class DistributionNotFound(InstallationError):
    """Raised when no distribution can be found to satisfy a requirement."""
|  | ||||
|  | ||||
class RequirementsFileParseError(InstallationError):
    """Raised on a general error while parsing a requirements file line."""
|  | ||||
|  | ||||
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already installed."""
|  | ||||
|  | ||||
class BadCommand(PipError):
    """Raised when virtualenv or a required command cannot be found."""
|  | ||||
|  | ||||
class CommandError(PipError):
    """Raised when the command-line arguments are erroneous."""
|  | ||||
|  | ||||
class PreviousBuildDirError(PipError):
    """Raised when a conflicting build directory from a previous run exists."""
|  | ||||
|  | ||||
class NetworkConnectionError(PipError):
    """HTTP connection error."""

    def __init__(
        self,
        error_msg: str,
        response: Optional[Response] = None,
        request: Optional[Request] = None,
    ) -> None:
        """Record the message together with the `response`/`request` objects."""
        self.response = response
        self.request = request
        self.error_msg = error_msg
        # When only a response was supplied, recover the request from it.
        if self.response is not None and not self.request:
            if hasattr(response, "request"):
                self.request = self.response.request
        super().__init__(error_msg, response, request)

    def __str__(self) -> str:
        return str(self.error_msg)
|  | ||||
|  | ||||
class InvalidWheelFilename(InstallationError):
    """Raised when a wheel filename is malformed."""
|  | ||||
|  | ||||
class UnsupportedWheel(InstallationError):
    """Raised when a wheel is not supported in this environment."""
|  | ||||
|  | ||||
class InvalidWheel(InstallationError):
    """Raised for an invalid (e.g. corrupt) wheel file.

    :ivar location: Where the wheel was found.
    :ivar name: The wheel's filename.
    """

    def __init__(self, location: str, name: str):
        self.location = location
        self.name = name

    def __str__(self) -> str:
        return "Wheel '{}' located at {} is invalid.".format(self.name, self.location)
|  | ||||
|  | ||||
class MetadataInconsistent(InstallationError):
    """Built metadata contains inconsistent information.

    Raised when metadata values (e.g. name and version) do not match the
    information previously obtained from the sdist filename, a user-supplied
    ``#egg=`` value, or an install requirement name.
    """

    def __init__(
        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
    ) -> None:
        self.ireq = ireq
        self.field = field
        self.f_val = f_val
        self.m_val = m_val

    def __str__(self) -> str:
        return (
            "Requested {} has inconsistent {}: "
            "expected {!r}, but metadata has {!r}"
        ).format(self.ireq, self.field, self.f_val, self.m_val)
|  | ||||
|  | ||||
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
    """Raised when a subprocess call fails during installation.

    :param command_description: Human-readable description of the command.
    :param exit_code: The subprocess's exit status.
    :param output_lines: Captured output lines, or None if output was streamed.
    """

    reference = "subprocess-exited-with-error"

    def __init__(
        self,
        *,
        command_description: str,
        exit_code: int,
        output_lines: Optional[List[str]],
    ) -> None:
        # Build the context section: either the captured output framed by
        # red markers, or a pointer to the already-streamed output above.
        if output_lines is not None:
            header = Text.from_markup(
                f"[red][{len(output_lines)} lines of output][/]\n"
            )
            footer = Text.from_markup(R"[red]\[end of output][/]")
            context = header + Text("".join(output_lines)) + footer
        else:
            context = Text("See above for output.")

        super().__init__(
            message=(
                f"[green]{escape(command_description)}[/] did not run successfully.\n"
                f"exit code: {exit_code}"
            ),
            context=context,
            hint_stmt=None,
            note_stmt=(
                "This error originates from a subprocess, and is likely not a "
                "problem with pip."
            ),
        )

        self.command_description = command_description
        self.exit_code = exit_code

    def __str__(self) -> str:
        return f"{self.command_description} exited with {self.exit_code}"
|  | ||||
|  | ||||
class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
    """Raised when generating a package's metadata fails."""

    reference = "metadata-generation-failed"

    def __init__(
        self,
        *,
        package_details: str,
    ) -> None:
        # Deliberately skip InstallationSubprocessError.__init__ (which
        # requires command/exit-code/output details) and invoke the next
        # class in the MRO, DiagnosticPipError.__init__, directly.
        super(InstallationSubprocessError, self).__init__(
            message="Encountered error while generating package metadata.",
            context=escape(package_details),
            hint_stmt="See above for details.",
            note_stmt="This is an issue with the package mentioned above, not pip.",
        )

    def __str__(self) -> str:
        return "metadata generation failed"
|  | ||||
|  | ||||
class HashErrors(InstallationError):
    """Aggregates multiple HashError instances into one for reporting."""

    def __init__(self) -> None:
        self.errors: List["HashError"] = []

    def append(self, error: "HashError") -> None:
        """Add one HashError to the collection."""
        self.errors.append(error)

    def __str__(self) -> str:
        # Sort by recovery difficulty so groupby clusters each class together.
        self.errors.sort(key=lambda e: e.order)
        lines: List[str] = []
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            for err in errors_of_cls:
                lines.append(err.body())
        return "\n".join(lines) if lines else ""

    def __bool__(self) -> bool:
        return bool(self.errors)
|  | ||||
|  | ||||
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """

    req: Optional["InstallRequirement"] = None
    head = ""
    order: int = -1

    def body(self) -> str:
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.
        """
        return "    " + self._requirement_name()

    def __str__(self) -> str:
        return "{}\n{}".format(self.head, self.body())

    def _requirement_name(self) -> str:
        """Describe the requirement that triggered me, or a placeholder
        when no requirement has been attached yet."""
        if self.req:
            return str(self.req)
        return "unknown package"
|  | ||||
|  | ||||
class VcsHashUnsupported(HashError):
    """A hash was provided for a requirement that comes from a version
    control system, which we have no way of hashing."""

    order = 0
    head = (
        "Can't verify hashes for these requirements because we don't "
        "have a way to hash version control repositories:"
    )
|  | ||||
|  | ||||
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a ``file://`` requirement that points to a
    directory, and we don't have a method for hashing those.

    (The previous docstring was copy-pasted from ``VcsHashUnsupported`` and
    wrongly described VCS requirements; the ``head`` text below shows the
    actual condition.)
    """

    order = 1
    head = (
        "Can't verify hashes for these file:// requirements because they "
        "point to directories:"
    )
|  | ||||
|  | ||||
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = (
        "Hashes are required in --require-hashes mode, but they are "
        "missing from some requirements. Here is a list of those "
        "requirements along with the hashes their downloaded archives "
        "actually had. Add lines like these to your requirements files to "
        "prevent tampering. (If you did not enable --require-hashes "
        "manually, note that it turns on automatically when any package "
        "has a hash.)"
    )

    def __init__(self, gotten_hash: str) -> None:
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self) -> str:
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            if self.req.is_direct:
                # For URL-based requirements, show the original URL from the
                # requirements file rather than the package name, so the
                # output can be pasted straight back into the file.
                package = self.req.original_link
            else:
                # Guard against odd input fed to InstallRequirement's
                # constructor.
                package = getattr(self.req, "req", None)
        return "    {} --hash={}:{}".format(
            package or "unknown package", FAVORITE_HASH, self.gotten_hash
        )
|  | ||||
|  | ||||
class HashUnpinned(HashError):
    """A requirement carried a hash but was not pinned to an exact version."""

    order = 3
    head = (
        "In --require-hashes mode, all requirements must have their "
        "versions pinned with ==. These do not:"
    )
|  | ||||
|  | ||||
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """

    order = 4
    head = (
        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
        "FILE. If you have updated the package versions, please update "
        "the hashes. Otherwise, examine the package contents carefully; "
        "someone may have tampered with them."
    )

    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self) -> str:
        return f"    {self._requirement_name()}:\n{self._hash_comparison()}"

    def _hash_comparison(self) -> str:
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """

        def hash_then_or(hash_name: str) -> "chain[str]":
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat("    or"))

        lines: List[str] = []
        for hash_name, expecteds in self.allowed.items():
            # First expected line carries the algorithm name; the rest say "or".
            label = hash_then_or(hash_name)
            for expected in expecteds:
                lines.append("        Expected {} {}".format(next(label), expected))
            lines.append(
                "             Got        {}\n".format(self.gots[hash_name].hexdigest())
            )
        return "\n".join(lines)
|  | ||||
|  | ||||
class UnsupportedPythonVersion(InstallationError):
    """Raised when the running Python is unsupported according to the
    package's Requires-Python metadata."""
|  | ||||
|  | ||||
class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """Raised when a configuration file cannot be read or parsed.

    :param reason: Short description of what went wrong.
    :param fname: Path of the offending file, when known.
    :param error: The underlying configparser error, when available.
    """

    def __init__(
        self,
        reason: str = "could not be loaded",
        fname: Optional[str] = None,
        error: Optional[configparser.Error] = None,
    ) -> None:
        super().__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self) -> str:
        if self.fname is None:
            # Without a filename we must at least have the parser error.
            assert self.error is not None
            detail = f".\n{self.error}\n"
        else:
            detail = f" in {self.fname}."
        return f"Configuration file {self.reason}{detail}"
|  | ||||
|  | ||||
# Fallback message used when the EXTERNALLY-MANAGED marker file supplies no
# error text of its own (see ExternallyManagedEnvironment.from_config).
_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
The Python environment under {sys.prefix} is managed externally, and may not be
manipulated by the user. Please use specific tooling from the distributor of
the Python installation to interact with this environment instead.
"""
|  | ||||
|  | ||||
class ExternallyManagedEnvironment(DiagnosticPipError):
    """The current environment is externally managed.

    This is raised when the current environment is externally managed, as
    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
    and displayed when the error is bubbled up to the user.

    :param error: The error message read from ``EXTERNALLY-MANAGED``.
    """

    reference = "externally-managed-environment"

    def __init__(self, error: Optional[str]) -> None:
        # Fall back to the module-level default text when the marker file
        # provided no message of its own.
        if error is None:
            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
        else:
            context = Text(error)
        super().__init__(
            message="This environment is externally managed",
            context=context,
            note_stmt=(
                "If you believe this is a mistake, please contact your "
                "Python installation or OS distribution provider. "
                "You can override this, at the risk of breaking your Python "
                "installation or OS, by passing --break-system-packages."
            ),
            hint_stmt=Text("See PEP 668 for the detailed specification."),
        )

    @staticmethod
    def _iter_externally_managed_error_keys() -> Iterator[str]:
        """Yield candidate ``Error-*`` keys, most specific locale first,
        ending with the locale-less ``Error`` key."""
        # LC_MESSAGES is in POSIX, but not the C standard. The most common
        # platform that does not implement this category is Windows, where
        # using other categories for console message localization is equally
        # unreliable, so we fall back to the locale-less vendor message. This
        # can always be re-evaluated when a vendor proposes a new alternative.
        try:
            category = locale.LC_MESSAGES
        except AttributeError:
            lang: Optional[str] = None
        else:
            lang, _ = locale.getlocale(category)
        if lang is not None:
            yield f"Error-{lang}"
            # Also try the language without its region suffix, e.g. for a
            # lang of "de_DE" additionally yield "Error-de".
            for sep in ("-", "_"):
                before, found, _ = lang.partition(sep)
                if not found:
                    continue
                yield f"Error-{before}"
        yield "Error"

    @classmethod
    def from_config(
        cls,
        config: Union[pathlib.Path, str],
    ) -> "ExternallyManagedEnvironment":
        """Build the error from an ``EXTERNALLY-MANAGED`` file.

        Reads the ``[externally-managed]`` section and uses the first
        matching ``Error`` key; falls back to the default message when the
        file is unreadable or has no matching key.
        """
        parser = configparser.ConfigParser(interpolation=None)
        try:
            parser.read(config, encoding="utf-8")
            section = parser["externally-managed"]
            for key in cls._iter_externally_managed_error_keys():
                with contextlib.suppress(KeyError):
                    return cls(section[key])
        except KeyError:
            pass
        except (OSError, UnicodeDecodeError, configparser.ParsingError):
            from pip._internal.utils._log import VERBOSE

            # Attach the traceback to the warning only in verbose mode.
            exc_info = logger.isEnabledFor(VERBOSE)
            logger.warning("Failed to read %s", config, exc_info=exc_info)
        return cls(None)
| @ -0,0 +1,2 @@ | ||||
| """Index interaction code | ||||
| """ | ||||
| @ -0,0 +1,505 @@ | ||||
| """ | ||||
| The main purpose of this module is to expose LinkCollector.collect_sources(). | ||||
| """ | ||||
|  | ||||
| import collections | ||||
| import email.message | ||||
| import functools | ||||
| import itertools | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import urllib.parse | ||||
| import urllib.request | ||||
| from html.parser import HTMLParser | ||||
| from optparse import Values | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     Iterable, | ||||
|     List, | ||||
|     MutableMapping, | ||||
|     NamedTuple, | ||||
|     Optional, | ||||
|     Sequence, | ||||
|     Tuple, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from pip._vendor import requests | ||||
| from pip._vendor.requests import Response | ||||
| from pip._vendor.requests.exceptions import RetryError, SSLError | ||||
|  | ||||
| from pip._internal.exceptions import NetworkConnectionError | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.models.search_scope import SearchScope | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.network.utils import raise_for_status | ||||
| from pip._internal.utils.filetypes import is_archive_file | ||||
| from pip._internal.utils.misc import redact_auth_from_url | ||||
| from pip._internal.vcs import vcs | ||||
|  | ||||
| from .sources import CandidatesFromPage, LinkSource, build_source | ||||
|  | ||||
if TYPE_CHECKING:
    from typing import Protocol
else:
    # At runtime only a placeholder base class is needed, so a plain
    # ``object`` stands in for ``typing.Protocol``.
    Protocol = object

logger = logging.getLogger(__name__)

# Header mapping of a Simple API HTTP response, keyed by header name.
ResponseHeaders = MutableMapping[str, str]
|  | ||||
|  | ||||
def _match_vcs_scheme(url: str) -> Optional[str]:
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    for scheme in vcs.schemes:
        # A scheme match requires the scheme prefix to be followed by "+" or
        # ":". Slice-and-compare against a tuple instead of indexing so a URL
        # that is exactly a scheme name (nothing after it) no longer raises
        # IndexError; the old ``url[len(scheme)] in "+:"`` form crashed there.
        next_char = url[len(scheme) : len(scheme) + 1]
        if url.lower().startswith(scheme) and next_char in ("+", ":"):
            return scheme
    return None
|  | ||||
|  | ||||
| class _NotAPIContent(Exception): | ||||
|     def __init__(self, content_type: str, request_desc: str) -> None: | ||||
|         super().__init__(content_type, request_desc) | ||||
|         self.content_type = content_type | ||||
|         self.request_desc = request_desc | ||||
|  | ||||
|  | ||||
def _ensure_api_header(response: Response) -> None:
    """
    Check the Content-Type header to ensure the response contains a Simple
    API Response.

    Raises `_NotAPIContent` if the content type is not a valid content-type.
    """
    content_type = response.headers.get("Content-Type", "Unknown")

    acceptable_prefixes = (
        "text/html",
        "application/vnd.pypi.simple.v1+html",
        "application/vnd.pypi.simple.v1+json",
    )
    if not content_type.lower().startswith(acceptable_prefixes):
        raise _NotAPIContent(content_type, response.request.method)
|  | ||||
|  | ||||
| class _NotHTTP(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
def _ensure_api_response(url: str, session: PipSession) -> None:
    """
    Send a HEAD request to the URL, and ensure the response contains a simple
    API Response.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotAPIContent` if the content type is not a valid content type.
    """
    # Only http(s) URLs can answer a HEAD request.
    if urllib.parse.urlsplit(url).scheme not in {"http", "https"}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    raise_for_status(resp)
    _ensure_api_header(resp)
|  | ||||
|  | ||||
def _get_simple_response(url: str, session: PipSession) -> Response:
    """Access an Simple API response with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML or Simple API, to avoid downloading a
       large file. Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotAPIContent` if it is not HTML or a Simple API.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got a Simple API response,
       and raise `_NotAPIContent` otherwise.
    """
    if is_archive_file(Link(url).filename):
        _ensure_api_response(url, session=session)

    logger.debug("Getting page %s", redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            # Prefer the PEP 691 JSON form, fall back to the v1 HTML form,
            # and accept plain HTML only as a last resort (per the q-values).
            "Accept": ", ".join(
                [
                    "application/vnd.pypi.simple.v1+json",
                    "application/vnd.pypi.simple.v1+html; q=0.1",
                    "text/html; q=0.01",
                ]
            ),
            # We don't want to blindly returned cached data for
            # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    raise_for_status(resp)

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is a
    # Simple API response or not. However we can check after we've
    # downloaded it.
    _ensure_api_header(resp)

    logger.debug(
        "Fetched page %s as %s",
        redact_auth_from_url(url),
        resp.headers.get("Content-Type", "Unknown"),
    )

    return resp
|  | ||||
|  | ||||
def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
    """Return the charset declared in the Content-Type header, if any."""
    if not headers or "Content-Type" not in headers:
        return None
    # Reuse email's MIME-parameter parser to pull the charset out of the
    # header value (handles quoting and extra parameters for us).
    msg = email.message.Message()
    msg["content-type"] = headers["Content-Type"]
    charset = msg.get_param("charset")
    return str(charset) if charset else None
|  | ||||
|  | ||||
class CacheablePageContent:
    """Hashable wrapper that keys an IndexContent by URL for lru_cache use."""

    def __init__(self, page: "IndexContent") -> None:
        # Only pages that opted into caching may be wrapped.
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, type(self)):
            return False
        return self.page.url == other.page.url

    def __hash__(self) -> int:
        return hash(self.page.url)
|  | ||||
|  | ||||
class ParseLinks(Protocol):
    """Callback protocol shared by parse_links and its caching wrapper."""

    def __call__(self, page: "IndexContent") -> Iterable[Link]:
        ...
|  | ||||
|  | ||||
def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
    """
    Memoize a link-parsing function by page URL.

    Results are cached through CacheablePageContent unless the IndexContent
    has ``cache_link_parsing == False``, in which case ``fn`` runs uncached.
    """

    @functools.lru_cache(maxsize=None)
    def parse_cached(key: CacheablePageContent) -> List[Link]:
        # Materialize so the cached value can be iterated more than once.
        return list(fn(key.page))

    @functools.wraps(fn)
    def wrapper(page: "IndexContent") -> List[Link]:
        if not page.cache_link_parsing:
            return list(fn(page))
        return parse_cached(CacheablePageContent(page))

    return wrapper
|  | ||||
|  | ||||
@with_cached_index_content
def parse_links(page: "IndexContent") -> Iterable[Link]:
    """
    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
    """
    if page.content_type.lower().startswith("application/vnd.pypi.simple.v1+json"):
        # PEP 691 JSON response: each "files" entry becomes a Link.
        data = json.loads(page.content)
        for file in data.get("files", []):
            link = Link.from_json(file, page.url)
            if link is not None:
                yield link
        return

    # Otherwise treat the payload as HTML (PEP 503 / legacy index pages).
    parser = HTMLLinkParser(page.url)
    parser.feed(page.content.decode(page.encoding or "utf-8"))

    base_url = parser.base_url or page.url
    for anchor in parser.anchors:
        link = Link.from_element(anchor, page_url=page.url, base_url=base_url)
        if link is not None:
            yield link
|  | ||||
|  | ||||
class IndexContent:
    """One fetched index response (page) together with the URL it came from."""

    def __init__(
        self,
        content: bytes,
        content_type: str,
        encoding: Optional[str],
        url: str,
        cache_link_parsing: bool = True,
    ) -> None:
        """
        :param content: raw response body.
        :param content_type: the response's Content-Type header value.
        :param encoding: the encoding to decode the given content.
        :param url: the URL from which the HTML was downloaded.
        :param cache_link_parsing: whether links parsed from this page's url
                                   should be cached. PyPI index urls should
                                   have this set to False, for example.
        """
        self.url = url
        self.content = content
        self.content_type = content_type
        self.encoding = encoding
        self.cache_link_parsing = cache_link_parsing

    def __str__(self) -> str:
        # Never leak credentials embedded in the URL into logs.
        return redact_auth_from_url(self.url)
|  | ||||
|  | ||||
class HTMLLinkParser(HTMLParser):
    """
    HTMLParser that records the first ``<base href>`` encountered and the
    attribute dicts of every ``<a>`` element.
    """

    def __init__(self, url: str) -> None:
        super().__init__(convert_charrefs=True)
        self.url: str = url
        self.base_url: Optional[str] = None
        self.anchors: List[Dict[str, Optional[str]]] = []

    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
        if tag == "a":
            self.anchors.append(dict(attrs))
        elif tag == "base" and self.base_url is None:
            # Only the first <base href> is honored.
            href = self.get_href(attrs)
            if href is not None:
                self.base_url = href

    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
        """Return the value of the first ``href`` attribute, or None."""
        return next((value for name, value in attrs if name == "href"), None)
|  | ||||
|  | ||||
def _handle_get_simple_fail(
    link: Link,
    reason: Union[str, Exception],
    meth: Optional[Callable[..., None]] = None,
) -> None:
    """Log that *link* could not be fetched, using *meth* (default logger.debug)."""
    log = meth if meth is not None else logger.debug
    log("Could not fetch URL %s: %s - skipping", link, reason)
|  | ||||
|  | ||||
def _make_index_content(
    response: Response, cache_link_parsing: bool = True
) -> IndexContent:
    """Build an IndexContent from a response, decoding per its headers."""
    return IndexContent(
        response.content,
        response.headers["Content-Type"],
        encoding=_get_encoding_from_headers(response.headers),
        url=response.url,
        cache_link_parsing=cache_link_parsing,
    )
|  | ||||
|  | ||||
def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
    """Fetch and wrap the index page behind *link*, or return None.

    Every failure mode is logged and swallowed (returning None) so that one
    broken index location does not abort the whole search.
    """
    # Drop any fragment; it is irrelevant for fetching.
    url = link.url.split("#", 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning(
            "Cannot look at %s URL %s because it does not support lookup as web pages.",
            vcs_scheme,
            link,
        )
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith("/"):
            url += "/"
        # TODO: In the future, it would be nice if pip supported PEP 691
        #       style responses in the file:// URLs, however there's no
        #       standard file extension for application/vnd.pypi.simple.v1+json
        #       so we'll need to come up with something on our own.
        url = urllib.parse.urljoin(url, "index.html")
        logger.debug(" file: URL is directory, getting %s", url)

    try:
        resp = _get_simple_response(url, session=session)
    except _NotHTTP:
        # Archive-looking URL on a non-HTTP scheme: cannot HEAD-probe it.
        logger.warning(
            "Skipping page %s because it looks like an archive, and cannot "
            "be checked by a HTTP HEAD request.",
            link,
        )
    except _NotAPIContent as exc:
        # Server responded, but not with a Simple API content type.
        logger.warning(
            "Skipping page %s because the %s request got Content-Type: %s. "
            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
            "application/vnd.pypi.simple.v1+html, and text/html",
            link,
            exc.request_desc,
            exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_simple_fail(link, exc)
    except RetryError as exc:
        _handle_get_simple_fail(link, exc)
    except SSLError as exc:
        # SSL problems are logged at INFO so they are more visible.
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_simple_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_simple_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_simple_fail(link, "timed out")
    else:
        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
    return None
|  | ||||
|  | ||||
class CollectedSources(NamedTuple):
    # Sources built from --find-links locations and from index URLs,
    # respectively. Entries may be None when a location was ignored.
    find_links: Sequence[Optional[LinkSource]]
    index_urls: Sequence[Optional[LinkSource]]
|  | ||||
|  | ||||
class LinkCollector:

    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_sources() method.
    """

    def __init__(
        self,
        session: PipSession,
        search_scope: SearchScope,
    ) -> None:
        self.search_scope = search_scope
        self.session = session

    @classmethod
    def create(
        cls,
        session: PipSession,
        options: Values,
        suppress_no_index: bool = False,
    ) -> "LinkCollector":
        """
        :param session: The Session to use to make requests.
        :param suppress_no_index: Whether to ignore the --no-index option
            when constructing the SearchScope object.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            logger.debug(
                "Ignoring indexes: %s",
                ",".join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

        search_scope = SearchScope.create(
            # find_links must be a list (options may give None).
            find_links=options.find_links or [],
            index_urls=index_urls,
            no_index=options.no_index,
        )
        return LinkCollector(session=session, search_scope=search_scope)

    @property
    def find_links(self) -> List[str]:
        """The configured --find-links locations."""
        return self.search_scope.find_links

    def fetch_response(self, location: Link) -> Optional[IndexContent]:
        """
        Fetch an HTML page containing package links.
        """
        return _get_index_content(location, session=self.session)

    def collect_sources(
        self,
        project_name: str,
        candidates_from_page: CandidatesFromPage,
    ) -> CollectedSources:
        """Build the link sources to search for *project_name*."""
        # The OrderedDict calls deduplicate sources by URL.
        index_url_sources = collections.OrderedDict(
            build_source(
                location,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=False,
                cache_link_parsing=False,
            )
            for location in self.search_scope.get_index_urls_locations(project_name)
        ).values()
        find_links_sources = collections.OrderedDict(
            build_source(
                location,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=True,
                cache_link_parsing=True,
            )
            for location in self.find_links
        ).values()

        if logger.isEnabledFor(logging.DEBUG):
            located = [
                f"* {s.link}"
                for s in itertools.chain(find_links_sources, index_url_sources)
                if s is not None and s.link is not None
            ]
            header = (
                f"{len(located)} location(s) to search "
                f"for versions of {project_name}:"
            )
            logger.debug("\n".join([header] + located))

        return CollectedSources(
            find_links=list(find_links_sources),
            index_urls=list(index_url_sources),
        )
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @ -0,0 +1,223 @@ | ||||
| import logging | ||||
| import mimetypes | ||||
| import os | ||||
| import pathlib | ||||
| from typing import Callable, Iterable, Optional, Tuple | ||||
|  | ||||
| from pip._internal.models.candidate import InstallationCandidate | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.utils.urls import path_to_url, url_to_path | ||||
| from pip._internal.vcs import is_url | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
# Type aliases shared by the link-source classes below.
FoundCandidates = Iterable[InstallationCandidate]  # candidates parsed from a page
FoundLinks = Iterable[Link]  # direct links to archive files
CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
PageValidator = Callable[[Link], bool]  # e.g. PipSession.is_secure_origin
|  | ||||
|  | ||||
class LinkSource:
    """Abstract base for one location that can yield candidates and/or links."""

    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()
|  | ||||
|  | ||||
| def _is_html_file(file_url: str) -> bool: | ||||
|     return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" | ||||
|  | ||||
|  | ||||
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This looks the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
    ) -> None:
        # Resolve symlinks once so entries get stable file:// URLs.
        self._path = pathlib.Path(os.path.realpath(path))
        self._candidates_from_page = candidates_from_page

    @property
    def link(self) -> Optional[Link]:
        return None

    def _entry_urls(self) -> Iterable[str]:
        # One file:// URL per direct child of the directory.
        for child in self._path.iterdir():
            yield path_to_url(str(child))

    def page_candidates(self) -> FoundCandidates:
        for url in self._entry_urls():
            if _is_html_file(url):
                yield from self._candidates_from_page(Link(url))

    def file_links(self) -> FoundLinks:
        for url in self._entry_urls():
            if not _is_html_file(url):
                yield Link(url)
|  | ||||
|  | ||||
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file can be parsed for candidates.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is itself the link.
        if not _is_html_file(self._link.url):
            yield self._link
|  | ||||
|  | ||||
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only fetch pages from origins the session trusts.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link
|  | ||||
|  | ||||
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._index_link = link

    @property
    def link(self) -> Optional[Link]:
        return self._index_link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._index_link)

    def file_links(self) -> FoundLinks:
        # A directory index never contributes direct archive links.
        return ()
|  | ||||
|  | ||||
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Classify *location* and construct the matching LinkSource.

    :returns: a ``(url, source)`` pair; either element may be None when the
        location must be ignored (a warning is logged in that case).
    """
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        logger.warning(
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme.",
            location,
        )
        return (None, None)

    if path is None:
        # No local path at all: must be a remote URL.
        remote: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, remote)

    if os.path.isdir(path):
        source: LinkSource
        if expand_dir:
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)

    if os.path.isfile(path):
        file_source: LinkSource = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, file_source)

    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
| @ -0,0 +1,467 @@ | ||||
| import functools | ||||
| import logging | ||||
| import os | ||||
| import pathlib | ||||
| import sys | ||||
| import sysconfig | ||||
| from typing import Any, Dict, Generator, Optional, Tuple | ||||
|  | ||||
| from pip._internal.models.scheme import SCHEME_KEYS, Scheme | ||||
| from pip._internal.utils.compat import WINDOWS | ||||
| from pip._internal.utils.deprecation import deprecated | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
|  | ||||
| from . import _sysconfig | ||||
| from .base import ( | ||||
|     USER_CACHE_DIR, | ||||
|     get_major_minor_version, | ||||
|     get_src_prefix, | ||||
|     is_osx_framework, | ||||
|     site_packages, | ||||
|     user_site, | ||||
| ) | ||||
|  | ||||
# Public API of the locations package.
__all__ = [
    "USER_CACHE_DIR",
    "get_bin_prefix",
    "get_bin_user",
    "get_major_minor_version",
    "get_platlib",
    "get_purelib",
    "get_scheme",
    "get_src_prefix",
    "site_packages",
    "user_site",
]


logger = logging.getLogger(__name__)


# Platform library directory name (e.g. "lib64" on some 64-bit POSIX
# builds); falls back to "lib" when sys.platlibdir is absent (Python < 3.9).
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")

# pip prefers sysconfig-derived locations from Python 3.10 onwards.
_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
|  | ||||
|  | ||||
def _should_use_sysconfig() -> bool:
    """This function determines the value of _USE_SYSCONFIG.

    By default, pip uses sysconfig on Python 3.10+.
    But Python distributors can override this decision by setting:
        sysconfig._PIP_USE_SYSCONFIG = True / False
    Rationale in https://github.com/pypa/pip/issues/10647

    This is a function for testability, but should be constant during any one
    run.
    """
    override = getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT)
    return bool(override)
|  | ||||
|  | ||||
# Computed once at import; the helper exists purely so tests can patch it.
_USE_SYSCONFIG = _should_use_sysconfig()

if not _USE_SYSCONFIG:
    # Import distutils lazily to avoid deprecation warnings,
    # but import it soon enough that it is in memory and available during
    # a pip reinstall.
    from . import _distutils

# Be noisy about incompatibilities if this platforms "should" be using
# sysconfig, but is explicitly opting out and using distutils instead.
if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
    _MISMATCH_LEVEL = logging.WARNING
else:
    _MISMATCH_LEVEL = logging.DEBUG
|  | ||||
|  | ||||
def _looks_like_bpo_44860() -> bool:
    """The resolution to bpo-44860 will change this incorrect platlib.

    See <https://bugs.python.org/issue44860>.
    """
    from distutils.command.install import INSTALL_SCHEMES

    unix_user = INSTALL_SCHEMES.get("unix_user")
    if unix_user is None:
        return False
    # The buggy scheme points platlib at the pure-lib user site.
    return unix_user.get("platlib") == "$usersite"
|  | ||||
|  | ||||
| def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool: | ||||
|     platlib = scheme["platlib"] | ||||
|     if "/$platlibdir/" in platlib: | ||||
|         platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/") | ||||
|     if "/lib64/" not in platlib: | ||||
|         return False | ||||
|     unpatched = platlib.replace("/lib64/", "/lib/") | ||||
|     return unpatched.replace("$platbase/", "$base/") == scheme["purelib"] | ||||
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_lib() -> bool:
    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.

    This is the only way I can see to tell a Red Hat-patched Python.
    """
    from distutils.command.install import INSTALL_SCHEMES

    for scheme_name in ("unix_prefix", "unix_home"):
        scheme = INSTALL_SCHEMES.get(scheme_name)
        if scheme is None:
            return False
        if not _looks_like_red_hat_patched_platlib_purelib(scheme):
            return False
    return True
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def _looks_like_debian_scheme() -> bool:
    """Debian adds two additional schemes."""
    from distutils.command.install import INSTALL_SCHEMES

    return all(name in INSTALL_SCHEMES for name in ("deb_system", "unix_local"))
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_scheme() -> bool:
    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.

    Red Hat's ``00251-change-user-install-location.patch`` changes the install
    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
    (fortunately?) done quite unconditionally, so we create a default command
    object without any configuration to detect this.
    """
    from distutils.command.install import install
    from distutils.dist import Distribution

    cmd: Any = install(Distribution())
    cmd.finalize_options()
    expected_prefix = f"{os.path.normpath(sys.prefix)}/local"
    expected_exec_prefix = f"{os.path.normpath(sys.exec_prefix)}/local"
    return cmd.prefix == expected_prefix and cmd.exec_prefix == expected_exec_prefix
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def _looks_like_slackware_scheme() -> bool:
    """Slackware patches sysconfig but fails to patch distutils and site.

    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
    path, but does not do the same to the site module.
    """
    if user_site is None:  # User-site not available.
        return False
    try:
        paths = sysconfig.get_paths(scheme="posix_user", expand=False)
    except KeyError:  # User-site not available.
        return False
    # Patched sysconfig but unpatched site module => Slackware.
    return "/lib64/" not in user_site and "/lib64/" in paths["purelib"]
|  | ||||
|  | ||||
| @functools.lru_cache(maxsize=None) | ||||
| def _looks_like_msys2_mingw_scheme() -> bool: | ||||
|     """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. | ||||
|  | ||||
|     However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is | ||||
|     likely going to be included in their 3.10 release, so we ignore the warning. | ||||
|     See msys2/MINGW-packages#9319. | ||||
|  | ||||
|     MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, | ||||
|     and is missing the final ``"site-packages"``. | ||||
|     """ | ||||
|     paths = sysconfig.get_paths("nt", expand=False) | ||||
|     return all( | ||||
|         "Lib" not in p and "lib" in p and not p.endswith("site-packages") | ||||
|         for p in (paths[key] for key in ("platlib", "purelib")) | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]: | ||||
|     ldversion = sysconfig.get_config_var("LDVERSION") | ||||
|     abiflags = getattr(sys, "abiflags", None) | ||||
|  | ||||
|     # LDVERSION does not end with sys.abiflags. Just return the path unchanged. | ||||
|     if not ldversion or not abiflags or not ldversion.endswith(abiflags): | ||||
|         yield from parts | ||||
|         return | ||||
|  | ||||
|     # Strip sys.abiflags from LDVERSION-based path components. | ||||
|     for part in parts: | ||||
|         if part.endswith(ldversion): | ||||
|             part = part[: (0 - len(abiflags))] | ||||
|         yield part | ||||
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
    """Log (once per unique triple, thanks to the cache) a scheme mismatch.

    Reports that distutils and sysconfig disagree on *key*, pointing users at
    the tracking issue.
    """
    report_url = "https://github.com/pypa/pip/issues/10151"
    template = (
        "Value for %s does not match. Please report this to <%s>"
        "\ndistutils: %s"
        "\nsysconfig: %s"
    )
    logger.log(_MISMATCH_LEVEL, template, key, report_url, old, new)
|  | ||||
|  | ||||
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
    """Warn about a mismatch between *old* and *new*; return whether they differ."""
    if old != new:
        _warn_mismatched(old, new, key=key)
        return True
    return False
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def _log_context(
    *,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    prefix: Optional[str] = None,
) -> None:
    """Log (once per argument combination) the scheme inputs for bug reports."""
    template_lines = [
        "Additional context:",
        "user = %r",
        "home = %r",
        "root = %r",
        "prefix = %r",
    ]
    logger.log(_MISMATCH_LEVEL, "\n".join(template_lines), user, home, root, prefix)
|  | ||||
|  | ||||
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """Resolve the install scheme, comparing sysconfig and distutils results.

    Computes the scheme with sysconfig and, unless pip is configured to use
    sysconfig exclusively, also with distutils. Known, explainable
    platform-specific differences between the two are skipped; any remaining
    mismatch is either attributed to distutils config files (which triggers a
    deprecation notice) or reported to the user for filing upstream. On the
    legacy code path the distutils result is what gets returned.

    :param dist_name: name of the package, used in the headers path
    :param user: select the "user" scheme
    :param home: base directory for the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: do not consider the user's pydistutils.cfg
    :param prefix: base directory for the "prefix" scheme
    """
    new = _sysconfig.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )

    # Collect (old, new, key) triples for every unexplained mismatch.
    warning_contexts = []
    for k in SCHEME_KEYS:
        old_v = pathlib.Path(getattr(old, k))
        new_v = pathlib.Path(getattr(new, k))

        if old_v == new_v:
            continue

        # distutils incorrectly put PyPy packages under ``site-packages/python``
        # in the ``posix_home`` scheme, but PyPy devs said they expect the
        # directory name to be ``pypy`` instead. So we treat this as a bug fix
        # and not warn about it. See bpo-43307 and python/cpython#24628.
        skip_pypy_special_case = (
            sys.implementation.name == "pypy"
            and home is not None
            and k in ("platlib", "purelib")
            and old_v.parent == new_v.parent
            and old_v.name.startswith("python")
            and new_v.name.startswith("pypy")
        )
        if skip_pypy_special_case:
            continue

        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
        # the ``include`` value, but distutils's ``headers`` does. We'll let
        # CPython decide whether this is a bug or feature. See bpo-43948.
        skip_osx_framework_user_special_case = (
            user
            and is_osx_framework()
            and k == "headers"
            and old_v.parent.parent == new_v.parent
            and old_v.parent.name.startswith("python")
        )
        if skip_osx_framework_user_special_case:
            continue

        # On Red Hat and derived Linux distributions, distutils is patched to
        # use "lib64" instead of "lib" for platlib.
        if k == "platlib" and _looks_like_red_hat_lib():
            continue

        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
        # sys.platlibdir, but distutils's unix_user incorrectly continues
        # using the same $usersite for both platlib and purelib. This creates a
        # mismatch when sys.platlibdir is not "lib".
        skip_bpo_44860 = (
            user
            and k == "platlib"
            and not WINDOWS
            and sys.version_info >= (3, 9)
            and _PLATLIBDIR != "lib"
            and _looks_like_bpo_44860()
        )
        if skip_bpo_44860:
            continue

        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
        # but not usersite to match the location.
        skip_slackware_user_scheme = (
            user
            and k in ("platlib", "purelib")
            and not WINDOWS
            and _looks_like_slackware_scheme()
        )
        if skip_slackware_user_scheme:
            continue

        # Both Debian and Red Hat patch Python to place the system site under
        # /usr/local instead of /usr. Debian also places lib in dist-packages
        # instead of site-packages, but the /usr/local check should cover it.
        skip_linux_system_special_case = (
            not (user or home or prefix or running_under_virtualenv())
            and old_v.parts[1:3] == ("usr", "local")
            and len(new_v.parts) > 1
            and new_v.parts[1] == "usr"
            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
        )
        if skip_linux_system_special_case:
            continue

        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
        # the "pythonX.Y" part of the path, but distutils does.
        skip_sysconfig_abiflag_bug = (
            sys.version_info < (3, 8)
            and not WINDOWS
            and k in ("headers", "platlib", "purelib")
            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
        )
        if skip_sysconfig_abiflag_bug:
            continue

        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
        # part of the path. This is incorrect and will be fixed in MSYS.
        skip_msys2_mingw_bug = (
            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
        )
        if skip_msys2_mingw_bug:
            continue

        # CPython's POSIX install script invokes pip (via ensurepip) against the
        # interpreter located in the source tree, not the install site. This
        # triggers special logic in sysconfig that's not present in distutils.
        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
        skip_cpython_build = (
            sysconfig.is_python_build(check_home=True)
            and not WINDOWS
            and k in ("headers", "include", "platinclude")
        )
        if skip_cpython_build:
            continue

        warning_contexts.append((old_v, new_v, f"scheme.{k}"))

    if not warning_contexts:
        return old

    # Check if this path mismatch is caused by distutils config files. Those
    # files will no longer work once we switch to sysconfig, so this raises a
    # deprecation message for them.
    default_old = _distutils.distutils_scheme(
        dist_name,
        user,
        home,
        root,
        isolated,
        prefix,
        ignore_config_files=True,
    )
    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
        deprecated(
            reason=(
                "Configuring installation scheme with distutils config files "
                "is deprecated and will no longer work in the near future. If you "
                "are using a Homebrew or Linuxbrew Python, please see discussion "
                "at https://github.com/Homebrew/homebrew-core/issues/76621"
            ),
            replacement=None,
            gone_in=None,
        )
        return old

    # Post warnings about this mismatch so user can report them back.
    for old_v, new_v, key in warning_contexts:
        _warn_mismatched(old_v, new_v, key=key)
    _log_context(user=user, home=home, root=root, prefix=prefix)

    return old
|  | ||||
|  | ||||
def get_bin_prefix() -> str:
    """Return the scripts directory prefix, warning on a distutils mismatch."""
    sysconfig_value = _sysconfig.get_bin_prefix()
    if _USE_SYSCONFIG:
        return sysconfig_value

    distutils_value = _distutils.get_bin_prefix()
    mismatched = _warn_if_mismatch(
        pathlib.Path(distutils_value), pathlib.Path(sysconfig_value), key="bin_prefix"
    )
    if mismatched:
        _log_context()
    return distutils_value
|  | ||||
|  | ||||
def get_bin_user() -> str:
    """Return the scripts directory of the user scheme."""
    user_scheme = _sysconfig.get_scheme("", user=True)
    return user_scheme.scripts
|  | ||||
|  | ||||
def _looks_like_deb_system_dist_packages(value: str) -> bool:
    """Check if the value is Debian's APT-controlled dist-packages.

    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns
    the default package path controlled by APT, but does not patch
    ``sysconfig`` to do the same. This is similar to the bug worked around in
    ``get_scheme()``, but here the default is ``deb_system`` instead of
    ``unix_local``. Ultimately we can't do anything about this Debian bug, and
    this detection allows us to skip the warning when needed.
    """
    if not _looks_like_debian_scheme():
        return False
    return value == "/usr/lib/python3/dist-packages"
|  | ||||
|  | ||||
def get_purelib() -> str:
    """Return the default pure-Python lib location."""
    sysconfig_value = _sysconfig.get_purelib()
    if _USE_SYSCONFIG:
        return sysconfig_value

    distutils_value = _distutils.get_purelib()
    # Debian's APT-managed dist-packages is a known, unfixable mismatch; skip
    # the warning for it.
    if _looks_like_deb_system_dist_packages(distutils_value):
        return distutils_value
    mismatched = _warn_if_mismatch(
        pathlib.Path(distutils_value), pathlib.Path(sysconfig_value), key="purelib"
    )
    if mismatched:
        _log_context()
    return distutils_value
|  | ||||
|  | ||||
def get_platlib() -> str:
    """Return the default platform-shared lib location."""
    sysconfig_value = _sysconfig.get_platlib()
    if _USE_SYSCONFIG:
        return sysconfig_value

    from . import _distutils

    distutils_value = _distutils.get_platlib()
    # Debian's APT-managed dist-packages is a known, unfixable mismatch; skip
    # the warning for it.
    if _looks_like_deb_system_dist_packages(distutils_value):
        return distutils_value
    mismatched = _warn_if_mismatch(
        pathlib.Path(distutils_value), pathlib.Path(sysconfig_value), key="platlib"
    )
    if mismatched:
        _log_context()
    return distutils_value
| @ -0,0 +1,173 @@ | ||||
| """Locations where we look for configs, install stuff, etc""" | ||||
|  | ||||
| # The following comment should be removed at some point in the future. | ||||
| # mypy: strict-optional=False | ||||
|  | ||||
| # If pip's going to use distutils, it should not be using the copy that setuptools | ||||
| # might have injected into the environment. This is done by removing the injected | ||||
| # shim, if it's injected. | ||||
| # | ||||
| # See https://github.com/pypa/pip/issues/8761 for the original discussion and | ||||
| # rationale for why this is done within pip. | ||||
try:
    __import__("_distutils_hack").remove_shim()
except (ImportError, AttributeError):
    # Either setuptools' shim module is not present at all, or it is an older
    # version without remove_shim(); in both cases there is nothing to undo.
    pass
|  | ||||
| import logging | ||||
| import os | ||||
| import sys | ||||
| from distutils.cmd import Command as DistutilsCommand | ||||
| from distutils.command.install import SCHEME_KEYS | ||||
| from distutils.command.install import install as distutils_install_command | ||||
| from distutils.sysconfig import get_python_lib | ||||
| from typing import Dict, List, Optional, Union, cast | ||||
|  | ||||
| from pip._internal.models.scheme import Scheme | ||||
| from pip._internal.utils.compat import WINDOWS | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
|  | ||||
| from .base import get_major_minor_version | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """
    Return a distutils install scheme

    Builds a distutils ``install`` command for ``dist_name``, optionally
    honoring distutils config files, finalizes it, and collects the resolved
    ``install_<key>`` attributes into a dict keyed by ``SCHEME_KEYS``.

    :param dist_name: project name used for the Distribution object
    :param user: select the distutils "user" scheme
    :param home: base directory for the "home" scheme
    :param root: directory under which the final paths are re-based
    :param isolated: skip the user's pydistutils.cfg (--no-user-cfg)
    :param prefix: base directory for the "prefix" scheme
    :param ignore_config_files: do not parse any distutils config files
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    if not ignore_config_files:
        try:
            d.parse_config_files()
        except UnicodeDecodeError:
            # Typeshed does not include find_config_files() for some reason.
            paths = d.find_config_files()  # type: ignore
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )
    obj: Optional[DistutilsCommand] = None
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i = cast(distutils_install_command, obj)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    # Clear any config-file prefix so the user/home scheme takes effect; an
    # explicit ``prefix`` argument still wins on the next line.
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # Pip historically uses this venv-specific headers location instead
        # of whatever distutils resolved above.
        if home:
            prefix = home
        elif user:
            prefix = i.install_userbase
        else:
            prefix = i.prefix
        scheme["headers"] = os.path.join(
            prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
|  | ||||
|  | ||||
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters. The distutils
    documentation provides the context for the available schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    paths = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    scheme_kwargs = {
        key: paths[key]
        for key in ("platlib", "purelib", "headers", "scripts", "data")
    }
    return Scheme(**scheme_kwargs)
|  | ||||
|  | ||||
def get_bin_prefix() -> str:
    """Return the directory used for installed console scripts."""
    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
    # so we need to call normpath to eliminate them.
    prefix = os.path.normpath(sys.prefix)
    if WINDOWS:
        scripts_dir = os.path.join(prefix, "Scripts")
        # buildout uses 'bin' on Windows too?
        if os.path.exists(scripts_dir):
            return scripts_dir
        return os.path.join(prefix, "bin")
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
        return "/usr/local/bin"
    return os.path.join(prefix, "bin")
|  | ||||
|  | ||||
| def get_purelib() -> str: | ||||
|     return get_python_lib(plat_specific=False) | ||||
|  | ||||
|  | ||||
| def get_platlib() -> str: | ||||
|     return get_python_lib(plat_specific=True) | ||||
| @ -0,0 +1,213 @@ | ||||
| import logging | ||||
| import os | ||||
| import sys | ||||
| import sysconfig | ||||
| import typing | ||||
|  | ||||
| from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid | ||||
| from pip._internal.models.scheme import SCHEME_KEYS, Scheme | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
|  | ||||
| from .base import change_root, get_major_minor_version, is_osx_framework | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # Notes on _infer_* functions. | ||||
| # Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no | ||||
| # way to ask things like "what is the '_prefix' scheme on this platform". These | ||||
| # functions try to answer that with some heuristics while accounting for ad-hoc | ||||
| # platforms not covered by CPython's default sysconfig implementation. If the | ||||
| # ad-hoc implementation does not fully implement sysconfig, we'll fall back to | ||||
| # a POSIX scheme. | ||||
|  | ||||
# All scheme names sysconfig knows about on this interpreter/platform.
_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())

# sysconfig.get_preferred_scheme() only exists on Python 3.10+; None otherwise.
_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
|  | ||||
|  | ||||
def _should_use_osx_framework_prefix() -> bool:
    """Check for Apple's ``osx_framework_library`` scheme.

    Python distributed by Apple's Command Line Tools carries this special
    scheme, which applies when this is a framework build and we are
    installing into the system prefix.

    This does not account for ``pip install --prefix`` (which also means we
    are not installing to the system prefix) — that case should use
    ``posix_prefix``, but the logic here makes ``_infer_prefix()`` output
    ``osx_framework_library`` anyway. Since ``prefix`` is not available to
    ``sysconfig.get_default_scheme()`` (the stdlib replacement for
    ``_infer_prefix()``), presumably Apple cannot magically switch between
    the two schemes either. Keeping the behaviors consistent, the special
    case is instead handled in ``get_scheme()``.
    """
    if "osx_framework_library" not in _AVAILABLE_SCHEMES:
        return False
    if running_under_virtualenv():
        return False
    return is_osx_framework()
|  | ||||
|  | ||||
def _infer_prefix() -> str:
    """Try to find a prefix scheme for the current platform.

    Candidates, in order:

    * ``sysconfig.get_preferred_scheme("prefix")`` when available (3.10+).
    * The special ``osx_framework_library`` for Python distributed by Apple's
      Command Line Tools, when not in a virtual environment.
    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
    * Implementation without OS, used by PyPy on POSIX (``pypy``).
    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
    * Just the OS name, used by CPython on Windows (``nt``).
    * ``posix_prefix`` as the final fallback.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("prefix")
    if _should_use_osx_framework_prefix():
        return "osx_framework_library"
    for candidate in (
        f"{sys.implementation.name}_{os.name}",
        sys.implementation.name,
        f"{os.name}_prefix",
        os.name,  # On Windows, the prefix scheme is just called "nt".
    ):
        if candidate in _AVAILABLE_SCHEMES:
            return candidate
    return "posix_prefix"
|  | ||||
|  | ||||
def _infer_user() -> str:
    """Try to find a user scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("user")
    if is_osx_framework() and not running_under_virtualenv():
        candidate = "osx_framework_user"
    else:
        candidate = f"{os.name}_user"
    if candidate in _AVAILABLE_SCHEMES:
        return candidate
    if "posix_user" not in _AVAILABLE_SCHEMES:
        # Not even the generic POSIX user scheme exists on this interpreter.
        raise UserInstallationInvalid()
    return "posix_user"
|  | ||||
|  | ||||
def _infer_home() -> str:
    """Try to find a home scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("home")
    candidate = f"{os.name}_home"
    return candidate if candidate in _AVAILABLE_SCHEMES else "posix_home"
|  | ||||
|  | ||||
# Update these keys if the user sets a custom home. Overriding all the
# base/prefix variables redirects every path in the scheme.
_HOME_KEYS = [
    "installed_base",
    "base",
    "installed_platbase",
    "platbase",
    "prefix",
    "exec_prefix",
]
# Only treat "userbase" as overridable when sysconfig actually defines it
# for this interpreter.
if sysconfig.get_config_var("userbase") is not None:
    _HOME_KEYS.append("userbase")
|  | ||||
|  | ||||
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: typing.Optional[str] = None,
    root: typing.Optional[str] = None,
    isolated: bool = False,
    prefix: typing.Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters.

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: ignored, but kept for distutils compatibility (where
        this controls whether the user-site pydistutils.cfg is honored)
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    :raises InvalidSchemeCombination: if mutually exclusive selectors are
        given (``user`` with ``prefix``, or ``home`` with ``prefix``)
    """
    if user and prefix:
        raise InvalidSchemeCombination("--user", "--prefix")
    if home and prefix:
        raise InvalidSchemeCombination("--home", "--prefix")

    if home is not None:
        scheme_name = _infer_home()
    elif user:
        scheme_name = _infer_user()
    else:
        scheme_name = _infer_prefix()

    # Special case: When installing into a custom prefix, use posix_prefix
    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
    # docstring for details.
    if prefix is not None and scheme_name == "osx_framework_library":
        scheme_name = "posix_prefix"

    # Rebase all base/prefix variables on the custom home or prefix, if given.
    if home is not None:
        variables = {k: home for k in _HOME_KEYS}
    elif prefix is not None:
        variables = {k: prefix for k in _HOME_KEYS}
    else:
        variables = {}

    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)

    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
    # 1. Pip historically uses a special header path in virtual environments.
    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
    #    only do the same when not running in a virtual environment because
    #    pip's historical header path logic (see point 1) did not do this.
    if running_under_virtualenv():
        if user:
            base = variables.get("userbase", sys.prefix)
        else:
            base = variables.get("base", sys.prefix)
        python_xy = f"python{get_major_minor_version()}"
        paths["include"] = os.path.join(base, "include", "site", python_xy)
    elif not dist_name:
        dist_name = "UNKNOWN"

    scheme = Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=os.path.join(paths["include"], dist_name),
        scripts=paths["scripts"],
        data=paths["data"],
    )
    if root is not None:
        # Re-root every path (not just home/prefix-derived ones) under root.
        for key in SCHEME_KEYS:
            value = change_root(root, getattr(scheme, key))
            setattr(scheme, key, value)
    return scheme
|  | ||||
|  | ||||
| def get_bin_prefix() -> str: | ||||
|     # Forcing to use /usr/local/bin for standard macOS framework installs. | ||||
|     if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": | ||||
|         return "/usr/local/bin" | ||||
|     return sysconfig.get_paths()["scripts"] | ||||
|  | ||||
|  | ||||
def get_purelib() -> str:
    """Return sysconfig's default pure-Python library path."""
    default_paths = sysconfig.get_paths()
    return default_paths["purelib"]
|  | ||||
|  | ||||
def get_platlib() -> str:
    """Return sysconfig's default platform-specific library path."""
    default_paths = sysconfig.get_paths()
    return default_paths["platlib"]
| @ -0,0 +1,81 @@ | ||||
| import functools | ||||
| import os | ||||
| import site | ||||
| import sys | ||||
| import sysconfig | ||||
| import typing | ||||
|  | ||||
| from pip._internal.exceptions import InstallationError | ||||
| from pip._internal.utils import appdirs | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
|  | ||||
# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# Default site-packages directory of the running interpreter.
# FIXME doesn't account for venv linked to global site-packages
site_packages: str = sysconfig.get_path("purelib")
|  | ||||
|  | ||||
def get_major_minor_version() -> str:
    """Return the running interpreter's "major.minor" version as a string.

    E.g. "3.7" or "3.10".
    """
    major, minor = sys.version_info[:2]
    return f"{major}.{minor}"
|  | ||||
|  | ||||
def change_root(new_root: str, pathname: str) -> str:
    """Return 'pathname' re-rooted under 'new_root'.

    A relative 'pathname' is simply joined onto 'new_root'; an absolute one
    is first made relative (platform-specifically) and then joined, which is
    tricky on DOS/Windows and Mac OS. Borrowed from the standard library's
    distutils module.

    :raises InstallationError: on platforms other than POSIX and Windows,
        where the re-rooting rules are unknown.
    """
    if os.name == "posix":
        relative = pathname[1:] if os.path.isabs(pathname) else pathname
        return os.path.join(new_root, relative)

    if os.name == "nt":
        _drive, path = os.path.splitdrive(pathname)
        if path[0] == "\\":
            path = path[1:]
        return os.path.join(new_root, path)

    raise InstallationError(
        f"Unknown platform: {os.name}\n"
        "Can not change root path prefix on unknown platform."
    )
|  | ||||
|  | ||||
def get_src_prefix() -> str:
    """Return the absolute ``src`` directory used for editable checkouts."""
    if running_under_virtualenv():
        base = sys.prefix
    else:
        # FIXME: keep src in cwd for now (it is not a temporary folder)
        try:
            base = os.getcwd()
        except OSError:
            # The current working directory has been renamed or deleted.
            sys.exit("The folder you are executing pip from can no longer be found.")

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(os.path.join(base, "src"))
|  | ||||
|  | ||||
# Prefer site.getusersitepackages() when available: calling it also makes
# sure the value is properly initialised, unlike reading USER_SITE directly.
user_site: typing.Optional[str]
try:
    user_site = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def is_osx_framework() -> bool:
    """True when running a macOS framework build (``PYTHONFRAMEWORK`` is set)."""
    framework_name = sysconfig.get_config_var("PYTHONFRAMEWORK")
    return bool(framework_name)
| @ -0,0 +1,12 @@ | ||||
| from typing import List, Optional | ||||
|  | ||||
|  | ||||
def main(args: Optional[List[str]] = None) -> int:
    """Internal entry point kept alive for old console scripts that still
    reference it.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    # Imported lazily so merely importing this module stays cheap.
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
| @ -0,0 +1,127 @@ | ||||
| import contextlib | ||||
| import functools | ||||
| import os | ||||
| import sys | ||||
| from typing import TYPE_CHECKING, List, Optional, Type, cast | ||||
|  | ||||
| from pip._internal.utils.misc import strtobool | ||||
|  | ||||
| from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel | ||||
|  | ||||
# ``typing.Protocol`` is only needed for static analysis; at runtime a plain
# ``object`` base avoids importing it at all.
if TYPE_CHECKING:
    from typing import Protocol
else:
    Protocol = object
|  | ||||
# Names re-exported as the public API of this package.
__all__ = [
    "BaseDistribution",
    "BaseEnvironment",
    "FilesystemWheel",
    "MemoryWheel",
    "Wheel",
    "get_default_environment",
    "get_environment",
    "get_wheel_distribution",
    "select_backend",
]
|  | ||||
|  | ||||
| def _should_use_importlib_metadata() -> bool: | ||||
|     """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend. | ||||
|  | ||||
|     By default, pip uses ``importlib.metadata`` on Python 3.11+, and | ||||
|     ``pkg_resourcess`` otherwise. This can be overridden by a couple of ways: | ||||
|  | ||||
|     * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it | ||||
|       dictates whether ``importlib.metadata`` is used, regardless of Python | ||||
|       version. | ||||
|     * On Python 3.11+, Python distributors can patch ``importlib.metadata`` | ||||
|       to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This | ||||
|       makes pip use ``pkg_resources`` (unless the user set the aforementioned | ||||
|       environment variable to *True*). | ||||
|     """ | ||||
|     with contextlib.suppress(KeyError, ValueError): | ||||
|         return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"])) | ||||
|     if sys.version_info < (3, 11): | ||||
|         return False | ||||
|     import importlib.metadata | ||||
|  | ||||
|     return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True)) | ||||
|  | ||||
|  | ||||
class Backend(Protocol):
    # Structural type for a metadata backend module: it must expose concrete
    # Distribution and Environment classes.
    Distribution: Type[BaseDistribution]
    Environment: Type[BaseEnvironment]
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def select_backend() -> Backend:
    """Pick the metadata backend module once and cache it for later calls."""
    if _should_use_importlib_metadata():
        from . import importlib as backend_module
    else:
        from . import pkg_resources as backend_module
    return cast(Backend, backend_module)
|  | ||||
|  | ||||
def get_default_environment() -> BaseEnvironment:
    """Get the default representation for the current environment.

    This returns an Environment instance from the chosen backend. The default
    Environment instance should be built from ``sys.path`` and may use caching
    to share instance state across calls.
    """
    return select_backend().Environment.default()
|  | ||||
|  | ||||
def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
    """Get a representation of the environment specified by ``paths``.

    This returns an Environment instance from the chosen backend based on the
    given import paths. The backend must build a fresh instance representing
    the state of installed distributions when this function is called.
    """
    backend = select_backend()
    return backend.Environment.from_paths(paths)
|  | ||||
|  | ||||
def get_directory_distribution(directory: str) -> BaseDistribution:
    """Get the distribution metadata representation in the specified directory.

    This returns a Distribution instance from the chosen backend based on
    the given on-disk ``.dist-info`` directory.
    """
    backend = select_backend()
    return backend.Distribution.from_directory(directory)
|  | ||||
|  | ||||
def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
    """Get the representation of the specified wheel's distribution metadata.

    This returns a Distribution instance from the chosen backend based on
    the given wheel's ``.dist-info`` directory.

    :param canonical_name: Normalized project name of the given wheel.
    """
    backend = select_backend()
    return backend.Distribution.from_wheel(wheel, canonical_name)
|  | ||||
|  | ||||
def get_metadata_distribution(
    metadata_contents: bytes,
    filename: str,
    canonical_name: str,
) -> BaseDistribution:
    """Get the dist representation of the specified METADATA file contents.

    This returns a Distribution instance from the chosen backend sourced from the data
    in `metadata_contents`.

    :param metadata_contents: Contents of a METADATA file within a dist, or one served
                              via PEP 658.
    :param filename: Filename for the dist this metadata represents.
    :param canonical_name: Normalized project name of the given dist.
    """
    distribution_cls = select_backend().Distribution
    return distribution_cls.from_metadata_file_contents(
        metadata_contents,
        filename,
        canonical_name,
    )
| @ -0,0 +1,84 @@ | ||||
| # Extracted from https://github.com/pfmoore/pkg_metadata | ||||
|  | ||||
| from email.header import Header, decode_header, make_header | ||||
| from email.message import Message | ||||
| from typing import Any, Dict, List, Union | ||||
|  | ||||
# Known core-metadata fields, paired with whether each may appear multiple
# times in a METADATA/PKG-INFO file. Used by msg_to_json() below to decide
# between a scalar and a list value.
METADATA_FIELDS = [
    # Name, Multiple-Use
    ("Metadata-Version", False),
    ("Name", False),
    ("Version", False),
    ("Dynamic", True),
    ("Platform", True),
    ("Supported-Platform", True),
    ("Summary", False),
    ("Description", False),
    ("Description-Content-Type", False),
    ("Keywords", False),
    ("Home-page", False),
    ("Download-URL", False),
    ("Author", False),
    ("Author-email", False),
    ("Maintainer", False),
    ("Maintainer-email", False),
    ("License", False),
    ("Classifier", True),
    ("Requires-Dist", True),
    ("Requires-Python", False),
    ("Requires-External", True),
    ("Project-URL", True),
    ("Provides-Extra", True),
    ("Provides-Dist", True),
    ("Obsoletes-Dist", True),
]
|  | ||||
|  | ||||
def json_name(field: str) -> str:
    """Map a metadata field name to its JSON key (lowercase, ``_`` for ``-``)."""
    return field.replace("-", "_").lower()
|  | ||||
|  | ||||
def msg_to_json(msg: Message) -> Dict[str, Any]:
    """Convert a Message object into a JSON-compatible dictionary.

    Known fields (see METADATA_FIELDS) become keys named by json_name();
    multiple-use fields map to lists. The message payload, if any, becomes
    the "description" entry.
    """

    def sanitise_header(h: Union[Header, str]) -> str:
        # RFC 2047 headers can arrive as Header objects; decode each chunk.
        # Renamed loop variable from "bytes" to avoid shadowing the builtin.
        if isinstance(h, Header):
            chunks = []
            for data, encoding in decode_header(h):
                if encoding == "unknown-8bit":
                    try:
                        # See if UTF-8 works
                        data.decode("utf-8")
                        encoding = "utf-8"
                    except UnicodeDecodeError:
                        # If not, latin1 at least won't fail
                        encoding = "latin1"
                chunks.append((data, encoding))
            return str(make_header(chunks))
        return str(h)

    result = {}
    for field, multi in METADATA_FIELDS:
        if field not in msg:
            continue
        key = json_name(field)
        if multi:
            value: Union[str, List[str]] = [
                sanitise_header(v) for v in msg.get_all(field)
            ]
        else:
            value = sanitise_header(msg.get(field))
            if key == "keywords":
                # Accept both comma-separated and space-separated
                # forms, for better compatibility with old data.
                if "," in value:
                    value = [v.strip() for v in value.split(",")]
                else:
                    value = value.split()
        result[key] = value

    # The message body holds the long-form description.
    payload = msg.get_payload()
    if payload:
        result["description"] = payload

    return result
| @ -0,0 +1,688 @@ | ||||
| import csv | ||||
| import email.message | ||||
| import functools | ||||
| import json | ||||
| import logging | ||||
| import pathlib | ||||
| import re | ||||
| import zipfile | ||||
| from typing import ( | ||||
|     IO, | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     Collection, | ||||
|     Container, | ||||
|     Dict, | ||||
|     Iterable, | ||||
|     Iterator, | ||||
|     List, | ||||
|     NamedTuple, | ||||
|     Optional, | ||||
|     Tuple, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from pip._vendor.packaging.requirements import Requirement | ||||
| from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet | ||||
| from pip._vendor.packaging.utils import NormalizedName | ||||
| from pip._vendor.packaging.version import LegacyVersion, Version | ||||
|  | ||||
| from pip._internal.exceptions import NoneMetadataError | ||||
| from pip._internal.locations import site_packages, user_site | ||||
| from pip._internal.models.direct_url import ( | ||||
|     DIRECT_URL_METADATA_NAME, | ||||
|     DirectUrl, | ||||
|     DirectUrlValidationError, | ||||
| ) | ||||
| from pip._internal.utils.compat import stdlib_pkgs  # TODO: Move definition here. | ||||
| from pip._internal.utils.egg_link import egg_link_path_from_sys_path | ||||
| from pip._internal.utils.misc import is_local, normalize_path | ||||
| from pip._internal.utils.packaging import safe_extra | ||||
| from pip._internal.utils.urls import url_to_path | ||||
|  | ||||
| from ._json import msg_to_json | ||||
|  | ||||
# ``typing.Protocol`` matters only to type checkers; at runtime a plain
# ``object`` base is used instead.
if TYPE_CHECKING:
    from typing import Protocol
else:
    Protocol = object
|  | ||||
# A parsed version may be either a PEP 440 Version or a legacy one.
DistributionVersion = Union[LegacyVersion, Version]

# A path-like reference to an entry inside the metadata (info) directory.
InfoPath = Union[str, pathlib.PurePath]

logger = logging.getLogger(__name__)
|  | ||||
|  | ||||
class BaseEntryPoint(Protocol):
    """Protocol for a single entry point exposed by a distribution.

    Concrete backends provide name/value/group; this base only declares them.
    """

    @property
    def name(self) -> str:
        raise NotImplementedError()

    @property
    def value(self) -> str:
        raise NotImplementedError()

    @property
    def group(self) -> str:
        raise NotImplementedError()
|  | ||||
|  | ||||
| def _convert_installed_files_path( | ||||
|     entry: Tuple[str, ...], | ||||
|     info: Tuple[str, ...], | ||||
| ) -> str: | ||||
|     """Convert a legacy installed-files.txt path into modern RECORD path. | ||||
|  | ||||
|     The legacy format stores paths relative to the info directory, while the | ||||
|     modern format stores paths relative to the package root, e.g. the | ||||
|     site-packages directory. | ||||
|  | ||||
|     :param entry: Path parts of the installed-files.txt entry. | ||||
|     :param info: Path parts of the egg-info directory relative to package root. | ||||
|     :returns: The converted entry. | ||||
|  | ||||
|     For best compatibility with symlinks, this does not use ``abspath()`` or | ||||
|     ``Path.resolve()``, but tries to work with path parts: | ||||
|  | ||||
|     1. While ``entry`` starts with ``..``, remove the equal amounts of parts | ||||
|        from ``info``; if ``info`` is empty, start appending ``..`` instead. | ||||
|     2. Join the two directly. | ||||
|     """ | ||||
|     while entry and entry[0] == "..": | ||||
|         if not info or info[-1] == "..": | ||||
|             info += ("..",) | ||||
|         else: | ||||
|             info = info[:-1] | ||||
|         entry = entry[1:] | ||||
|     return str(pathlib.Path(*info, *entry)) | ||||
|  | ||||
|  | ||||
class RequiresEntry(NamedTuple):
    """One dependency entry parsed from an egg-info ``requires.txt``."""

    # The raw requirement string for this entry.
    requirement: str
    # Extra associated with the entry's section — presumably "" when none;
    # verify against the parser below.
    extra: str
    # Environment marker from the section header — presumably "" when none.
    marker: str
|  | ||||
|  | ||||
| class BaseDistribution(Protocol): | ||||
    @classmethod
    def from_directory(cls, directory: str) -> "BaseDistribution":
        """Load the distribution from a metadata directory.

        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> "BaseDistribution":
        """Load the distribution from the contents of a METADATA file.

        This is used to implement PEP 658 by generating a "shallow" dist object that can
        be used for resolution without downloading or building the actual dist yet.

        :param metadata_contents: The contents of a METADATA file.
        :param filename: File name for the dist with this metadata.
        :param project_name: Name of the project this dist represents.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
    @classmethod
    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
        """Load the distribution from a given wheel.

        :param wheel: A concrete wheel definition.
        :param name: File name of the wheel.

        :raises InvalidWheel: Whenever loading of the wheel causes a
            :py:exc:`zipfile.BadZipFile` exception to be thrown.
        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
            internally.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
|     def __repr__(self) -> str: | ||||
|         return f"{self.raw_name} {self.version} ({self.location})" | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.raw_name} {self.version}" | ||||
|  | ||||
    @property
    def location(self) -> Optional[str]:
        """Where the distribution is loaded from.

        A string value is not necessarily a filesystem path, since distributions
        can be loaded from other sources, e.g. arbitrary zip archives. ``None``
        means the distribution is created in-memory.

        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
        this is a symbolic link, we want to preserve the relative path between
        it and files in the distribution.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
|     @property | ||||
|     def editable_project_location(self) -> Optional[str]: | ||||
|         """The project location for editable distributions. | ||||
|  | ||||
|         This is the directory where pyproject.toml or setup.py is located. | ||||
|         None if the distribution is not installed in editable mode. | ||||
|         """ | ||||
|         # TODO: this property is relatively costly to compute, memoize it ? | ||||
|         direct_url = self.direct_url | ||||
|         if direct_url: | ||||
|             if direct_url.is_local_editable(): | ||||
|                 return url_to_path(direct_url.url) | ||||
|         else: | ||||
|             # Search for an .egg-link file by walking sys.path, as it was | ||||
|             # done before by dist_is_editable(). | ||||
|             egg_link_path = egg_link_path_from_sys_path(self.raw_name) | ||||
|             if egg_link_path: | ||||
|                 # TODO: get project location from second line of egg_link file | ||||
|                 #       (https://github.com/pypa/pip/issues/10243) | ||||
|                 return self.location | ||||
|         return None | ||||
|  | ||||
    @property
    def installed_location(self) -> Optional[str]:
        """The distribution's "installed" location.

        This should generally be a ``site-packages`` directory. This is
        usually ``dist.location``, except for legacy develop-installed packages,
        where ``dist.location`` is the source code location, and this is where
        the ``.egg-link`` file is.

        The returned location is normalized (in particular, with symlinks removed).
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
    @property
    def info_location(self) -> Optional[str]:
        """Location of the .[egg|dist]-info directory or file.

        Similarly to ``location``, a string value is not necessarily a
        filesystem path. ``None`` means the distribution is created in-memory.

        For a modern .dist-info installation on disk, this should be something
        like ``{location}/{raw_name}-{version}.dist-info``.

        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
        this is a symbolic link, we want to preserve the relative path between
        it and other files in the distribution.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
|     @property | ||||
|     def installed_by_distutils(self) -> bool: | ||||
|         """Whether this distribution is installed with legacy distutils format. | ||||
|  | ||||
|         A distribution installed with "raw" distutils not patched by setuptools | ||||
|         uses one single file at ``info_location`` to store metadata. We need to | ||||
|         treat this specially on uninstallation. | ||||
|         """ | ||||
|         info_location = self.info_location | ||||
|         if not info_location: | ||||
|             return False | ||||
|         return pathlib.Path(info_location).is_file() | ||||
|  | ||||
|     @property | ||||
|     def installed_as_egg(self) -> bool: | ||||
|         """Whether this distribution is installed as an egg. | ||||
|  | ||||
|         This usually indicates the distribution was installed by (older versions | ||||
|         of) easy_install. | ||||
|         """ | ||||
|         location = self.location | ||||
|         if not location: | ||||
|             return False | ||||
|         return location.endswith(".egg") | ||||
|  | ||||
|     @property | ||||
|     def installed_with_setuptools_egg_info(self) -> bool: | ||||
|         """Whether this distribution is installed with the ``.egg-info`` format. | ||||
|  | ||||
|         This usually indicates the distribution was installed with setuptools | ||||
|         with an old pip version or with ``single-version-externally-managed``. | ||||
|  | ||||
|         Note that this ensure the metadata store is a directory. distutils can | ||||
|         also installs an ``.egg-info``, but as a file, not a directory. This | ||||
|         property is *False* for that case. Also see ``installed_by_distutils``. | ||||
|         """ | ||||
|         info_location = self.info_location | ||||
|         if not info_location: | ||||
|             return False | ||||
|         if not info_location.endswith(".egg-info"): | ||||
|             return False | ||||
|         return pathlib.Path(info_location).is_dir() | ||||
|  | ||||
|     @property | ||||
|     def installed_with_dist_info(self) -> bool: | ||||
|         """Whether this distribution is installed with the "modern format". | ||||
|  | ||||
|         This indicates a "modern" installation, e.g. storing metadata in the | ||||
|         ``.dist-info`` directory. This applies to installations made by | ||||
|         setuptools (but through pip, not directly), or anything using the | ||||
|         standardized build backend interface (PEP 517). | ||||
|         """ | ||||
|         info_location = self.info_location | ||||
|         if not info_location: | ||||
|             return False | ||||
|         if not info_location.endswith(".dist-info"): | ||||
|             return False | ||||
|         return pathlib.Path(info_location).is_dir() | ||||
|  | ||||
    @property
    def canonical_name(self) -> NormalizedName:
        """Normalized project name of the distribution (backend-provided)."""
        raise NotImplementedError()
|  | ||||
    @property
    def version(self) -> DistributionVersion:
        """Parsed version of the distribution (backend-provided)."""
        raise NotImplementedError()
|  | ||||
|     @property | ||||
|     def setuptools_filename(self) -> str: | ||||
|         """Convert a project name to its setuptools-compatible filename. | ||||
|  | ||||
|         This is a copy of ``pkg_resources.to_filename()`` for compatibility. | ||||
|         """ | ||||
|         return self.raw_name.replace("-", "_") | ||||
|  | ||||
|     @property | ||||
|     def direct_url(self) -> Optional[DirectUrl]: | ||||
|         """Obtain a DirectUrl from this distribution. | ||||
|  | ||||
|         Returns None if the distribution has no `direct_url.json` metadata, | ||||
|         or if `direct_url.json` is invalid. | ||||
|         """ | ||||
|         try: | ||||
|             content = self.read_text(DIRECT_URL_METADATA_NAME) | ||||
|         except FileNotFoundError: | ||||
|             return None | ||||
|         try: | ||||
|             return DirectUrl.from_json(content) | ||||
|         except ( | ||||
|             UnicodeDecodeError, | ||||
|             json.JSONDecodeError, | ||||
|             DirectUrlValidationError, | ||||
|         ) as e: | ||||
|             logger.warning( | ||||
|                 "Error parsing %s for %s: %s", | ||||
|                 DIRECT_URL_METADATA_NAME, | ||||
|                 self.canonical_name, | ||||
|                 e, | ||||
|             ) | ||||
|             return None | ||||
|  | ||||
|     @property | ||||
|     def installer(self) -> str: | ||||
|         try: | ||||
|             installer_text = self.read_text("INSTALLER") | ||||
|         except (OSError, ValueError, NoneMetadataError): | ||||
|             return ""  # Fail silently if the installer file cannot be read. | ||||
|         for line in installer_text.splitlines(): | ||||
|             cleaned_line = line.strip() | ||||
|             if cleaned_line: | ||||
|                 return cleaned_line | ||||
|         return "" | ||||
|  | ||||
    @property
    def requested(self) -> bool:
        """Whether a ``REQUESTED`` marker file exists in the info directory."""
        return self.is_file("REQUESTED")
|  | ||||
    @property
    def editable(self) -> bool:
        """Whether the distribution is installed in editable mode."""
        return bool(self.editable_project_location)
|  | ||||
|     @property | ||||
|     def local(self) -> bool: | ||||
|         """If distribution is installed in the current virtual environment. | ||||
|  | ||||
|         Always True if we're not in a virtualenv. | ||||
|         """ | ||||
|         if self.installed_location is None: | ||||
|             return False | ||||
|         return is_local(self.installed_location) | ||||
|  | ||||
|     @property | ||||
|     def in_usersite(self) -> bool: | ||||
|         if self.installed_location is None or user_site is None: | ||||
|             return False | ||||
|         return self.installed_location.startswith(normalize_path(user_site)) | ||||
|  | ||||
|     @property | ||||
|     def in_site_packages(self) -> bool: | ||||
|         if self.installed_location is None or site_packages is None: | ||||
|             return False | ||||
|         return self.installed_location.startswith(normalize_path(site_packages)) | ||||
|  | ||||
    def is_file(self, path: InfoPath) -> bool:
        """Check whether an entry in the info directory is a file."""
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
    def iter_distutils_script_names(self) -> Iterator[str]:
        """Find distutils 'scripts' entries metadata.

        If 'scripts' is supplied in ``setup.py``, distutils records those in the
        installed distribution's ``scripts`` directory, a file for each script.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
    def read_text(self, path: InfoPath) -> str:
        """Read a file in the info directory.

        :raise FileNotFoundError: If ``path`` does not exist in the directory.
        :raise NoneMetadataError: If ``path`` exists in the info directory, but
            cannot be read.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        """Iterate over the distribution's declared entry points."""
        raise NotImplementedError()
|  | ||||
    def _metadata_impl(self) -> email.message.Message:
        """Backend hook: return the raw parsed metadata message."""
        raise NotImplementedError()
|  | ||||
    @functools.lru_cache(maxsize=1)
    def _metadata_cached(self) -> email.message.Message:
        # When we drop python 3.7 support, move this to the metadata property and use
        # functools.cached_property instead of lru_cache.
        # NOTE(review): lru_cache on a method keys on ``self`` (ruff B019) and
        # keeps that instance alive; maxsize=1 bounds this to one instance.
        metadata = self._metadata_impl()
        self._add_egg_info_requires(metadata)
        return metadata
|  | ||||
    @property
    def metadata(self) -> email.message.Message:
        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.

        This should return an empty message if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        # Parsing is done lazily (and at most once) via _metadata_cached().
        return self._metadata_cached()
|  | ||||
    @property
    def metadata_dict(self) -> Dict[str, Any]:
        """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.

        This should return an empty dict if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        # Conversion itself is delegated to the shared msg_to_json helper.
        return msg_to_json(self.metadata)
|  | ||||
    @property
    def metadata_version(self) -> Optional[str]:
        """Value of "Metadata-Version:" in distribution metadata, if available."""
        # ``None`` when the metadata lacks the key (Message.get default).
        return self.metadata.get("Metadata-Version")
|  | ||||
    @property
    def raw_name(self) -> str:
        """Value of "Name:" in distribution metadata."""
        # The metadata should NEVER be missing the Name: key, but if it somehow
        # does, fall back to the known canonical name.
        return self.metadata.get("Name", self.canonical_name)
|  | ||||
|     @property | ||||
|     def requires_python(self) -> SpecifierSet: | ||||
|         """Value of "Requires-Python:" in distribution metadata. | ||||
|  | ||||
|         If the key does not exist or contains an invalid value, an empty | ||||
|         SpecifierSet should be returned. | ||||
|         """ | ||||
|         value = self.metadata.get("Requires-Python") | ||||
|         if value is None: | ||||
|             return SpecifierSet() | ||||
|         try: | ||||
|             # Convert to str to satisfy the type checker; this can be a Header object. | ||||
|             spec = SpecifierSet(str(value)) | ||||
|         except InvalidSpecifier as e: | ||||
|             message = "Package %r has an invalid Requires-Python: %s" | ||||
|             logger.warning(message, self.raw_name, e) | ||||
|             return SpecifierSet() | ||||
|         return spec | ||||
|  | ||||
    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Dependencies of this distribution.

        For modern .dist-info distributions, this is the collection of
        "Requires-Dist:" entries in distribution metadata.

        :param extras: presumably extras whose conditional dependencies should
            also be yielded — TODO confirm against backend implementations.
        """
        raise NotImplementedError()
|  | ||||
    def iter_provided_extras(self) -> Iterable[str]:
        """Extras provided by this distribution.

        For modern .dist-info distributions, this is the collection of
        "Provides-Extra:" entries in distribution metadata.
        """
        # Implemented by the concrete metadata backends.
        raise NotImplementedError()
|  | ||||
|     def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]: | ||||
|         try: | ||||
|             text = self.read_text("RECORD") | ||||
|         except FileNotFoundError: | ||||
|             return None | ||||
|         # This extra Path-str cast normalizes entries. | ||||
|         return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines())) | ||||
|  | ||||
    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
        """Yield entries from ``installed-files.txt``, or None if it is absent.

        Legacy entries are recorded relative to the info directory. When both
        ``location`` and ``info_location`` are known and the latter is inside
        the former, each entry is rewritten to be relative to the package root
        (RECORD-style); otherwise the raw entries are returned unchanged.
        """
        try:
            text = self.read_text("installed-files.txt")
        except FileNotFoundError:
            return None
        # Skip blank lines.
        paths = (p for p in text.splitlines(keepends=False) if p)
        root = self.location
        info = self.info_location
        if root is None or info is None:
            return paths
        try:
            info_rel = pathlib.Path(info).relative_to(root)
        except ValueError:  # info is not relative to root.
            return paths
        if not info_rel.parts:  # info *is* root.
            return paths
        return (
            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
            for p in paths
        )
|  | ||||
|     def iter_declared_entries(self) -> Optional[Iterator[str]]: | ||||
|         """Iterate through file entries declared in this distribution. | ||||
|  | ||||
|         For modern .dist-info distributions, this is the files listed in the | ||||
|         ``RECORD`` metadata file. For legacy setuptools distributions, this | ||||
|         comes from ``installed-files.txt``, with entries normalized to be | ||||
|         compatible with the format used by ``RECORD``. | ||||
|  | ||||
|         :return: An iterator for listed entries, or None if the distribution | ||||
|             contains neither ``RECORD`` nor ``installed-files.txt``. | ||||
|         """ | ||||
|         return ( | ||||
|             self._iter_declared_entries_from_record() | ||||
|             or self._iter_declared_entries_from_legacy() | ||||
|         ) | ||||
|  | ||||
|     def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]: | ||||
|         """Parse a ``requires.txt`` in an egg-info directory. | ||||
|  | ||||
|         This is an INI-ish format where an egg-info stores dependencies. A | ||||
|         section name describes extra other environment markers, while each entry | ||||
|         is an arbitrary string (not a key-value pair) representing a dependency | ||||
|         as a requirement string (no markers). | ||||
|  | ||||
|         There is a construct in ``importlib.metadata`` called ``Sectioned`` that | ||||
|         does mostly the same, but the format is currently considered private. | ||||
|         """ | ||||
|         try: | ||||
|             content = self.read_text("requires.txt") | ||||
|         except FileNotFoundError: | ||||
|             return | ||||
|         extra = marker = ""  # Section-less entries don't have markers. | ||||
|         for line in content.splitlines(): | ||||
|             line = line.strip() | ||||
|             if not line or line.startswith("#"):  # Comment; ignored. | ||||
|                 continue | ||||
|             if line.startswith("[") and line.endswith("]"):  # A section header. | ||||
|                 extra, _, marker = line.strip("[]").partition(":") | ||||
|                 continue | ||||
|             yield RequiresEntry(requirement=line, extra=extra, marker=marker) | ||||
|  | ||||
|     def _iter_egg_info_extras(self) -> Iterable[str]: | ||||
|         """Get extras from the egg-info directory.""" | ||||
|         known_extras = {""} | ||||
|         for entry in self._iter_requires_txt_entries(): | ||||
|             if entry.extra in known_extras: | ||||
|                 continue | ||||
|             known_extras.add(entry.extra) | ||||
|             yield entry.extra | ||||
|  | ||||
|     def _iter_egg_info_dependencies(self) -> Iterable[str]: | ||||
|         """Get distribution dependencies from the egg-info directory. | ||||
|  | ||||
|         To ease parsing, this converts a legacy dependency entry into a PEP 508 | ||||
|         requirement string. Like ``_iter_requires_txt_entries()``, there is code | ||||
|         in ``importlib.metadata`` that does mostly the same, but not do exactly | ||||
|         what we need. | ||||
|  | ||||
|         Namely, ``importlib.metadata`` does not normalize the extra name before | ||||
|         putting it into the requirement string, which causes marker comparison | ||||
|         to fail because the dist-info format do normalize. This is consistent in | ||||
|         all currently available PEP 517 backends, although not standardized. | ||||
|         """ | ||||
|         for entry in self._iter_requires_txt_entries(): | ||||
|             if entry.extra and entry.marker: | ||||
|                 marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"' | ||||
|             elif entry.extra: | ||||
|                 marker = f'extra == "{safe_extra(entry.extra)}"' | ||||
|             elif entry.marker: | ||||
|                 marker = entry.marker | ||||
|             else: | ||||
|                 marker = "" | ||||
|             if marker: | ||||
|                 yield f"{entry.requirement} ; {marker}" | ||||
|             else: | ||||
|                 yield entry.requirement | ||||
|  | ||||
|     def _add_egg_info_requires(self, metadata: email.message.Message) -> None: | ||||
|         """Add egg-info requires.txt information to the metadata.""" | ||||
|         if not metadata.get_all("Requires-Dist"): | ||||
|             for dep in self._iter_egg_info_dependencies(): | ||||
|                 metadata["Requires-Dist"] = dep | ||||
|         if not metadata.get_all("Provides-Extra"): | ||||
|             for extra in self._iter_egg_info_extras(): | ||||
|                 metadata["Provides-Extra"] = extra | ||||
|  | ||||
|  | ||||
class BaseEnvironment:
    """An environment containing distributions to introspect."""

    @classmethod
    def default(cls) -> "BaseEnvironment":
        raise NotImplementedError()

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
        raise NotImplementedError()

    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
        """Given a requirement name, return the installed distributions.

        The name may not be normalized. The implementation must canonicalize
        it for lookup.
        """
        raise NotImplementedError()

    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions.

        This function should be implemented by subclass, but never called
        directly. Use the public ``iter_distribution()`` instead, which
        implements additional logic to make sure the distributions are valid.
        """
        raise NotImplementedError()

    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
        """Iterate through all installed distributions without any filtering."""
        # Valid project name pattern taken from PEP 508. Anything else is
        # likely residue such as the ``~atplotlib.dist-info`` folders that
        # pip's AdjacentTempDirectory leaves if cleanup was interrupted.
        name_pattern = re.compile(
            r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
            flags=re.IGNORECASE,
        )
        for dist in self._iter_distributions():
            if name_pattern.match(dist.canonical_name):
                yield dist
            else:
                logger.warning(
                    "Ignoring invalid distribution %s (%s)",
                    dist.canonical_name,
                    dist.location,
                )

    def iter_installed_distributions(
        self,
        local_only: bool = True,
        skip: Container[str] = stdlib_pkgs,
        include_editables: bool = True,
        editables_only: bool = False,
        user_only: bool = False,
    ) -> Iterator[BaseDistribution]:
        """Return a list of installed distributions.

        This is based on ``iter_all_distributions()`` with additional filtering
        options. Note that ``iter_installed_distributions()`` without arguments
        is *not* equal to ``iter_all_distributions()``, since some of the
        configurations exclude packages by default.

        :param local_only: If True (default), only return installations
        local to the current virtualenv, if in a virtualenv.
        :param skip: An iterable of canonicalized project names to ignore;
            defaults to ``stdlib_pkgs``.
        :param include_editables: If False, don't report editables.
        :param editables_only: If True, only report editables.
        :param user_only: If True, only report installations in the user
        site directory.
        """

        def _wanted(dist: BaseDistribution) -> bool:
            # Apply each configured filter in turn.
            if local_only and not dist.local:
                return False
            if not include_editables and dist.editable:
                return False
            if editables_only and not dist.editable:
                return False
            if user_only and not dist.in_usersite:
                return False
            return dist.canonical_name not in skip

        return (d for d in self.iter_all_distributions() if _wanted(d))
|  | ||||
|  | ||||
class Wheel(Protocol):
    """Protocol for wheel sources that can be opened as a ZIP archive."""

    # Where the wheel lives (path or label); used e.g. for error reporting.
    location: str

    def as_zipfile(self) -> zipfile.ZipFile:
        raise NotImplementedError()
|  | ||||
|  | ||||
class FilesystemWheel(Wheel):
    """A wheel that exists as a file on disk."""

    def __init__(self, location: str) -> None:
        self.location = location

    def as_zipfile(self) -> zipfile.ZipFile:
        """Open the on-disk wheel as a ZIP archive."""
        return zipfile.ZipFile(self.location, allowZip64=True)
|  | ||||
|  | ||||
class MemoryWheel(Wheel):
    """A wheel kept in an in-memory byte stream rather than on disk."""

    def __init__(self, location: str, stream: IO[bytes]) -> None:
        self.location = location
        self.stream = stream

    def as_zipfile(self) -> zipfile.ZipFile:
        """Open the in-memory stream as a ZIP archive."""
        return zipfile.ZipFile(self.stream, allowZip64=True)
| @ -0,0 +1,4 @@ | ||||
| from ._dists import Distribution | ||||
| from ._envs import Environment | ||||
|  | ||||
| __all__ = ["Distribution", "Environment"] | ||||
| @ -0,0 +1,55 @@ | ||||
| import importlib.metadata | ||||
| from typing import Any, Optional, Protocol, cast | ||||
|  | ||||
|  | ||||
class BadMetadata(ValueError):
    """Raised when a distribution's metadata cannot be used.

    :param dist: The distribution whose metadata is bad.
    :param reason: Human-readable description of what is wrong.
    """

    def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
        self.dist = dist
        self.reason = reason

    def __str__(self) -> str:
        return f"Bad metadata in {self.dist} ({self.reason})"
|  | ||||
|  | ||||
class BasePath(Protocol):
    """A protocol that various path objects conform.

    This exists because importlib.metadata uses both ``pathlib.Path`` and
    ``zipfile.Path``, and we need a common base for type hints (Union does
    not work well since ``zipfile.Path`` is too new for our linter setup).

    This does not mean to be exhaustive, but only contains things that
    present in both classes *that we need*.
    """

    @property
    def name(self) -> str:
        """Final component of the path."""
        raise NotImplementedError()

    @property
    def parent(self) -> "BasePath":
        """Logical parent of the path."""
        raise NotImplementedError()
|  | ||||
|  | ||||
def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
    """Find the path to the distribution's metadata directory.

    HACK: This relies on importlib.metadata's private ``_path`` attribute.
    Not all distributions exist on disk, so importlib.metadata is correct to
    not expose the attribute as public. But pip's code base is old and not as
    clean, so we do this to avoid having to rewrite too many things.
    Hopefully we can eliminate this some day.
    """
    try:
        return d._path
    except AttributeError:
        return None
|  | ||||
|  | ||||
def get_dist_name(dist: importlib.metadata.Distribution) -> str:
    """Get the distribution's project name.

    The ``name`` attribute is only available in Python 3.10 or later. We are
    targeting exactly that, but Mypy does not know this.

    :raises BadMetadata: If the metadata's name entry is not a string.
    """
    raw_name = cast(Any, dist).name
    if isinstance(raw_name, str):
        return raw_name
    raise BadMetadata(dist, reason="invalid metadata entry 'name'")
| @ -0,0 +1,224 @@ | ||||
| import email.message | ||||
| import importlib.metadata | ||||
| import os | ||||
| import pathlib | ||||
| import zipfile | ||||
| from typing import ( | ||||
|     Collection, | ||||
|     Dict, | ||||
|     Iterable, | ||||
|     Iterator, | ||||
|     Mapping, | ||||
|     Optional, | ||||
|     Sequence, | ||||
|     cast, | ||||
| ) | ||||
|  | ||||
| from pip._vendor.packaging.requirements import Requirement | ||||
| from pip._vendor.packaging.utils import NormalizedName, canonicalize_name | ||||
| from pip._vendor.packaging.version import parse as parse_version | ||||
|  | ||||
| from pip._internal.exceptions import InvalidWheel, UnsupportedWheel | ||||
| from pip._internal.metadata.base import ( | ||||
|     BaseDistribution, | ||||
|     BaseEntryPoint, | ||||
|     DistributionVersion, | ||||
|     InfoPath, | ||||
|     Wheel, | ||||
| ) | ||||
| from pip._internal.utils.misc import normalize_path | ||||
| from pip._internal.utils.packaging import safe_extra | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
| from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file | ||||
|  | ||||
| from ._compat import BasePath, get_dist_name | ||||
|  | ||||
|  | ||||
class WheelDistribution(importlib.metadata.Distribution):
    """An ``importlib.metadata.Distribution`` read from a wheel.

    Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
    its implementation is too "lazy" for pip's needs (we can't keep the ZipFile
    handle open for the entire lifetime of the distribution object).

    This implementation eagerly reads the entire metadata directory into the
    memory instead, and operates from that.
    """

    def __init__(
        self,
        files: Mapping[pathlib.PurePosixPath, bytes],
        info_location: pathlib.PurePosixPath,
    ) -> None:
        # files: metadata file contents keyed by path relative to the
        # .dist-info directory.
        self._files = files
        self.info_location = info_location

    @classmethod
    def from_zipfile(
        cls,
        zf: zipfile.ZipFile,
        name: str,
        location: str,
    ) -> "WheelDistribution":
        """Eagerly read the ``*.dist-info`` directory out of an open wheel."""
        info_dir, _ = parse_wheel(zf, name)
        paths = (
            (name, pathlib.PurePosixPath(name.split("/", 1)[-1]))
            for name in zf.namelist()
            if name.startswith(f"{info_dir}/")
        )
        files = {
            relpath: read_wheel_metadata_file(zf, fullpath)
            for fullpath, relpath in paths
        }
        info_location = pathlib.PurePosixPath(location, info_dir)
        return cls(files, info_location)

    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
        # Only allow iterating through the metadata directory.
        if pathlib.PurePosixPath(str(path)) in self._files:
            return iter(self._files)
        raise FileNotFoundError(path)

    def read_text(self, filename: str) -> Optional[str]:
        """Return the decoded text of a metadata file, or None if absent.

        :raises UnsupportedWheel: If the file exists but is not valid UTF-8.
        """
        try:
            data = self._files[pathlib.PurePosixPath(filename)]
        except KeyError:
            return None
        try:
            text = data.decode("utf-8")
        except UnicodeDecodeError as e:
            wheel = self.info_location.parent
            # Fix: the message previously said "(unknown) file" even though
            # the offending file's name is known; include it so the error is
            # actionable.
            error = f"Error decoding metadata for {wheel}: {e} in {filename} file"
            raise UnsupportedWheel(error)
        return text
|  | ||||
|  | ||||
class Distribution(BaseDistribution):
    """A distribution backed by the ``importlib.metadata`` machinery.

    Wraps an ``importlib.metadata.Distribution`` along with the two locations
    pip cares about: the metadata directory and the installed location.
    Either location may be None (e.g. a metadata-only distribution built from
    raw METADATA bytes has no installed location).
    """

    def __init__(
        self,
        dist: importlib.metadata.Distribution,
        info_location: Optional[BasePath],
        installed_location: Optional[BasePath],
    ) -> None:
        # The wrapped importlib.metadata distribution object.
        self._dist = dist
        # Path to the metadata directory (e.g. ``*.dist-info``), if known.
        self._info_location = info_location
        # Path the distribution was installed under, if known.
        self._installed_location = installed_location

    @classmethod
    def from_directory(cls, directory: str) -> BaseDistribution:
        """Load a distribution from an on-disk metadata directory."""
        info_location = pathlib.Path(directory)
        dist = importlib.metadata.Distribution.at(info_location)
        return cls(dist, info_location, info_location.parent)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        """Build a metadata-only distribution from raw METADATA bytes.

        The bytes are written into a globally-managed temporary directory so
        ``importlib.metadata`` can read them back. The resulting distribution
        has no installed location.
        """
        # Generate temp dir to contain the metadata file, and write the file contents.
        temp_dir = pathlib.Path(
            TempDirectory(kind="metadata", globally_managed=True).path
        )
        metadata_path = temp_dir / "METADATA"
        metadata_path.write_bytes(metadata_contents)
        # Construct dist pointing to the newly created directory.
        dist = importlib.metadata.Distribution.at(metadata_path.parent)
        return cls(dist, metadata_path.parent, None)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        """Load a distribution by reading metadata out of a wheel archive.

        :raises InvalidWheel: If the wheel is not a valid ZIP archive.
        :raises UnsupportedWheel: If the wheel's metadata cannot be used.
        """
        try:
            with wheel.as_zipfile() as zf:
                dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))

    @property
    def location(self) -> Optional[str]:
        # The directory containing the metadata directory, if known.
        if self._info_location is None:
            return None
        return str(self._info_location.parent)

    @property
    def info_location(self) -> Optional[str]:
        # The metadata directory itself, if known.
        if self._info_location is None:
            return None
        return str(self._info_location)

    @property
    def installed_location(self) -> Optional[str]:
        # Normalized installed path, if known.
        if self._installed_location is None:
            return None
        return normalize_path(str(self._installed_location))

    def _get_dist_name_from_location(self) -> Optional[str]:
        """Try to get the name from the metadata directory name.

        This is much faster than reading metadata.
        """
        if self._info_location is None:
            return None
        stem, suffix = os.path.splitext(self._info_location.name)
        if suffix not in (".dist-info", ".egg-info"):
            return None
        # Metadata directory names look like ``name-version.dist-info``.
        return stem.split("-", 1)[0]

    @property
    def canonical_name(self) -> NormalizedName:
        # Prefer the cheap directory-derived name; fall back to metadata.
        name = self._get_dist_name_from_location() or get_dist_name(self._dist)
        return canonicalize_name(name)

    @property
    def version(self) -> DistributionVersion:
        return parse_version(self._dist.version)

    def is_file(self, path: InfoPath) -> bool:
        # read_text() returns None (rather than raising) for a missing file.
        return self._dist.read_text(str(path)) is not None

    def iter_distutils_script_names(self) -> Iterator[str]:
        # A distutils installation is always "flat" (not in e.g. egg form), so
        # if this distribution's info location is NOT a pathlib.Path (but e.g.
        # zipfile.Path), it can never contain any distutils scripts.
        if not isinstance(self._info_location, pathlib.Path):
            return
        for child in self._info_location.joinpath("scripts").iterdir():
            yield child.name

    def read_text(self, path: InfoPath) -> str:
        """Return the text content of a metadata file.

        :raises FileNotFoundError: If the file does not exist.
        """
        content = self._dist.read_text(str(path))
        if content is None:
            raise FileNotFoundError(path)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
        return self._dist.entry_points

    def _metadata_impl(self) -> email.message.Message:
        # From Python 3.10+, importlib.metadata declares PackageMetadata as the
        # return type. This protocol is unfortunately a disaster now and misses
        # a ton of fields that we need, including get() and get_payload(). We
        # rely on the implementation that the object is actually a Message now,
        # until upstream can improve the protocol. (python/cpython#94952)
        return cast(email.message.Message, self._dist.metadata)

    def iter_provided_extras(self) -> Iterable[str]:
        # Normalize extra names with safe_extra() so they compare equal to
        # the way dist-info markers are written.
        return (
            safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
        )

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Yield requirements that apply with the given extras activated.

        A requirement is yielded when it has no marker, when its marker holds
        with an empty extra (only considered if *extras* is empty), or when
        its marker holds for any of the requested extras.
        """
        contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
        for req_string in self.metadata.get_all("Requires-Dist", []):
            req = Requirement(req_string)
            if not req.marker:
                yield req
            elif not extras and req.marker.evaluate({"extra": ""}):
                yield req
            elif any(req.marker.evaluate(context) for context in contexts):
                yield req
| @ -0,0 +1,188 @@ | ||||
| import functools | ||||
| import importlib.metadata | ||||
| import logging | ||||
| import os | ||||
| import pathlib | ||||
| import sys | ||||
| import zipfile | ||||
| import zipimport | ||||
| from typing import Iterator, List, Optional, Sequence, Set, Tuple | ||||
|  | ||||
| from pip._vendor.packaging.utils import NormalizedName, canonicalize_name | ||||
|  | ||||
| from pip._internal.metadata.base import BaseDistribution, BaseEnvironment | ||||
| from pip._internal.models.wheel import Wheel | ||||
| from pip._internal.utils.deprecation import deprecated | ||||
| from pip._internal.utils.filetypes import WHEEL_EXTENSION | ||||
|  | ||||
| from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location | ||||
| from ._dists import Distribution | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def _looks_like_wheel(location: str) -> bool:
    """Whether *location* points to an actual wheel file on disk.

    Checks the extension, that the path is a regular file, that the file name
    matches the wheel naming pattern, and that the file is a ZIP archive.
    """
    return (
        location.endswith(WHEEL_EXTENSION)
        and os.path.isfile(location)
        and Wheel.wheel_file_re.match(os.path.basename(location)) is not None
        and zipfile.is_zipfile(location)
    )
|  | ||||
|  | ||||
class _DistributionFinder:
    """Finder to locate distributions.

    The main purpose of this class is to memoize found distributions' names,
    so only one distribution is returned for each package name. A lot of pip
    code assumes this (because it is setuptools's behavior), and not doing the
    same can potentially cause a distribution in lower precedence path to
    override a higher precedence one if the caller is not careful.

    Eventually we probably want to make it possible to see lower precedence
    installations as well. It's a useful feature, after all.
    """

    # A found distribution paired with the path to its metadata directory
    # (None when the distribution does not exist on disk).
    FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]

    def __init__(self) -> None:
        # Canonical names already yielded; first find wins (see class doc).
        self._found_names: Set[NormalizedName] = set()

    def _find_impl(self, location: str) -> Iterator[FoundResult]:
        """Find distributions in a location."""
        # Skip looking inside a wheel. Since a package inside a wheel is not
        # always valid (due to .data directories etc.), its .dist-info entry
        # should not be considered an installed distribution.
        if _looks_like_wheel(location):
            return
        # To know exactly where we find a distribution, we have to feed in the
        # paths one by one, instead of dumping the list to importlib.metadata.
        for dist in importlib.metadata.distributions(path=[location]):
            info_location = get_info_location(dist)
            try:
                raw_name = get_dist_name(dist)
            except BadMetadata as e:
                logger.warning("Skipping %s due to %s", info_location, e.reason)
                continue
            normalized_name = canonicalize_name(raw_name)
            # Memoization: a name found earlier shadows later duplicates.
            if normalized_name in self._found_names:
                continue
            self._found_names.add(normalized_name)
            yield dist, info_location

    def find(self, location: str) -> Iterator[BaseDistribution]:
        """Find distributions in a location.

        The path can be either a directory, or a ZIP archive.
        """
        for dist, info_location in self._find_impl(location):
            if info_location is None:
                installed_location: Optional[BasePath] = None
            else:
                installed_location = info_location.parent
            yield Distribution(dist, info_location, installed_location)

    def find_linked(self, location: str) -> Iterator[BaseDistribution]:
        """Read location in egg-link files and return distributions in there.

        The path should be a directory; otherwise this returns nothing. This
        follows how setuptools does this for compatibility. The first non-empty
        line in the egg-link is read as a path (resolved against the egg-link's
        containing directory if relative). Distributions found at that linked
        location are returned.
        """
        path = pathlib.Path(location)
        if not path.is_dir():
            return
        for child in path.iterdir():
            if child.suffix != ".egg-link":
                continue
            with child.open() as f:
                lines = (line.strip() for line in f)
                target_rel = next((line for line in lines if line), "")
            if not target_rel:
                continue
            # Resolve the link target relative to the egg-link's directory.
            target_location = str(path.joinpath(target_rel))
            for dist, info_location in self._find_impl(target_location):
                yield Distribution(dist, info_location, path)

    def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
        """Find ``*.egg`` entries directly inside a directory.

        Imports pkg_resources lazily so the common (no-egg) case does not pay
        its import cost.
        """
        from pip._vendor.pkg_resources import find_distributions

        from pip._internal.metadata import pkg_resources as legacy

        with os.scandir(location) as it:
            for entry in it:
                if not entry.name.endswith(".egg"):
                    continue
                for dist in find_distributions(entry.path):
                    yield legacy.Distribution(dist)

    def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
        """Find eggs inside a ZIP archive, via zipimport + pkg_resources."""
        from pip._vendor.pkg_resources import find_eggs_in_zip

        from pip._internal.metadata import pkg_resources as legacy

        try:
            importer = zipimport.zipimporter(location)
        except zipimport.ZipImportError:
            # Not a usable ZIP archive; nothing to find.
            return
        for dist in find_eggs_in_zip(importer, location):
            yield legacy.Distribution(dist)

    def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
        """Find eggs in a location.

        This actually uses the old *pkg_resources* backend. We likely want to
        deprecate this so we can eventually remove the *pkg_resources*
        dependency entirely. Before that, this should first emit a deprecation
        warning for some versions when using the fallback since importing
        *pkg_resources* is slow for those who don't need it.
        """
        if os.path.isdir(location):
            yield from self._find_eggs_in_dir(location)
        if zipfile.is_zipfile(location):
            yield from self._find_eggs_in_zip(location)
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)  # Warn about each location exactly once.
def _emit_egg_deprecation(location: Optional[str]) -> None:
    """Emit a deprecation warning for an egg install at *location*."""
    deprecated(
        gone_in="23.3",
        reason=f"Loading egg at {location} is deprecated.",
        replacement="to use pip for package installation.",
    )
|  | ||||
|  | ||||
class Environment(BaseEnvironment):
    """A ``BaseEnvironment`` over a list of search paths, backed by
    ``importlib.metadata`` (with a pkg_resources fallback for eggs)."""

    def __init__(self, paths: Sequence[str]) -> None:
        self._paths = paths

    @classmethod
    def default(cls) -> BaseEnvironment:
        """Environment over the running interpreter's ``sys.path``."""
        return cls(sys.path)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        """Environment over *paths*, falling back to ``sys.path`` for None."""
        if paths is None:
            paths = sys.path
        return cls(paths)

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        finder = _DistributionFinder()
        for entry in self._paths:
            yield from finder.find(entry)
            for egg_dist in finder.find_eggs(entry):
                # Warn (once per location) that egg support is going away.
                _emit_egg_deprecation(egg_dist.location)
                yield egg_dist
            # This must go last because that's how pkg_resources tie-breaks.
            yield from finder.find_linked(entry)

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Return the installed distribution matching *name*, if any."""
        canonical = canonicalize_name(name)
        for dist in self.iter_all_distributions():
            if dist.canonical_name == canonical:
                return dist
        return None
| @ -0,0 +1,270 @@ | ||||
| import email.message | ||||
| import email.parser | ||||
| import logging | ||||
| import os | ||||
| import zipfile | ||||
| from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional | ||||
|  | ||||
| from pip._vendor import pkg_resources | ||||
| from pip._vendor.packaging.requirements import Requirement | ||||
| from pip._vendor.packaging.utils import NormalizedName, canonicalize_name | ||||
| from pip._vendor.packaging.version import parse as parse_version | ||||
|  | ||||
| from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel | ||||
| from pip._internal.utils.egg_link import egg_link_path_from_location | ||||
| from pip._internal.utils.misc import display_path, normalize_path | ||||
| from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file | ||||
|  | ||||
| from .base import ( | ||||
|     BaseDistribution, | ||||
|     BaseEntryPoint, | ||||
|     BaseEnvironment, | ||||
|     DistributionVersion, | ||||
|     InfoPath, | ||||
|     Wheel, | ||||
| ) | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class EntryPoint(NamedTuple):
    """A single entry point parsed from a distribution's metadata.

    ``value`` is the right-hand side of the ``name = value`` definition and
    ``group`` is the section the entry point was declared under.
    """

    name: str
    value: str
    group: str
|  | ||||
|  | ||||
class InMemoryMetadata:
    """IMetadataProvider that reads metadata files from a dictionary.

    This also maps metadata decoding exceptions to our internal exception type.
    """

    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
        # *wheel_name* is only used to give decoding errors a useful origin.
        self._metadata = metadata
        self._wheel_name = wheel_name

    def has_metadata(self, name: str) -> bool:
        """Whether a metadata entry called *name* exists."""
        return name in self._metadata

    def get_metadata(self, name: str) -> str:
        """Return the decoded text of metadata entry *name*."""
        raw = self._metadata[name]
        try:
            return raw.decode()
        except UnicodeDecodeError as e:
            # Augment the default error with the origin of the file.
            raise UnsupportedWheel(
                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
            )

    def get_metadata_lines(self, name: str) -> Iterable[str]:
        """Yield the non-blank, non-comment lines of entry *name*."""
        return pkg_resources.yield_lines(self.get_metadata(name))

    def metadata_isdir(self, name: str) -> bool:
        # The in-memory mapping is flat; there are never subdirectories.
        return False

    def metadata_listdir(self, name: str) -> List[str]:
        # No subdirectories, so any listing is empty.
        return []

    def run_script(self, script_name: str, namespace: str) -> None:
        # Script execution is not supported for in-memory metadata.
        pass
|  | ||||
|  | ||||
class Distribution(BaseDistribution):
    """A ``BaseDistribution`` implementation backed by ``pkg_resources``."""

    def __init__(self, dist: pkg_resources.Distribution) -> None:
        self._dist = dist

    @classmethod
    def from_directory(cls, directory: str) -> BaseDistribution:
        """Load a distribution from an on-disk ``.egg-info``/``.dist-info`` dir."""
        dist_dir = directory.rstrip(os.sep)

        # Build a PathMetadata object, from path to metadata. :wink:
        base_dir, dist_dir_name = os.path.split(dist_dir)
        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

        # Determine the correct Distribution object type.
        if dist_dir.endswith(".egg-info"):
            dist_cls = pkg_resources.Distribution
            dist_name = os.path.splitext(dist_dir_name)[0]
        else:
            assert dist_dir.endswith(".dist-info")
            dist_cls = pkg_resources.DistInfoDistribution
            # A .dist-info directory is "<name>-<version>"; keep only the name.
            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
        return cls(dist)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        """Build a distribution from raw METADATA bytes, without any files on disk."""
        metadata_dict = {
            "METADATA": metadata_contents,
        }
        dist = pkg_resources.DistInfoDistribution(
            location=filename,
            metadata=InMemoryMetadata(metadata_dict, filename),
            project_name=project_name,
        )
        return cls(dist)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        """Build a distribution from a wheel's ``.dist-info`` contents.

        :raises InvalidWheel: if the wheel is not a valid zip file.
        :raises UnsupportedWheel: if the wheel's metadata cannot be parsed.
        """
        try:
            with wheel.as_zipfile() as zf:
                info_dir, _ = parse_wheel(zf, name)
                # Map each file under the .dist-info dir by its name inside it.
                metadata_dict = {
                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
                    for path in zf.namelist()
                    if path.startswith(f"{info_dir}/")
                }
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        dist = pkg_resources.DistInfoDistribution(
            location=wheel.location,
            metadata=InMemoryMetadata(metadata_dict, wheel.location),
            project_name=name,
        )
        return cls(dist)

    @property
    def location(self) -> Optional[str]:
        # Delegates directly to pkg_resources' notion of location.
        return self._dist.location

    @property
    def installed_location(self) -> Optional[str]:
        # Prefer an egg-link target when one exists for this project name.
        egg_link = egg_link_path_from_location(self.raw_name)
        if egg_link:
            location = egg_link
        elif self.location:
            location = self.location
        else:
            return None
        return normalize_path(location)

    @property
    def info_location(self) -> Optional[str]:
        # pkg_resources exposes the metadata directory as ``egg_info``.
        return self._dist.egg_info

    @property
    def installed_by_distutils(self) -> bool:
        # A distutils-installed distribution is provided by FileMetadata. This
        # provider has a "path" attribute not present anywhere else. Not the
        # best introspection logic, but pip has been doing this for a long time.
        try:
            return bool(self._dist._provider.path)
        except AttributeError:
            return False

    @property
    def canonical_name(self) -> NormalizedName:
        return canonicalize_name(self._dist.project_name)

    @property
    def version(self) -> DistributionVersion:
        return parse_version(self._dist.version)

    def is_file(self, path: InfoPath) -> bool:
        """Whether the metadata file *path* exists for this distribution."""
        return self._dist.has_metadata(str(path))

    def iter_distutils_script_names(self) -> Iterator[str]:
        """Yield the file names under the metadata ``scripts`` directory."""
        yield from self._dist.metadata_listdir("scripts")

    def read_text(self, path: InfoPath) -> str:
        """Return the content of metadata file *path*.

        :raises FileNotFoundError: if the metadata file does not exist.
        :raises NoneMetadataError: if pkg_resources reports the file as
            present but returns None for its content.
        """
        name = str(path)
        if not self._dist.has_metadata(name):
            raise FileNotFoundError(name)
        content = self._dist.get_metadata(name)
        if content is None:
            raise NoneMetadataError(self, name)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        for group, entries in self._dist.get_entry_map().items():
            for name, entry_point in entries.items():
                # Re-parse the rendered "name = value" form instead of poking
                # at pkg_resources' EntryPoint internals.
                name, _, value = str(entry_point).partition("=")
                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)

    def _metadata_impl(self) -> email.message.Message:
        """
        :raises NoneMetadataError: if the distribution reports `has_metadata()`
            True but `get_metadata()` returns None.
        """
        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
            metadata_name = "METADATA"
        else:
            metadata_name = "PKG-INFO"
        try:
            metadata = self.read_text(metadata_name)
        except FileNotFoundError:
            if self.location:
                displaying_path = display_path(self.location)
            else:
                displaying_path = repr(self.location)
            logger.warning("No metadata found in %s", displaying_path)
            metadata = ""
        feed_parser = email.parser.FeedParser()
        feed_parser.feed(metadata)
        return feed_parser.close()

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Yield the distribution's requirements for the given *extras*."""
        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
            extras = frozenset(extras).intersection(self._dist.extras)
        return self._dist.requires(extras)

    def iter_provided_extras(self) -> Iterable[str]:
        """Yield the extras this distribution declares."""
        return self._dist.extras
|  | ||||
|  | ||||
class Environment(BaseEnvironment):
    """An environment backed by a ``pkg_resources.WorkingSet``."""

    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
        self._ws = ws

    @classmethod
    def default(cls) -> BaseEnvironment:
        """Wrap the global pkg_resources working set."""
        return cls(pkg_resources.working_set)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        """Build an environment from an explicit list of paths."""
        return cls(pkg_resources.WorkingSet(paths))

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        return (Distribution(dist) for dist in self._ws)

    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Find a distribution matching the ``name`` in the environment.

        This searches from *all* distributions available in the environment, to
        match the behavior of ``pkg_resources.get_distribution()``.
        """
        target = canonicalize_name(name)
        candidates = (
            dist
            for dist in self.iter_all_distributions()
            if dist.canonical_name == target
        )
        return next(candidates, None)

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        # Search the distribution by looking through the working set.
        found = self._search_distribution(name)
        if found:
            return found

        # If distribution could not be found, call working_set.require to
        # update the working set, and try to find the distribution again.
        # This might happen for e.g. when you install a package twice, once
        # using setup.py develop and again using setup.py install. Now when
        # running pip uninstall twice, the package gets removed from the
        # working set in the first uninstall, so we have to populate the
        # working set again so that pip knows about it and the packages gets
        # picked up and is successfully uninstalled the second time too.
        try:
            # We didn't pass in any version specifiers, so this can never
            # raise pkg_resources.VersionConflict.
            self._ws.require(name)
        except pkg_resources.DistributionNotFound:
            return None
        return self._search_distribution(name)
| @ -0,0 +1,2 @@ | ||||
| """A package that contains models that represent entities. | ||||
| """ | ||||
| @ -0,0 +1,34 @@ | ||||
| from pip._vendor.packaging.version import parse as parse_version | ||||
|  | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.utils.models import KeyBasedCompareMixin | ||||
|  | ||||
|  | ||||
class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation."""

    __slots__ = ["name", "version", "link"]

    def __init__(self, name: str, version: str, link: Link) -> None:
        self.name = name
        self.version = parse_version(version)
        self.link = link

        # The (name, parsed version, link) triple is the comparison key used
        # by KeyBasedCompareMixin.
        super().__init__(
            key=(self.name, self.version, self.link),
            defining_class=InstallationCandidate,
        )

    def __repr__(self) -> str:
        return f"<InstallationCandidate({self.name!r}, {self.version!r}, {self.link!r})>"

    def __str__(self) -> str:
        return f"{self.name!r} candidate (version {self.version} at {self.link})"
| @ -0,0 +1,237 @@ | ||||
| """ PEP 610 """ | ||||
| import json | ||||
| import re | ||||
| import urllib.parse | ||||
| from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union | ||||
|  | ||||
| __all__ = [ | ||||
|     "DirectUrl", | ||||
|     "DirectUrlValidationError", | ||||
|     "DirInfo", | ||||
|     "ArchiveInfo", | ||||
|     "VcsInfo", | ||||
| ] | ||||
|  | ||||
| T = TypeVar("T") | ||||
|  | ||||
| DIRECT_URL_METADATA_NAME = "direct_url.json" | ||||
| ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") | ||||
|  | ||||
|  | ||||
class DirectUrlValidationError(Exception):
    """Raised when a PEP 610 direct URL structure fails validation."""

    pass
|  | ||||
|  | ||||
| def _get( | ||||
|     d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None | ||||
| ) -> Optional[T]: | ||||
|     """Get value from dictionary and verify expected type.""" | ||||
|     if key not in d: | ||||
|         return default | ||||
|     value = d[key] | ||||
|     if not isinstance(value, expected_type): | ||||
|         raise DirectUrlValidationError( | ||||
|             "{!r} has unexpected type for {} (expected {})".format( | ||||
|                 value, key, expected_type | ||||
|             ) | ||||
|         ) | ||||
|     return value | ||||
|  | ||||
|  | ||||
def _get_required(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> T:
    """Like :func:`_get`, but a missing/None result is a validation error."""
    result = _get(d, expected_type, key, default)
    if result is not None:
        return result
    raise DirectUrlValidationError(f"{key} must have a value")
|  | ||||
|  | ||||
| def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType": | ||||
|     infos = [info for info in infos if info is not None] | ||||
|     if not infos: | ||||
|         raise DirectUrlValidationError( | ||||
|             "missing one of archive_info, dir_info, vcs_info" | ||||
|         ) | ||||
|     if len(infos) > 1: | ||||
|         raise DirectUrlValidationError( | ||||
|             "more than one of archive_info, dir_info, vcs_info" | ||||
|         ) | ||||
|     assert infos[0] is not None | ||||
|     return infos[0] | ||||
|  | ||||
|  | ||||
| def _filter_none(**kwargs: Any) -> Dict[str, Any]: | ||||
|     """Make dict excluding None values.""" | ||||
|     return {k: v for k, v in kwargs.items() if v is not None} | ||||
|  | ||||
|  | ||||
class VcsInfo:
    """PEP 610 ``vcs_info``: a VCS source pinned to an exact commit."""

    name = "vcs_info"

    def __init__(
        self,
        vcs: str,
        commit_id: str,
        requested_revision: Optional[str] = None,
    ) -> None:
        self.vcs = vcs
        self.commit_id = commit_id
        self.requested_revision = requested_revision

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
        """Build a VcsInfo from a parsed JSON dict; None passes through."""
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
        )

    def _to_dict(self) -> Dict[str, Any]:
        """Serialize, dropping keys whose value is None."""
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
        )
|  | ||||
|  | ||||
class ArchiveInfo:
    """PEP 610 ``archive_info``: describes an archive (sdist/wheel) source.

    Carries both the legacy single ``hash`` string (``"<name>=<value>"``) and
    the newer ``hashes`` mapping; assigning ``hash`` also updates ``hashes``.
    """

    name = "archive_info"

    def __init__(
        self,
        hash: Optional[str] = None,
        hashes: Optional[Dict[str, str]] = None,
    ) -> None:
        # set hashes before hash, since the hash setter will further populate hashes
        self.hashes = hashes
        self.hash = hash

    @property
    def hash(self) -> Optional[str]:
        return self._hash

    @hash.setter
    def hash(self, value: Optional[str]) -> None:
        if value is not None:
            # Auto-populate the hashes key to upgrade to the new format automatically.
            # We don't back-populate the legacy hash key from hashes.
            try:
                hash_name, hash_value = value.split("=", 1)
            except ValueError:
                # Re-raise as our validation error type.
                raise DirectUrlValidationError(
                    f"invalid archive_info.hash format: {value!r}"
                )
            if self.hashes is None:
                self.hashes = {hash_name: hash_value}
            elif hash_name not in self.hashes:
                # Copy before mutating so the caller-supplied mapping is untouched.
                self.hashes = self.hashes.copy()
                self.hashes[hash_name] = hash_value
        self._hash = value

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        """Build an ArchiveInfo from a parsed JSON dict; None passes through."""
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))

    def _to_dict(self) -> Dict[str, Any]:
        """Serialize, dropping keys whose value is None."""
        return _filter_none(hash=self.hash, hashes=self.hashes)
|  | ||||
|  | ||||
class DirInfo:
    """PEP 610 ``dir_info``: a local directory source, possibly editable."""

    name = "dir_info"

    def __init__(
        self,
        editable: bool = False,
    ) -> None:
        self.editable = editable

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        """Build a DirInfo from a parsed JSON dict; None passes through."""
        if d is None:
            return None
        return cls(editable=_get_required(d, bool, "editable", default=False))

    def _to_dict(self) -> Dict[str, Any]:
        # Emit "editable" only when True: `False or None` collapses to None,
        # which _filter_none then drops.
        return _filter_none(editable=self.editable or None)
|  | ||||
|  | ||||
| InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] | ||||
|  | ||||
|  | ||||
class DirectUrl:
    """In-memory representation of a PEP 610 ``direct_url.json`` document."""

    def __init__(
        self,
        url: str,
        info: InfoType,
        subdirectory: Optional[str] = None,
    ) -> None:
        self.url = url
        # Exactly one of ArchiveInfo, DirInfo, or VcsInfo.
        self.info = info
        self.subdirectory = subdirectory

    def _remove_auth_from_netloc(self, netloc: str) -> str:
        """Strip a ``user:password@`` prefix from *netloc*, with exemptions.

        The literal ``git`` user of a git URL and credentials written as
        environment-variable references (matched by ``ENV_VAR_RE``) are kept.
        """
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self) -> str:
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib.parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib.parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self) -> None:
        # Round-trip through the dict form; from_dict raises
        # DirectUrlValidationError on any structural problem.
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
        """Deserialize, requiring exactly one of archive/dir/vcs info."""
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict, redacting URL credentials."""
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        # The info object's own key ("archive_info", "dir_info" or "vcs_info").
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s: str) -> "DirectUrl":
        return cls.from_dict(json.loads(s))

    def to_json(self) -> str:
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        # Only a dir_info source can be editable.
        return isinstance(self.info, DirInfo) and self.info.editable
| @ -0,0 +1,80 @@ | ||||
| from typing import FrozenSet, Optional, Set | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.exceptions import CommandError | ||||
|  | ||||
|  | ||||
class FormatControl:
    """Helper for managing formats from which a package can be installed."""

    __slots__ = ["no_binary", "only_binary"]

    def __init__(
        self,
        no_binary: Optional[Set[str]] = None,
        only_binary: Optional[Set[str]] = None,
    ) -> None:
        # Keep the caller's set objects when given; only default to new sets.
        self.no_binary = set() if no_binary is None else no_binary
        self.only_binary = set() if only_binary is None else only_binary

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, self.__class__):
            return NotImplemented

        if self.__slots__ != other.__slots__:
            return False

        return all(getattr(self, attr) == getattr(other, attr) for attr in self.__slots__)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"

    @staticmethod
    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
        """Apply a comma-separated --no-binary/--only-binary value to the sets."""
        if value.startswith("-"):
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        entries = value.split(",")
        while ":all:" in entries:
            other.clear()
            target.clear()
            target.add(":all:")
            del entries[: entries.index(":all:") + 1]
            # If no later :none: reverts it, :all: already covers everything.
            if ":none:" not in entries:
                return
        for entry in entries:
            if entry == ":none:":
                target.clear()
                continue
            canonical = canonicalize_name(entry)
            other.discard(canonical)
            target.add(canonical)

    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
        """Return which of {"binary", "source"} are allowed for a package."""
        allowed = {"binary", "source"}
        # Per-package entries take precedence over the :all: wildcards.
        if canonical_name in self.only_binary:
            allowed.discard("source")
        elif canonical_name in self.no_binary:
            allowed.discard("binary")
        elif ":all:" in self.only_binary:
            allowed.discard("source")
        elif ":all:" in self.no_binary:
            allowed.discard("binary")
        return frozenset(allowed)

    def disallow_binaries(self) -> None:
        """Equivalent to passing ``--no-binary :all:``."""
        self.handle_mutual_excludes(":all:", self.no_binary, self.only_binary)
| @ -0,0 +1,28 @@ | ||||
| import urllib.parse | ||||
|  | ||||
|  | ||||
class PackageIndex:
    """Represents a Package Index and provides easier access to endpoints"""

    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]

    def __init__(self, url: str, file_storage_domain: str) -> None:
        super().__init__()
        self.url = url
        self.netloc = urllib.parse.urlsplit(url).netloc

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

        # Pre-compute the commonly used endpoint URLs.
        self.simple_url = self._url_for_path("simple")
        self.pypi_url = self._url_for_path("pypi")

    def _url_for_path(self, path: str) -> str:
        """Join *path* onto the index root URL."""
        return urllib.parse.urljoin(self.url, path)
|  | ||||
|  | ||||
# Canonical index objects for the production and test PyPI deployments.
PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
TestPyPI = PackageIndex(
    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
)
| @ -0,0 +1,53 @@ | ||||
| from typing import Any, Dict, Sequence | ||||
|  | ||||
| from pip._vendor.packaging.markers import default_environment | ||||
|  | ||||
| from pip import __version__ | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
|  | ||||
|  | ||||
class InstallationReport:
    """Builds the JSON installation report for a set of install requirements."""

    def __init__(self, install_requirements: Sequence[InstallRequirement]):
        self._install_requirements = install_requirements

    @classmethod
    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
        """Serialize one install requirement into the report's dict form."""
        assert ireq.download_info, f"No download_info for {ireq}"
        res = {
            # PEP 610 json for the download URL. download_info.archive_info.hashes may
            # be absent when the requirement was installed from the wheel cache
            # and the cache entry was populated by an older pip version that did not
            # record origin.json.
            "download_info": ireq.download_info.to_dict(),
            # is_direct is true if the requirement was a direct URL reference (which
            # includes editable requirements), and false if the requirement was
            # downloaded from a PEP 503 index or --find-links.
            "is_direct": ireq.is_direct,
            # requested is true if the requirement was specified by the user (aka
            # top level requirement), and false if it was installed as a dependency of a
            # requirement. https://peps.python.org/pep-0376/#requested
            "requested": ireq.user_supplied,
            # PEP 566 json encoding for metadata
            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
            "metadata": ireq.get_dist().metadata_dict,
        }
        if ireq.user_supplied and ireq.extras:
            # For top level requirements, the list of requested extras, if any.
            # sorted() already returns a list, so wrapping it in list() is redundant.
            res["requested_extras"] = sorted(ireq.extras)
        return res

    def to_dict(self) -> Dict[str, Any]:
        """Return the complete installation report as a JSON-serializable dict."""
        return {
            "version": "1",
            "pip_version": __version__,
            "install": [
                self._install_req_to_dict(ireq) for ireq in self._install_requirements
            ],
            # https://peps.python.org/pep-0508/#environment-markers
            # TODO: currently, the resolver uses the default environment to evaluate
            # environment markers, so that is what we report here. In the future, it
            # should also take into account options such as --python-version or
            # --platform, perhaps under the form of an environment_override field?
            # https://github.com/pypa/pip/issues/11198
            "environment": default_environment(),
        }
| @ -0,0 +1,581 @@ | ||||
| import functools | ||||
| import itertools | ||||
| import logging | ||||
| import os | ||||
| import posixpath | ||||
| import re | ||||
| import urllib.parse | ||||
| from dataclasses import dataclass | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     Dict, | ||||
|     List, | ||||
|     Mapping, | ||||
|     NamedTuple, | ||||
|     Optional, | ||||
|     Tuple, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from pip._internal.utils.deprecation import deprecated | ||||
| from pip._internal.utils.filetypes import WHEEL_EXTENSION | ||||
| from pip._internal.utils.hashes import Hashes | ||||
| from pip._internal.utils.misc import ( | ||||
|     pairwise, | ||||
|     redact_auth_from_url, | ||||
|     split_auth_from_netloc, | ||||
|     splitext, | ||||
| ) | ||||
| from pip._internal.utils.models import KeyBasedCompareMixin | ||||
| from pip._internal.utils.urls import path_to_url, url_to_path | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from pip._internal.index.collector import IndexContent | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # Order matters, earlier hashes have a precedence over later hashes for what | ||||
| # we will pick to use. | ||||
| _SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5") | ||||
|  | ||||
|  | ||||
@dataclass(frozen=True)
class LinkHash:
    """A hash embedded in a link's URL fragment (e.g. ``#sha256=...``).

    `name` must be a member of `_SUPPORTED_HASHES`.

    This class can be converted to and from `ArchiveInfo`. While ArchiveInfo
    intends to be JSON-serializable to conform to PEP 610, this class holds the
    logic for parsing a hash name and value, and for checking that hash against
    a schema via `.is_hash_allowed()`.
    """

    name: str
    value: str

    # NB: the second group (.*) is deliberately not validated as a hex digest.
    # The raw string is kept and later checked against Hashes when hash
    # checking is actually needed; this keeps incorrect and malformed hashes
    # flowing through the same error path, which is easier to debug than
    # discarding invalid digests up front.
    _hash_url_fragment_re = re.compile(
        r"[#&]({choices})=([^&]*)".format(
            choices="|".join(re.escape(algorithm) for algorithm in _SUPPORTED_HASHES)
        ),
    )

    def __post_init__(self) -> None:
        # Reject unknown algorithms early.
        assert self.name in _SUPPORTED_HASHES

    @classmethod
    @functools.lru_cache(maxsize=None)
    def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
        """Search a string for a checksum algorithm name and encoded output value."""
        if (match := cls._hash_url_fragment_re.search(url)) is None:
            return None
        algorithm, digest = match.groups()
        return cls(name=algorithm, value=digest)

    def as_dict(self) -> Dict[str, str]:
        """Return a one-entry ``{name: value}`` mapping."""
        return {self.name: self.value}

    def as_hashes(self) -> Hashes:
        """Return a Hashes instance which checks only for the current hash."""
        return Hashes({self.name: [self.value]})

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the current hash is allowed by `hashes`.
        """
        return hashes is not None and hashes.is_hash_allowed(
            self.name, hex_digest=self.value
        )
|  | ||||
|  | ||||
@dataclass(frozen=True)
class MetadataFile:
    """Information about a core metadata file associated with a distribution."""

    # Mapping of hash algorithm name to digest, or None when the index
    # advertised the metadata file without supplying hashes.
    hashes: Optional[Dict[str, str]]

    def __post_init__(self) -> None:
        # Only algorithms pip knows how to verify may appear.
        if self.hashes is None:
            return
        for algorithm in self.hashes:
            assert algorithm in _SUPPORTED_HASHES
|  | ||||
|  | ||||
def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
    """Filter a hash mapping down to the algorithms pip can verify.

    Returns None when the input is None, or when nothing supported remains
    after filtering.
    """
    if hashes is None:
        return None
    filtered = {
        name: digest for name, digest in hashes.items() if name in _SUPPORTED_HASHES
    }
    return filtered or None
|  | ||||
|  | ||||
| def _clean_url_path_part(part: str) -> str: | ||||
|     """ | ||||
|     Clean a "part" of a URL path (i.e. after splitting on "@" characters). | ||||
|     """ | ||||
|     # We unquote prior to quoting to make sure nothing is double quoted. | ||||
|     return urllib.parse.quote(urllib.parse.unquote(part)) | ||||
|  | ||||
|  | ||||
| def _clean_file_url_path(part: str) -> str: | ||||
|     """ | ||||
|     Clean the first part of a URL path that corresponds to a local | ||||
|     filesystem path (i.e. the first part after splitting on "@" characters). | ||||
|     """ | ||||
|     # We unquote prior to quoting to make sure nothing is double quoted. | ||||
|     # Also, on Windows the path part might contain a drive letter which | ||||
|     # should not be quoted. On Linux where drive letters do not | ||||
|     # exist, the colon should be quoted. We rely on urllib.request | ||||
|     # to do the right thing here. | ||||
|     return urllib.request.pathname2url(urllib.request.url2pathname(part)) | ||||
|  | ||||
|  | ||||
# Path characters that must survive cleaning verbatim: "@" (used e.g. in VCS
# revision syntax) and the percent-encoded "/" ("%2F"), matched
# case-insensitively. The capturing group makes re.split keep the separators.
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
|  | ||||
|  | ||||
def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL, re-quoting each non-reserved segment.

    Reserved separators ("@", "%2F") are preserved so that revision strings
    in VCS URLs survive cleaning.
    """
    clean_func = _clean_file_url_path if is_local_path else _clean_url_path_part

    # re.split with a capturing group yields
    # [text, separator, text, separator, ..., text] (odd length).
    segments = _reserved_chars_re.split(path)

    cleaned = []
    for index in range(0, len(segments), 2):
        cleaned.append(clean_func(segments[index]))
        if index + 1 < len(segments):
            # Normalize %xx escapes (e.g. %2f -> %2F)
            cleaned.append(segments[index + 1].upper())

    return "".join(cleaned)
|  | ||||
|  | ||||
def _ensure_quoted_url(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Split the URL along the general structure
    # `scheme://netloc/path;parameters?query#fragment`.
    parsed = urllib.parse.urlparse(url)
    # An empty netloc means the URL names a local filesystem path, whose
    # first path component needs OS-aware cleaning.
    cleaned_path = _clean_url_path(parsed.path, is_local_path=not parsed.netloc)
    return urllib.parse.urlunparse(parsed._replace(path=cleaned_path))
|  | ||||
|  | ||||
class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL.

    Equality and hashing are keyed on the raw URL string (via
    KeyBasedCompareMixin).
    """

    __slots__ = [
        "_parsed_url",
        "_url",
        "_hashes",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "metadata_file_data",
        "cache_link_parsing",
        "egg_fragment",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "IndexContent"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        metadata_file_data: Optional[MetadataFile] = None,
        cache_link_parsing: bool = True,
        hashes: Optional[Mapping[str, str]] = None,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of IndexContent where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param metadata_file_data: the metadata attached to the file, or None if
            no such metadata is provided. This argument, if not None, indicates
            that a separate metadata file exists, and also optionally supplies
            hashes for that file.
        :param cache_link_parsing: A flag that is used elsewhere to determine
            whether resources retrieved from this link should be cached. PyPI
            URLs should generally have this set to False, for example.
        :param hashes: A mapping of hash names to digests to allow us to
            determine the validity of a download.
        """

        # The comes_from, requires_python, and metadata_file_data arguments are
        # only used by classmethods of this class, and are not used in client
        # code directly.

        # url can be a UNC windows share
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url

        # A hash embedded in the URL fragment takes precedence over an
        # identically-named entry in the `hashes` argument (it is merged in
        # last).
        link_hash = LinkHash.find_hash_url_fragment(url)
        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
        if hashes is None:
            self._hashes = hashes_from_link
        else:
            self._hashes = {**hashes, **hashes_from_link}

        self.comes_from = comes_from
        # Normalize the empty string to None.
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        self.metadata_file_data = metadata_file_data

        super().__init__(key=url, defining_class=Link)

        self.cache_link_parsing = cache_link_parsing
        self.egg_fragment = self._egg_fragment()

    @classmethod
    def from_json(
        cls,
        file_data: Dict[str, Any],
        page_url: str,
    ) -> Optional["Link"]:
        """
        Convert a PyPI JSON document from a simple repository page into a Link.

        Returns None when the entry has no "url" key.
        """
        file_url = file_data.get("url")
        if file_url is None:
            return None

        url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
        pyrequire = file_data.get("requires-python")
        yanked_reason = file_data.get("yanked")
        hashes = file_data.get("hashes", {})

        # PEP 714: Indexes must use the name core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = file_data.get("core-metadata")
        if metadata_info is None:
            metadata_info = file_data.get("dist-info-metadata")

        # The metadata info value may be a boolean, or a dict of hashes.
        if isinstance(metadata_info, dict):
            # The file exists, and hashes have been supplied
            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
        elif metadata_info:
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        else:
            # False or not present: the file does not exist
            metadata_file_data = None

        # The Link.yanked_reason expects an empty string instead of a boolean.
        if yanked_reason and not isinstance(yanked_reason, str):
            yanked_reason = ""
        # The Link.yanked_reason expects None instead of False.
        elif not yanked_reason:
            yanked_reason = None

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            hashes=hashes,
            metadata_file_data=metadata_file_data,
        )

    @classmethod
    def from_element(
        cls,
        anchor_attribs: Dict[str, Optional[str]],
        page_url: str,
        base_url: str,
    ) -> Optional["Link"]:
        """
        Convert an anchor element's attributes in a simple repository page to a Link.

        Returns None when the anchor has no (or an empty) href.
        """
        href = anchor_attribs.get("href")
        if not href:
            return None

        url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
        pyrequire = anchor_attribs.get("data-requires-python")
        yanked_reason = anchor_attribs.get("data-yanked")

        # PEP 714: Indexes must use the name data-core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = anchor_attribs.get("data-core-metadata")
        if metadata_info is None:
            metadata_info = anchor_attribs.get("data-dist-info-metadata")
        # The metadata info value may be the string "true", or a string of
        # the form "hashname=hashval"
        if metadata_info == "true":
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        elif metadata_info is None:
            # The file does not exist
            metadata_file_data = None
        else:
            # The file exists, and hashes have been supplied
            hashname, sep, hashval = metadata_info.partition("=")
            if sep == "=":
                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
            else:
                # Error - data is wrong. Treat as no hashes supplied.
                logger.debug(
                    "Index returned invalid data-dist-info-metadata value: %s",
                    metadata_info,
                )
                metadata_file_data = MetadataFile(None)

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            metadata_file_data=metadata_file_data,
        )

    def __str__(self) -> str:
        # Auth information is redacted so the link is safe to log/display.
        if self.requires_python:
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ""
        if self.comes_from:
            return "{} (from {}){}".format(
                redact_auth_from_url(self._url), self.comes_from, rp
            )
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f"<Link {self}>"

    @property
    def url(self) -> str:
        """The original URL string this Link was constructed from."""
        return self._url

    @property
    def filename(self) -> str:
        """The base name of the URL path, unquoted.

        Falls back to the netloc (with any auth stripped) when the path has
        no usable final component, e.g. for a bare "https://host/" URL.
        """
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            # (user_pass is deliberately discarded.)
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
    def file_path(self) -> str:
        """The local filesystem path this (file:) URL refers to."""
        return url_to_path(self.url)

    @property
    def scheme(self) -> str:
        """The URL scheme, e.g. "https" or "file"."""
        return self._parsed_url.scheme

    @property
    def netloc(self) -> str:
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self) -> str:
        """The unquoted path portion of the URL."""
        return urllib.parse.unquote(self._parsed_url.path)

    def splitext(self) -> Tuple[str, str]:
        """Split the URL's base name into (stem, extension)."""
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
        """The file extension of the URL's base name, including the dot."""
        return self.splitext()[1]

    @property
    def url_without_fragment(self) -> str:
        """The URL with the fragment portion removed."""
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    # Per PEP 508.
    _project_name_re = re.compile(
        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
    )

    def _egg_fragment(self) -> Optional[str]:
        """Extract the egg= fragment value, warning on non-PEP 508 names."""
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None

        # An egg fragment looks like a PEP 508 project name, along with
        # an optional extras specifier. Anything else is invalid.
        project_name = match.group(1)
        if not self._project_name_re.match(project_name):
            deprecated(
                reason=f"{self} contains an egg fragment with a non-PEP 508 name",
                replacement="to use the req @ url syntax, and remove the egg fragment",
                gone_in="25.0",
                issue=11617,
            )

        return project_name

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
        """The subdirectory= fragment value, or None when absent."""
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    def metadata_link(self) -> Optional["Link"]:
        """Return a link to the associated core metadata file (if any)."""
        if self.metadata_file_data is None:
            return None
        metadata_url = f"{self.url_without_fragment}.metadata"
        if self.metadata_file_data.hashes is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=self.metadata_file_data.hashes)

    def as_hashes(self) -> Hashes:
        """Return a Hashes instance covering all known hashes for this link."""
        return Hashes({k: [v] for k, v in self._hashes.items()})

    @property
    def hash(self) -> Optional[str]:
        """An arbitrary known hash digest (first in mapping order), or None."""
        return next(iter(self._hashes.values()), None)

    @property
    def hash_name(self) -> Optional[str]:
        """The algorithm name paired with `hash`, or None."""
        return next(iter(self._hashes), None)

    @property
    def show_url(self) -> str:
        """The base name of the URL with fragment and query stripped."""
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        """True if this is a file: URL."""
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        """True if this is a file: URL pointing at an existing directory."""
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self) -> bool:
        """True if the URL's extension is the wheel extension."""
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self) -> bool:
        """True if the URL scheme belongs to a registered VCS backend."""
        # Imported lazily to avoid an import cycle with pip._internal.vcs.
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self) -> bool:
        """True if the file has been yanked (PEP 592)."""
        return self.yanked_reason is not None

    @property
    def has_hash(self) -> bool:
        """True if at least one hash digest is known for this link."""
        return bool(self._hashes)

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed by `hashes`.
        """
        if hashes is None:
            return False
        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
|  | ||||
|  | ||||
class _CleanResult(NamedTuple):
    """Convert link for equivalency check.

    This is used in the resolver to check whether two URL-specified requirements
    likely point to the same distribution and can be considered equivalent. This
    equivalency logic avoids comparing URLs literally, which can be too strict
    (e.g. "a=1&b=2" vs "b=2&a=1") and produce unexpected conflicts for users.

    Currently this does three things:

    1. Drop the basic auth part. This is technically wrong since a server can
       serve different content based on auth, but if it does that, it is even
       impossible to guarantee two URLs without auth are equivalent, since
       the user can input different auth information when prompted. So the
       practical solution is to assume the auth doesn't affect the response.
    2. Parse the query to avoid the ordering issue. Note that ordering under the
       same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
       still considered different.
    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
       hash values, since it should have no impact on the downloaded content.
       Note that this drops the "egg=" part historically used to denote the
       requested project (and extras), which is wrong in the strictest sense,
       but too many people are supplying it inconsistently to cause superfluous
       resolution conflicts, so we choose to also ignore them.
    """

    # The split URL with auth stripped from netloc and query/fragment removed.
    parsed: urllib.parse.SplitResult
    # Parsed query string; ordering across different keys is irrelevant here.
    query: Dict[str, List[str]]
    # First subdirectory= fragment value, or "" when absent.
    subdirectory: str
    # First fragment hash value per supported algorithm.
    hashes: Dict[str, str]
|  | ||||
|  | ||||
def _clean_link(link: Link) -> _CleanResult:
    """Normalize a link into the pieces used for equivalency comparison."""
    parsed = link._parsed_url
    # Drop any basic-auth credentials from the netloc.
    netloc = parsed.netloc.rsplit("@", 1)[-1]
    # According to RFC 8089, an empty host in file: means localhost.
    if parsed.scheme == "file" and not netloc:
        netloc = "localhost"
    fragment = urllib.parse.parse_qs(parsed.fragment)
    if "egg" in fragment:
        logger.debug("Ignoring egg= fragment in %s", link)
    # If there are multiple subdirectory values, use the first one.
    # This matches the behavior of Link.subdirectory_fragment.
    subdirectory_values = fragment.get("subdirectory")
    subdirectory = subdirectory_values[0] if subdirectory_values else ""
    # If there are multiple hash values under the same algorithm, use the
    # first one. This matches the behavior of Link.hash_value.
    hashes = {
        algorithm: fragment[algorithm][0]
        for algorithm in _SUPPORTED_HASHES
        if algorithm in fragment
    }
    return _CleanResult(
        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
        query=urllib.parse.parse_qs(parsed.query),
        subdirectory=subdirectory,
        hashes=hashes,
    )
|  | ||||
|  | ||||
@functools.lru_cache(maxsize=None)
def links_equivalent(link1: Link, link2: Link) -> bool:
    """Return True if the two links likely point to the same distribution.

    Comparison is done on the cleaned forms (see _CleanResult) rather than
    raw URLs; results are cached since the resolver may compare the same
    pair repeatedly.
    """
    return _clean_link(link1) == _clean_link(link2)
| @ -0,0 +1,31 @@ | ||||
| """ | ||||
| For types associated with installation schemes. | ||||
|  | ||||
| For a general overview of available schemes and their context, see | ||||
| https://docs.python.org/3/install/index.html#alternate-installation. | ||||
| """ | ||||
|  | ||||
|  | ||||
# Attribute names of Scheme, in canonical order.
SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]


class Scheme:
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    # Slot names intentionally mirror SCHEME_KEYS.
    __slots__ = SCHEME_KEYS

    def __init__(
        self,
        platlib: str,
        purelib: str,
        headers: str,
        scripts: str,
        data: str,
    ) -> None:
        # The parameter order matches SCHEME_KEYS, so assign positionally.
        for key, location in zip(
            SCHEME_KEYS, (platlib, purelib, headers, scripts, data)
        ):
            setattr(self, key, location)
| @ -0,0 +1,132 @@ | ||||
| import itertools | ||||
| import logging | ||||
| import os | ||||
| import posixpath | ||||
| import urllib.parse | ||||
| from typing import List | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.models.index import PyPI | ||||
| from pip._internal.utils.compat import has_tls | ||||
| from pip._internal.utils.misc import normalize_path, redact_auth_from_url | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class SearchScope:

    """
    Encapsulates the locations that pip is configured to search.
    """

    __slots__ = ["find_links", "index_urls", "no_index"]

    @classmethod
    def create(
        cls,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> "SearchScope":
        """
        Create a SearchScope object after normalizing the `find_links`.
        """

        def _maybe_expand(link: str) -> str:
            # An entry starting with ~ may be a local file relative to a
            # home directory; use the normalized form only if it exists.
            # This is deliberately conservative - it might be fine just to
            # blindly normalize anything starting with a ~...
            if link.startswith("~"):
                expanded = normalize_path(link)
                if os.path.exists(expanded):
                    return expanded
            return link

        built_find_links = [_maybe_expand(link) for link in find_links]

        # If we don't have TLS enabled, then WARN (once) if anyplace we're
        # looking relies on TLS.
        if not has_tls() and any(
            urllib.parse.urlparse(location).scheme == "https"
            for location in itertools.chain(index_urls, built_find_links)
        ):
            logger.warning(
                "pip is configured with locations that require "
                "TLS/SSL, however the ssl module in Python is not "
                "available."
            )

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
            no_index=no_index,
        )

    def __init__(
        self,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> None:
        self.find_links = find_links
        self.index_urls = index_urls
        self.no_index = no_index

    def get_formatted_locations(self) -> str:
        """Return a human-readable, auth-redacted summary of the search scope."""
        lines: List[str] = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            redacted_index_urls = []
            for url in self.index_urls:
                redacted_index_url = redact_auth_from_url(url)

                purl = urllib.parse.urlsplit(redacted_index_url)

                # URL is generally invalid if scheme and netloc is missing
                # there are issues with Python and URL parsing, so this test
                # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
                # always parse invalid URLs correctly - it should raise
                # exceptions for malformed URLs
                if not purl.scheme and not purl.netloc:
                    logger.warning(
                        'The index url "%s" seems invalid, please provide a scheme.',
                        redacted_index_url,
                    )

                redacted_index_urls.append(redacted_index_url)

            lines.append(
                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
            )

        if self.find_links:
            lines.append(
                "Looking in links: {}".format(
                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
                )
            )
        return "\n".join(lines)

    def get_index_urls_locations(self, project_name: str) -> List[str]:
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """
        # The canonicalized, quoted project name is the same for every index
        # URL, so compute it once.
        quoted_name = urllib.parse.quote(canonicalize_name(project_name))

        locations = []
        for url in self.index_urls:
            loc = posixpath.join(url, quoted_name)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith("/"):
                loc += "/"
            locations.append(loc)
        return locations
| @ -0,0 +1,51 @@ | ||||
| from typing import Optional | ||||
|  | ||||
| from pip._internal.models.format_control import FormatControl | ||||
|  | ||||
|  | ||||
class SelectionPreferences:
    """
    Bundle of knobs controlling which candidate files are acceptable
    when choosing what to download or install.
    """

    __slots__ = [
        "allow_yanked",
        "allow_all_prereleases",
        "format_control",
        "prefer_binary",
        "ignore_requires_python",
    ]

    # ``allow_yanked`` deliberately has no default: every call site must
    # state explicitly whether yanked releases (PEP 592) may be chosen,
    # keeping that decision visible in the calling code.
    def __init__(
        self,
        allow_yanked: bool,
        allow_all_prereleases: bool = False,
        format_control: Optional[FormatControl] = None,
        prefer_binary: bool = False,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        # Collapse the tri-state argument (None meaning "unspecified")
        # down to a plain bool.
        self.ignore_requires_python = (
            False if ignore_requires_python is None else ignore_requires_python
        )
| @ -0,0 +1,110 @@ | ||||
| import sys | ||||
| from typing import List, Optional, Tuple | ||||
|  | ||||
| from pip._vendor.packaging.tags import Tag | ||||
|  | ||||
| from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot | ||||
| from pip._internal.utils.misc import normalize_version_info | ||||
|  | ||||
|  | ||||
class TargetPython:

    """
    Describes the Python interpreter that packages are being selected
    for (platforms, version, ABIs, implementation), which may differ
    from the interpreter actually running pip.
    """

    __slots__ = [
        "_given_py_version_info",
        "abis",
        "implementation",
        "platforms",
        "py_version",
        "py_version_info",
        "_valid_tags",
    ]

    def __init__(
        self,
        platforms: Optional[List[str]] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
        abis: Optional[List[str]] = None,
        implementation: Optional[str] = None,
    ) -> None:
        """
        :param platforms: Platform strings to target, or None to search
            for packages supported by the current system. Packages for
            foreign platforms are download-only: they are never built
            locally.
        :param py_version_info: Optional Python version tuple of length
            1, 2 or 3 (e.g. ``sys.version_info[:3]``); None targets the
            running interpreter.
        :param abis: ABI strings (or None) forwarded verbatim to
            compatibility_tags.py's get_supported().
        :param implementation: Implementation string (or None) forwarded
            verbatim to compatibility_tags.py's get_supported().
        """
        # Keep the raw argument: get_tags() must distinguish "not given"
        # from an explicit version when calling get_supported().
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        self.abis = abis
        self.implementation = implementation
        self.platforms = platforms
        self.py_version = ".".join(map(str, py_version_info[:2]))
        self.py_version_info = py_version_info

        # Lazily-populated cache for get_tags().
        self._valid_tags: Optional[List[Tag]] = None

    def format_given(self) -> str:
        """
        Render only the explicitly-provided attributes, for display.
        """
        if self._given_py_version_info is None:
            display_version = None
        else:
            display_version = ".".join(
                map(str, self._given_py_version_info)
            )

        pairs = (
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        )
        return " ".join(
            f"{name}={value!r}" for name, value in pairs if value is not None
        )

    def get_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against,
        most preferred first. Computed once and cached.
        """
        if self._valid_tags is None:
            # get_supported() applies special default logic when
            # versions is None, so only convert an explicitly supplied
            # version.
            given = self._given_py_version_info
            version = None if given is None else version_info_to_nodot(given)

            self._valid_tags = get_supported(
                version=version,
                platforms=self.platforms,
                abis=self.abis,
                impl=self.implementation,
            )

        return self._valid_tags
| @ -0,0 +1,92 @@ | ||||
| """Represents a wheel file and provides access to the various parts of the | ||||
| name that have meaning. | ||||
| """ | ||||
| import re | ||||
| from typing import Dict, Iterable, List | ||||
|  | ||||
| from pip._vendor.packaging.tags import Tag | ||||
|  | ||||
| from pip._internal.exceptions import InvalidWheelFilename | ||||
|  | ||||
|  | ||||
class Wheel:
    """Parsed representation of a wheel filename.

    Exposes the name components (project name, version, build tag) and
    the set of PEP 425 tag combinations the file claims to support.
    """

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE,
    )

    def __init__(self, filename: str) -> None:
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            # Name the offending file so the error is actionable.
            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
        self.filename = filename
        self.name = wheel_info.group("name").replace("_", "-")
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group("ver").replace("_", "-")
        self.build_tag = wheel_info.group("build")
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")

        # All the tag combinations from this file (compressed tag sets
        # like "py2.py3" expand to one Tag per combination).
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self) -> List[str]:
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(str(tag) for tag in self.file_tags)

    def support_index_min(self, tags: List[Tag]) -> int:
        """Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        try:
            return next(i for i, t in enumerate(tags) if t in self.file_tags)
        except StopIteration:
            raise ValueError()

    def find_most_preferred_tag(
        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
    ) -> int:
        """Return the priority of the most preferred tag that one of the wheel's file
        tag combinations achieves in the given list of supported tags using the given
        tag_to_priority mapping, where lower priorities are more-preferred.

        This is used in place of support_index_min in some cases in order to avoid
        an expensive linear scan of a large list of tags.

        :param tags: the PEP 425 tags to check the wheel against.
        :param tag_to_priority: a mapping from tag to priority of that tag, where
            lower is more preferred.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(
            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
        )

    def supported(self, tags: Iterable[Tag]) -> bool:
        """Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)
| @ -0,0 +1,2 @@ | ||||
| """Contains purely network-related utilities. | ||||
| """ | ||||
| @ -0,0 +1,561 @@ | ||||
| """Network Authentication Helpers | ||||
|  | ||||
| Contains interface (MultiDomainBasicAuth) and associated glue code for | ||||
| providing credentials in the context of network requests. | ||||
| """ | ||||
| import logging | ||||
| import os | ||||
| import shutil | ||||
| import subprocess | ||||
| import sysconfig | ||||
| import typing | ||||
| import urllib.parse | ||||
| from abc import ABC, abstractmethod | ||||
| from functools import lru_cache | ||||
| from os.path import commonprefix | ||||
| from pathlib import Path | ||||
| from typing import Any, Dict, List, NamedTuple, Optional, Tuple | ||||
|  | ||||
| from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth | ||||
| from pip._vendor.requests.models import Request, Response | ||||
| from pip._vendor.requests.utils import get_netrc_auth | ||||
|  | ||||
| from pip._internal.utils.logging import getLogger | ||||
| from pip._internal.utils.misc import ( | ||||
|     ask, | ||||
|     ask_input, | ||||
|     ask_password, | ||||
|     remove_auth_from_url, | ||||
|     split_auth_netloc_from_url, | ||||
| ) | ||||
| from pip._internal.vcs.versioncontrol import AuthInfo | ||||
|  | ||||
# Module logger; pip's logging wrapper provides the extra ``verbose``
# level used below.
logger = getLogger(__name__)

# Global kill switch: flipped to True once a keyring call raises, so the
# rest of the run skips keyring (see MultiDomainBasicAuth._get_keyring_auth).
KEYRING_DISABLED = False
|  | ||||
|  | ||||
class Credentials(NamedTuple):
    """A (url, username, password) triple, held so prompted credentials
    can later be saved to keyring after the request authenticates."""

    url: str
    username: str
    password: str
|  | ||||
|  | ||||
class KeyRingBaseProvider(ABC):
    """Keyring base provider interface"""

    # Whether a real keyring backend is behind this provider
    # (False only for the null provider).
    has_keyring: bool

    @abstractmethod
    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        """Return a (username, password) pair for ``url``, or None on a miss."""
        ...

    @abstractmethod
    def save_auth_info(self, url: str, username: str, password: str) -> None:
        """Persist the given credentials for ``url``."""
        ...
|  | ||||
|  | ||||
class KeyRingNullProvider(KeyRingBaseProvider):
    """Keyring null provider"""

    # No real backend: lookups and saves are silent no-ops.
    has_keyring = False

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # No backend to consult; report "no credentials found".
        return None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        # Nowhere to persist credentials; do nothing.
        return None
|  | ||||
|  | ||||
class KeyRingPythonProvider(KeyRingBaseProvider):
    """Provider backed by a locally importable `keyring` package."""

    has_keyring = True

    def __init__(self) -> None:
        # Import here so instantiation fails (and the caller can fall
        # back) when the package is not installed.
        import keyring

        self.keyring = keyring

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        """Look up credentials, preferring keyring's get_credential API."""
        # keyring >= 15.2.0 exposes get_credential, which can resolve a
        # credential even when no username is known.
        get_credential = getattr(self.keyring, "get_credential", None)
        if get_credential is not None:
            logger.debug("Getting credentials from keyring for %s", url)
            cred = get_credential(url, username)
            if cred is None:
                return None
            return cred.username, cred.password

        # Older keyring: only a password lookup for a known user is
        # possible.
        if username is None:
            return None
        logger.debug("Getting password from keyring for %s", url)
        secret = self.keyring.get_password(url, username)
        return (username, secret) if secret else None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        """Persist the credentials via keyring."""
        self.keyring.set_password(url, username, password)
|  | ||||
|  | ||||
class KeyRingCliProvider(KeyRingBaseProvider):
    """Provider that shells out to a ``keyring`` executable.

    Instead of importing the keyring package installed alongside pip,
    this runs whichever ``keyring`` command is first on PATH, letting
    pip use an independently installed keyring.
    """

    has_keyring = True

    def __init__(self, cmd: str) -> None:
        # Path to (or name of) the keyring executable.
        self.keyring = cmd

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # Mirrors the default implementation of keyring.get_credential:
        # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
        if username is None:
            return None
        password = self._get_password(url, username)
        if password is None:
            return None
        return username, password

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        return self._set_password(url, username, password)

    def _get_password(self, service_name: str, username: str) -> Optional[str]:
        """Mirror the implementation of keyring.get_password using cli"""
        if self.keyring is None:
            return None

        env = dict(os.environ, PYTHONIOENCODING="utf-8")
        proc = subprocess.run(
            [self.keyring, "get", service_name, username],
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            env=env,
        )
        # A nonzero exit means "not found" (or failure) -- treat as a miss.
        if proc.returncode:
            return None
        return proc.stdout.decode("utf-8").strip(os.linesep)

    def _set_password(self, service_name: str, username: str, password: str) -> None:
        """Mirror the implementation of keyring.set_password using cli"""
        if self.keyring is None:
            return None
        env = dict(os.environ, PYTHONIOENCODING="utf-8")
        # The password is fed on stdin, terminated by a newline.
        subprocess.run(
            [self.keyring, "set", service_name, username],
            input=f"{password}{os.linesep}".encode("utf-8"),
            env=env,
            check=True,
        )
        return None
|  | ||||
|  | ||||
@lru_cache(maxsize=None)
def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
    """Resolve ``provider`` ("import", "subprocess", "auto", or "disabled")
    to a concrete KeyRingBaseProvider.

    Results are cached; MultiDomainBasicAuth._get_keyring_auth clears the
    cache (and sets KEYRING_DISABLED) when a keyring call later raises.
    """
    logger.verbose("Keyring provider requested: %s", provider)

    # keyring has previously failed and been disabled
    if KEYRING_DISABLED:
        provider = "disabled"
    if provider in ["import", "auto"]:
        try:
            impl = KeyRingPythonProvider()
            logger.verbose("Keyring provider set: import")
            return impl
        except ImportError:
            # keyring is simply not installed; fall through silently.
            pass
        except Exception as exc:
            # In the event of an unexpected exception
            # we should warn the user
            msg = "Installed copy of keyring fails with exception %s"
            if provider == "auto":
                msg = msg + ", trying to find a keyring executable as a fallback"
            logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
    if provider in ["subprocess", "auto"]:
        cli = shutil.which("keyring")
        if cli and cli.startswith(sysconfig.get_path("scripts")):
            # The executable found lives in this environment's scripts
            # directory (i.e. it is the keyring installed alongside pip);
            # re-search PATH without that directory to find an
            # independent installation instead.
            # all code within this function is stolen from shutil.which implementation
            @typing.no_type_check
            def PATH_as_shutil_which_determines_it() -> str:
                path = os.environ.get("PATH", None)
                if path is None:
                    try:
                        path = os.confstr("CS_PATH")
                    except (AttributeError, ValueError):
                        # os.confstr() or CS_PATH is not available
                        path = os.defpath
                # bpo-35755: Don't use os.defpath if the PATH environment variable is
                # set to an empty string

                return path

            scripts = Path(sysconfig.get_path("scripts"))

            paths = []
            for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
                p = Path(path)
                try:
                    if not p.samefile(scripts):
                        paths.append(path)
                except FileNotFoundError:
                    # PATH entry doesn't exist; shutil.which skips such
                    # directories itself, so dropping it here is harmless.
                    pass

            path = os.pathsep.join(paths)

            cli = shutil.which("keyring", path=path)

        if cli:
            logger.verbose("Keyring provider set: subprocess with executable %s", cli)
            return KeyRingCliProvider(cli)

    logger.verbose("Keyring provider set: disabled")
    return KeyRingNullProvider()
|  | ||||
|  | ||||
| class MultiDomainBasicAuth(AuthBase): | ||||
|     def __init__( | ||||
|         self, | ||||
|         prompting: bool = True, | ||||
|         index_urls: Optional[List[str]] = None, | ||||
|         keyring_provider: str = "auto", | ||||
|     ) -> None: | ||||
|         self.prompting = prompting | ||||
|         self.index_urls = index_urls | ||||
|         self.keyring_provider = keyring_provider  # type: ignore[assignment] | ||||
|         self.passwords: Dict[str, AuthInfo] = {} | ||||
|         # When the user is prompted to enter credentials and keyring is | ||||
|         # available, we will offer to save them. If the user accepts, | ||||
|         # this value is set to the credentials they entered. After the | ||||
|         # request authenticates, the caller should call | ||||
|         # ``save_credentials`` to save these. | ||||
|         self._credentials_to_save: Optional[Credentials] = None | ||||
|  | ||||
    @property
    def keyring_provider(self) -> KeyRingBaseProvider:
        # Resolve the stored provider *name* to a provider object on each
        # access; get_keyring_provider() is lru_cache'd, so this is cheap.
        return get_keyring_provider(self._keyring_provider)
|  | ||||
    @keyring_provider.setter
    def keyring_provider(self, provider: str) -> None:
        # Only the provider *name* is stored; resolution happens lazily
        # in the property getter.
        # The free function get_keyring_provider has been decorated with
        # functools.cache. If an exception occurs in get_keyring_auth that
        # cache will be cleared and keyring disabled, take that into account
        # if you want to remove this indirection.
        self._keyring_provider = provider
|  | ||||
|     @property | ||||
|     def use_keyring(self) -> bool: | ||||
|         # We won't use keyring when --no-input is passed unless | ||||
|         # a specific provider is requested because it might require | ||||
|         # user interaction | ||||
|         return self.prompting or self._keyring_provider not in ["auto", "disabled"] | ||||
|  | ||||
|     def _get_keyring_auth( | ||||
|         self, | ||||
|         url: Optional[str], | ||||
|         username: Optional[str], | ||||
|     ) -> Optional[AuthInfo]: | ||||
|         """Return the tuple auth for a given url from keyring.""" | ||||
|         # Do nothing if no url was provided | ||||
|         if not url: | ||||
|             return None | ||||
|  | ||||
|         try: | ||||
|             return self.keyring_provider.get_auth_info(url, username) | ||||
|         except Exception as exc: | ||||
|             logger.warning( | ||||
|                 "Keyring is skipped due to an exception: %s", | ||||
|                 str(exc), | ||||
|             ) | ||||
|             global KEYRING_DISABLED | ||||
|             KEYRING_DISABLED = True | ||||
|             get_keyring_provider.cache_clear() | ||||
|             return None | ||||
|  | ||||
|     def _get_index_url(self, url: str) -> Optional[str]: | ||||
|         """Return the original index URL matching the requested URL. | ||||
|  | ||||
|         Cached or dynamically generated credentials may work against | ||||
|         the original index URL rather than just the netloc. | ||||
|  | ||||
|         The provided url should have had its username and password | ||||
|         removed already. If the original index url had credentials then | ||||
|         they will be included in the return value. | ||||
|  | ||||
|         Returns None if no matching index was found, or if --no-index | ||||
|         was specified by the user. | ||||
|         """ | ||||
|         if not url or not self.index_urls: | ||||
|             return None | ||||
|  | ||||
|         url = remove_auth_from_url(url).rstrip("/") + "/" | ||||
|         parsed_url = urllib.parse.urlsplit(url) | ||||
|  | ||||
|         candidates = [] | ||||
|  | ||||
|         for index in self.index_urls: | ||||
|             index = index.rstrip("/") + "/" | ||||
|             parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index)) | ||||
|             if parsed_url == parsed_index: | ||||
|                 return index | ||||
|  | ||||
|             if parsed_url.netloc != parsed_index.netloc: | ||||
|                 continue | ||||
|  | ||||
|             candidate = urllib.parse.urlsplit(index) | ||||
|             candidates.append(candidate) | ||||
|  | ||||
|         if not candidates: | ||||
|             return None | ||||
|  | ||||
|         candidates.sort( | ||||
|             reverse=True, | ||||
|             key=lambda candidate: commonprefix( | ||||
|                 [ | ||||
|                     parsed_url.path, | ||||
|                     candidate.path, | ||||
|                 ] | ||||
|             ).rfind("/"), | ||||
|         ) | ||||
|  | ||||
|         return urllib.parse.urlunsplit(candidates[0]) | ||||
|  | ||||
    def _get_new_credentials(
        self,
        original_url: str,
        *,
        allow_netrc: bool = True,
        allow_keyring: bool = False,
    ) -> AuthInfo:
        """Find and return credentials for the specified URL.

        Sources are consulted in order: credentials embedded in the URL
        itself, credentials embedded in the matching configured index
        URL, netrc (if ``allow_netrc``), then keyring (if
        ``allow_keyring``). The result may be a partial or (None, None)
        pair when nothing complete is found.
        """
        # Split the credentials and netloc from the url.
        url, netloc, url_user_password = split_auth_netloc_from_url(
            original_url,
        )

        # Start with the credentials embedded in the url
        username, password = url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in url for %s", netloc)
            return url_user_password

        # Find a matching index url for this request
        index_url = self._get_index_url(url)
        if index_url:
            # Split the credentials from the url.
            index_info = split_auth_netloc_from_url(index_url)
            if index_info:
                index_url, _, index_url_user_password = index_info
                logger.debug("Found index url %s", index_url)

        # If an index URL was found, try its embedded credentials
        if index_url and index_url_user_password[0] is not None:
            username, password = index_url_user_password
            if username is not None and password is not None:
                logger.debug("Found credentials in index url for %s", netloc)
                return index_url_user_password

        # Get creds from netrc if we still don't have them
        if allow_netrc:
            netrc_auth = get_netrc_auth(original_url)
            if netrc_auth:
                logger.debug("Found credentials in netrc for %s", netloc)
                return netrc_auth

        # If we don't have a password and keyring is available, use it.
        if allow_keyring:
            # The index url is more specific than the netloc, so try it first
            # fmt: off
            kr_auth = (
                self._get_keyring_auth(index_url, username) or
                self._get_keyring_auth(netloc, username)
            )
            # fmt: on
            if kr_auth:
                logger.debug("Found credentials in keyring for %s", netloc)
                return kr_auth

        return username, password
|  | ||||
|     def _get_url_and_credentials( | ||||
|         self, original_url: str | ||||
|     ) -> Tuple[str, Optional[str], Optional[str]]: | ||||
|         """Return the credentials to use for the provided URL. | ||||
|  | ||||
|         If allowed, netrc and keyring may be used to obtain the | ||||
|         correct credentials. | ||||
|  | ||||
|         Returns (url_without_credentials, username, password). Note | ||||
|         that even if the original URL contains credentials, this | ||||
|         function may return a different username and password. | ||||
|         """ | ||||
|         url, netloc, _ = split_auth_netloc_from_url(original_url) | ||||
|  | ||||
|         # Try to get credentials from original url | ||||
|         username, password = self._get_new_credentials(original_url) | ||||
|  | ||||
|         # If credentials not found, use any stored credentials for this netloc. | ||||
|         # Do this if either the username or the password is missing. | ||||
|         # This accounts for the situation in which the user has specified | ||||
|         # the username in the index url, but the password comes from keyring. | ||||
|         if (username is None or password is None) and netloc in self.passwords: | ||||
|             un, pw = self.passwords[netloc] | ||||
|             # It is possible that the cached credentials are for a different username, | ||||
|             # in which case the cache should be ignored. | ||||
|             if username is None or username == un: | ||||
|                 username, password = un, pw | ||||
|  | ||||
|         if username is not None or password is not None: | ||||
|             # Convert the username and password if they're None, so that | ||||
|             # this netloc will show up as "cached" in the conditional above. | ||||
|             # Further, HTTPBasicAuth doesn't accept None, so it makes sense to | ||||
|             # cache the value that is going to be used. | ||||
|             username = username or "" | ||||
|             password = password or "" | ||||
|  | ||||
|             # Store any acquired credentials. | ||||
|             self.passwords[netloc] = (username, password) | ||||
|  | ||||
|         assert ( | ||||
|             # Credentials were found | ||||
|             (username is not None and password is not None) | ||||
|             # Credentials were not found | ||||
|             or (username is None and password is None) | ||||
|         ), f"Could not load credentials from url: {original_url}" | ||||
|  | ||||
|         return url, username, password | ||||
|  | ||||
|     def __call__(self, req: Request) -> Request: | ||||
|         # Get credentials for this request | ||||
|         url, username, password = self._get_url_and_credentials(req.url) | ||||
|  | ||||
|         # Set the url of the request to the url without any credentials | ||||
|         req.url = url | ||||
|  | ||||
|         if username is not None and password is not None: | ||||
|             # Send the basic auth with this request | ||||
|             req = HTTPBasicAuth(username, password)(req) | ||||
|  | ||||
|         # Attach a hook to handle 401 responses | ||||
|         req.register_hook("response", self.handle_401) | ||||
|  | ||||
|         return req | ||||
|  | ||||
|     # Factored out to allow for easy patching in tests | ||||
|     def _prompt_for_password( | ||||
|         self, netloc: str | ||||
|     ) -> Tuple[Optional[str], Optional[str], bool]: | ||||
|         username = ask_input(f"User for {netloc}: ") if self.prompting else None | ||||
|         if not username: | ||||
|             return None, None, False | ||||
|         if self.use_keyring: | ||||
|             auth = self._get_keyring_auth(netloc, username) | ||||
|             if auth and auth[0] is not None and auth[1] is not None: | ||||
|                 return auth[0], auth[1], False | ||||
|         password = ask_password("Password: ") | ||||
|         return username, password, True | ||||
|  | ||||
|     # Factored out to allow for easy patching in tests | ||||
|     def _should_save_password_to_keyring(self) -> bool: | ||||
|         if ( | ||||
|             not self.prompting | ||||
|             or not self.use_keyring | ||||
|             or not self.keyring_provider.has_keyring | ||||
|         ): | ||||
|             return False | ||||
|         return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" | ||||
|  | ||||
    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
        """Response hook: on a 401, retry the request with credentials.

        Credentials come from the keyring (when enabled) or an
        interactive prompt; on a later successful response they may be
        persisted via the ``save_credentials`` hook.  Any non-401
        response passes through unchanged.
        """
        # We only care about 401 responses, anything else we want to just
        #   pass through the actual response
        if resp.status_code != 401:
            return resp

        username, password = None, None

        # Query the keyring for credentials:
        if self.use_keyring:
            username, password = self._get_new_credentials(
                resp.url,
                allow_netrc=False,
                allow_keyring=True,
            )

        # We are not able to prompt the user so simply return the response
        if not self.prompting and not username and not password:
            return resp

        parsed = urllib.parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        save = False
        if not username and not password:
            username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = Credentials(
                    url=parsed.netloc,
                    username=username,
                    password=password,
                )

        # Consume content and release the original connection to allow our new
        #   request to reuse the same one.
        # The result of the assignment isn't used, it's just needed to consume
        # the content.
        _ = resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp
|  | ||||
|     def warn_on_401(self, resp: Response, **kwargs: Any) -> None: | ||||
|         """Response callback to warn about incorrect credentials.""" | ||||
|         if resp.status_code == 401: | ||||
|             logger.warning( | ||||
|                 "401 Error, Credentials not correct for %s", | ||||
|                 resp.request.url, | ||||
|             ) | ||||
|  | ||||
|     def save_credentials(self, resp: Response, **kwargs: Any) -> None: | ||||
|         """Response callback to save credentials on success.""" | ||||
|         assert ( | ||||
|             self.keyring_provider.has_keyring | ||||
|         ), "should never reach here without keyring" | ||||
|  | ||||
|         creds = self._credentials_to_save | ||||
|         self._credentials_to_save = None | ||||
|         if creds and resp.status_code < 400: | ||||
|             try: | ||||
|                 logger.info("Saving credentials to keyring") | ||||
|                 self.keyring_provider.save_auth_info( | ||||
|                     creds.url, creds.username, creds.password | ||||
|                 ) | ||||
|             except Exception: | ||||
|                 logger.exception("Failed to save credentials") | ||||
| @ -0,0 +1,69 @@ | ||||
| """HTTP cache implementation. | ||||
| """ | ||||
|  | ||||
| import os | ||||
| from contextlib import contextmanager | ||||
| from typing import Generator, Optional | ||||
|  | ||||
| from pip._vendor.cachecontrol.cache import BaseCache | ||||
| from pip._vendor.cachecontrol.caches import FileCache | ||||
| from pip._vendor.requests.models import Response | ||||
|  | ||||
| from pip._internal.utils.filesystem import adjacent_tmp_file, replace | ||||
| from pip._internal.utils.misc import ensure_dir | ||||
|  | ||||
|  | ||||
def is_from_cache(response: Response) -> bool:
    """Tell whether *response* was answered from the local HTTP cache.

    cachecontrol sets a ``from_cache`` attribute on responses it serves
    from cache; responses fetched over the network lack it, hence the
    False default.
    """
    return getattr(response, "from_cache", False)
|  | ||||
|  | ||||
@contextmanager
def suppressed_cache_errors() -> Generator[None, None, None]:
    """Context manager that swallows ``OSError``.

    An unreadable or unwritable cache should degrade to "no caching"
    rather than fail the request being processed.
    """
    try:
        yield
    except OSError:
        # Filesystem trouble (permissions, missing dirs, ...) is
        # deliberately ignored: behave as if caching were disabled.
        pass
|  | ||||
|  | ||||
class SafeFileCache(BaseCache):
    """File-backed HTTP cache that tolerates filesystem failures.

    Every operation is wrapped in ``suppressed_cache_errors``, so a
    cache directory that is missing, unreadable or unwritable simply
    behaves like an empty cache instead of raising.
    """

    def __init__(self, directory: str) -> None:
        assert directory is not None, "Cache directory must not be None."
        super().__init__()
        self.directory = directory

    def _get_cache_path(self, name: str) -> str:
        # Mirrors cachecontrol.caches.file_cache.FileCache._fn; duplicated
        # here to keep the on-disk layout backwards-compatible without
        # relying on that non-public method.
        digest = FileCache.encode(name)
        shards = list(digest[:5])
        shards.append(digest)
        return os.path.join(self.directory, *shards)

    def get(self, key: str) -> Optional[bytes]:
        """Return the cached payload for *key*, or None on miss/error."""
        entry = self._get_cache_path(key)
        with suppressed_cache_errors():
            with open(entry, "rb") as fp:
                return fp.read()

    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
        """Write *value* under *key* atomically (``expires`` is unused)."""
        entry = self._get_cache_path(key)
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(entry))

            # Write a sibling temp file first, then rename it over the
            # final path so readers never observe a partial entry.
            with adjacent_tmp_file(entry) as tmp:
                tmp.write(value)

            replace(tmp.name, entry)

    def delete(self, key: str) -> None:
        """Drop the entry for *key*, ignoring filesystem errors."""
        entry = self._get_cache_path(key)
        with suppressed_cache_errors():
            os.remove(entry)
| @ -0,0 +1,186 @@ | ||||
| """Download files with progress indicators. | ||||
| """ | ||||
| import email.message | ||||
| import logging | ||||
| import mimetypes | ||||
| import os | ||||
| from typing import Iterable, Optional, Tuple | ||||
|  | ||||
| from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response | ||||
|  | ||||
| from pip._internal.cli.progress_bars import get_download_progress_renderer | ||||
| from pip._internal.exceptions import NetworkConnectionError | ||||
| from pip._internal.models.index import PyPI | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.network.cache import is_from_cache | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks | ||||
| from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def _get_http_response_size(resp: Response) -> Optional[int]:
    """Return the response's Content-Length as an int, or None when the
    header is absent or not a valid integer.
    """
    raw_size = resp.headers.get("content-length")
    try:
        # int(None) raises TypeError, covering the missing-header case.
        return int(raw_size)
    except (ValueError, TypeError):
        return None
|  | ||||
|  | ||||
def _prepare_download(
    resp: Response,
    link: Link,
    progress_bar: str,
) -> Iterable[bytes]:
    """Log the start of a download and return the response's body chunks,
    wrapped in a progress renderer when a progress bar is warranted.
    """
    total_length = _get_http_response_size(resp)

    # Links on PyPI's file-storage domain use their shortened display URL.
    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)
    if total_length:
        logged_url = "{} ({})".format(logged_url, format_size(total_length))

    cached = is_from_cache(resp)
    if cached:
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    # Show a bar only for real (non-cached) downloads at INFO verbosity,
    # and only when the size is unknown or exceeds 40 kB.
    if logger.getEffectiveLevel() > logging.INFO or cached:
        show_progress = False
    else:
        show_progress = not total_length or total_length > (40 * 1000)

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
    if not show_progress:
        return chunks

    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
    return renderer(chunks)
|  | ||||
|  | ||||
def sanitize_content_filename(filename: str) -> str:
    """Reduce a Content-Disposition ``filename`` value to its final path
    component, defeating directory-traversal attempts like ``../../x``.
    """
    # basename() drops every directory component, keeping only the leaf.
    return os.path.basename(filename)
|  | ||||
|  | ||||
def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
    """
    Extract a safe ``filename`` from a Content-Disposition header value,
    falling back to *default_filename* when none is present or usable.
    """
    # Let the email machinery do the RFC-compliant parameter parsing.
    msg = email.message.Message()
    msg["content-type"] = content_disposition
    parsed = msg.get_param("filename")
    if parsed:
        # Keep only the leaf name so "../"-style values cannot escape the
        # download directory (same behavior as sanitize_content_filename).
        parsed = os.path.basename(str(parsed))
    return parsed or default_filename
|  | ||||
|  | ||||
def _get_http_response_filename(resp: Response, link: Link) -> str:
    """Pick the best filename for a downloaded file.

    Preference: the Content-Disposition header, then the link's own
    filename; when the result has no extension, try to add one from the
    Content-Type or, after redirects, from the final response URL.
    """
    best_name = link.filename  # fallback when no header helps
    # A Content-Disposition header, when present, is a better hint.
    disposition = resp.headers.get("content-disposition")
    if disposition:
        best_name = parse_content_disposition(disposition, best_name)
    extension: Optional[str] = splitext(best_name)[1]
    if not extension:
        # Derive an extension from the declared media type, if possible.
        extension = mimetypes.guess_extension(resp.headers.get("content-type", ""))
        if extension:
            best_name += extension
    if not extension and link.url != resp.url:
        # After a redirect, the final URL's path may carry an extension.
        extension = os.path.splitext(resp.url)[1]
        if extension:
            best_name += extension
    return best_name
|  | ||||
|  | ||||
def _http_get_download(session: PipSession, link: Link) -> Response:
    """Issue a streaming GET for *link* (fragment stripped) and raise on
    HTTP error statuses.
    """
    target_url, _, _ = link.url.partition("#")
    resp = session.get(target_url, headers=HEADERS, stream=True)
    raise_for_status(resp)
    return resp
|  | ||||
|  | ||||
class Downloader:
    """Callable that fetches a single link into a target directory.

    Returns the path of the written file together with the response's
    Content-Type.
    """

    def __init__(
        self,
        session: PipSession,
        progress_bar: str,
    ) -> None:
        self._session = session
        self._progress_bar = progress_bar

    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
        """Download the file given by link into location."""
        try:
            resp = _http_get_download(self._session, link)
        except NetworkConnectionError as e:
            # Narrow the Optional so the status code below is safe to read.
            assert e.response is not None
            logger.critical(
                "HTTP error %s while getting %s", e.response.status_code, link
            )
            raise

        dest = os.path.join(location, _get_http_response_filename(resp, link))

        # Stream the body to disk chunk by chunk (with a progress display
        # when appropriate) instead of buffering it all in memory.
        body = _prepare_download(resp, link, self._progress_bar)
        with open(dest, "wb") as out:
            for piece in body:
                out.write(piece)
        return dest, resp.headers.get("Content-Type", "")
|  | ||||
|  | ||||
class BatchDownloader:
    """Callable that fetches many links into one directory, yielding
    ``(link, (filepath, content_type))`` for each as it completes.
    """

    def __init__(
        self,
        session: PipSession,
        progress_bar: str,
    ) -> None:
        self._session = session
        self._progress_bar = progress_bar

    def __call__(
        self, links: Iterable[Link], location: str
    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
        """Download the files given by links into location."""
        for link in links:
            try:
                resp = _http_get_download(self._session, link)
            except NetworkConnectionError as e:
                # Narrow the Optional before reading the status code.
                assert e.response is not None
                logger.critical(
                    "HTTP error %s while getting %s",
                    e.response.status_code,
                    link,
                )
                raise

            dest = os.path.join(
                location, _get_http_response_filename(resp, link)
            )

            # Stream each body to disk rather than buffering in memory.
            body = _prepare_download(resp, link, self._progress_bar)
            with open(dest, "wb") as out:
                for piece in body:
                    out.write(piece)
            yield link, (dest, resp.headers.get("Content-Type", ""))
| @ -0,0 +1,210 @@ | ||||
| """Lazy ZIP over HTTP""" | ||||
|  | ||||
| __all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"] | ||||
|  | ||||
| from bisect import bisect_left, bisect_right | ||||
| from contextlib import contextmanager | ||||
| from tempfile import NamedTemporaryFile | ||||
| from typing import Any, Dict, Generator, List, Optional, Tuple | ||||
| from zipfile import BadZipFile, ZipFile | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
| from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response | ||||
|  | ||||
| from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks | ||||
|  | ||||
|  | ||||
class HTTPRangeRequestUnsupported(Exception):
    """Raised when the server does not advertise support for HTTP
    byte-range requests (missing/insufficient Accept-Ranges header)."""

    pass
|  | ||||
|  | ||||
def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
    """Build a distribution for the wheel at *url* without fetching it all.

    Only the ZIP regions holding the metadata are downloaded, via HTTP
    range requests.  Raises HTTPRangeRequestUnsupported when the server
    cannot serve ranges.
    """
    with LazyZipOverHTTP(url, session) as lazy_file:
        # For a read-only archive, ZipFile needs only read, seek,
        # seekable and tell, so the lazy file can stand in for a real one.
        in_memory_wheel = MemoryWheel(lazy_file.name, lazy_file)  # type: ignore
        # lazy_file.name stops being a valid path once the context exits,
        # so the distribution must be materialized inside the block.
        return get_wheel_distribution(in_memory_wheel, canonicalize_name(name))
|  | ||||
|  | ||||
class LazyZipOverHTTP:
    """File-like object mapped to a ZIP file over HTTP.

    This uses HTTP range requests to lazily fetch the file's content,
    which is supposed to be fed to ZipFile.  If such requests are not
    supported by the server, raise HTTPRangeRequestUnsupported
    during initialization.
    """

    def __init__(
        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
    ) -> None:
        """Probe *url* with a HEAD request and set up the local backing file.

        Raises HTTPRangeRequestUnsupported when the server does not
        advertise byte ranges via the Accept-Ranges header.
        """
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        # Total remote size, taken from the HEAD response.
        self._length = int(head.headers["Content-Length"])
        # Local temp file pre-sized to the full length; fetched ranges
        # are written into it at their true offsets.
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        # Parallel sorted lists recording the inclusive [left, right]
        # byte intervals already downloaded into the backing file.
        self._left: List[int] = []
        self._right: List[int] = []
        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
            raise HTTPRangeRequestUnsupported("range request is not supported")
        self._check_zip()

    @property
    def mode(self) -> str:
        """Opening mode, which is always rb."""
        return "rb"

    @property
    def name(self) -> str:
        """Path to the underlying file."""
        return self._file.name

    def seekable(self) -> bool:
        """Return whether random access is supported, which is True."""
        return True

    def close(self) -> None:
        """Close the file."""
        self._file.close()

    @property
    def closed(self) -> bool:
        """Whether the file is closed."""
        return self._file.closed

    def read(self, size: int = -1) -> bytes:
        """Read up to size bytes from the object and return them.

        As a convenience, if size is unspecified or -1,
        all bytes until EOF are returned.  Fewer than
        size bytes may be returned if EOF is reached.
        """
        # Always fetch at least one chunk so small reads still amortize
        # the per-request overhead.
        download_size = max(size, self._chunk_size)
        start, length = self.tell(), self._length
        stop = length if size < 0 else min(start + download_size, length)
        # Pull the window start back so [start, stop) spans download_size
        # bytes, prefetching data just before the current position too.
        start = max(0, stop - download_size)
        self._download(start, stop - 1)
        return self._file.read(size)

    def readable(self) -> bool:
        """Return whether the file is readable, which is True."""
        return True

    def seek(self, offset: int, whence: int = 0) -> int:
        """Change stream position and return the new absolute position.

        Seek to offset relative position indicated by whence:
        * 0: Start of stream (the default).  pos should be >= 0;
        * 1: Current position - pos may be negative;
        * 2: End of stream - pos usually negative.
        """
        return self._file.seek(offset, whence)

    def tell(self) -> int:
        """Return the current position."""
        return self._file.tell()

    def truncate(self, size: Optional[int] = None) -> int:
        """Resize the stream to the given size in bytes.

        If size is unspecified resize to the current position.
        The current stream position isn't changed.

        Return the new file size.
        """
        return self._file.truncate(size)

    def writable(self) -> bool:
        """Return False."""
        return False

    def __enter__(self) -> "LazyZipOverHTTP":
        self._file.__enter__()
        return self

    def __exit__(self, *exc: Any) -> None:
        # Closing the NamedTemporaryFile also removes it from disk.
        self._file.__exit__(*exc)

    @contextmanager
    def _stay(self) -> Generator[None, None, None]:
        """Return a context manager keeping the position.

        At the end of the block, seek back to original position.
        """
        pos = self.tell()
        try:
            yield
        finally:
            self.seek(pos)

    def _check_zip(self) -> None:
        """Check and download until the file is a valid ZIP."""
        # Grow the downloaded tail of the file chunk by chunk until
        # ZipFile accepts it (the ZIP directory lives near the end).
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # For read-only ZIP files, ZipFile only needs
                    # methods read, seek, seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipFile:
                    pass
                else:
                    break

    def _stream_response(
        self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
    ) -> Response:
        """Return HTTP response to a range request from start to end."""
        headers = base_headers.copy()
        headers["Range"] = f"bytes={start}-{end}"
        # TODO: Get range requests to be correctly cached
        headers["Cache-Control"] = "no-cache"
        return self._session.get(self._url, headers=headers, stream=True)

    def _merge(
        self, start: int, end: int, left: int, right: int
    ) -> Generator[Tuple[int, int], None, None]:
        """Return a generator of intervals to be fetched.

        Args:
            start (int): Start of needed interval
            end (int): End of needed interval
            left (int): Index of first overlapping downloaded data
            right (int): Index after last overlapping downloaded data
        """
        lslice, rslice = self._left[left:right], self._right[left:right]
        # Widen the requested range to absorb any overlapping intervals.
        i = start = min([start] + lslice[:1])
        end = max([end] + rslice[-1:])
        # Yield only the gaps between already-downloaded intervals.
        for j, k in zip(lslice, rslice):
            if j > i:
                yield i, j - 1
            i = k + 1
        if i <= end:
            yield i, end
        # Replace the absorbed intervals with the single merged one.
        self._left[left:right], self._right[left:right] = [start], [end]

    def _download(self, start: int, end: int) -> None:
        """Download bytes from start to end inclusively."""
        with self._stay():
            # Locate the downloaded intervals overlapping [start, end].
            left = bisect_left(self._right, start)
            right = bisect_right(self._left, end)
            for start, end in self._merge(start, end, left, right):
                response = self._stream_response(start, end)
                response.raise_for_status()
                # Write each fetched range at its true offset.
                self.seek(start)
                for chunk in response_chunks(response, self._chunk_size):
                    self._file.write(chunk)
| @ -0,0 +1,519 @@ | ||||
| """PipSession and supporting code, containing all pip-specific | ||||
| network request configuration and behavior. | ||||
| """ | ||||
|  | ||||
| import email.utils | ||||
| import io | ||||
| import ipaddress | ||||
| import json | ||||
| import logging | ||||
| import mimetypes | ||||
| import os | ||||
| import platform | ||||
| import shutil | ||||
| import subprocess | ||||
| import sys | ||||
| import urllib.parse | ||||
| import warnings | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     Dict, | ||||
|     Generator, | ||||
|     List, | ||||
|     Mapping, | ||||
|     Optional, | ||||
|     Sequence, | ||||
|     Tuple, | ||||
|     Union, | ||||
| ) | ||||
|  | ||||
| from pip._vendor import requests, urllib3 | ||||
| from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter | ||||
| from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter | ||||
| from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter | ||||
| from pip._vendor.requests.models import PreparedRequest, Response | ||||
| from pip._vendor.requests.structures import CaseInsensitiveDict | ||||
| from pip._vendor.urllib3.connectionpool import ConnectionPool | ||||
| from pip._vendor.urllib3.exceptions import InsecureRequestWarning | ||||
|  | ||||
| from pip import __version__ | ||||
| from pip._internal.metadata import get_default_environment | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.network.auth import MultiDomainBasicAuth | ||||
| from pip._internal.network.cache import SafeFileCache | ||||
|  | ||||
| # Import ssl from compat so the initial import occurs in only one place. | ||||
| from pip._internal.utils.compat import has_tls | ||||
| from pip._internal.utils.glibc import libc_ver | ||||
| from pip._internal.utils.misc import build_url_from_netloc, parse_netloc | ||||
| from pip._internal.utils.urls import url_to_path | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from ssl import SSLContext | ||||
|  | ||||
|     from pip._vendor.urllib3.poolmanager import PoolManager | ||||
|  | ||||
|  | ||||
logger = logging.getLogger(__name__)

# Shape of one entry in SECURE_ORIGINS: (protocol, host, port).  Entries
# below use "*" as a wildcard, and the port may also be None.
SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]


# Ignore warning raised when using --trusted-host.
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


SECURE_ORIGINS: List[SecureOrigin] = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]


# These are environment variables present when running under various
# CI systems.  For each variable, some CI systems that use the variable
# are indicated.  The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI.  Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    "BUILD_BUILDID",
    # Jenkins
    "BUILD_ID",
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    "CI",
    # Explicit environment variable.
    "PIP_IS_CI",
)
|  | ||||
|  | ||||
def looks_like_ci() -> bool:
    """
    Return whether it looks like pip is running under CI.
    """
    # Checking for a tty (isatty()) is unreliable here: some CI systems
    # (e.g. Travis CI) mimic one, so environment markers are used instead.
    env = os.environ
    return any(marker in env for marker in CI_ENVIRONMENT_VARIABLES)
|  | ||||
|  | ||||
| def user_agent() -> str: | ||||
|     """ | ||||
|     Return a string representing the user agent. | ||||
|     """ | ||||
|     data: Dict[str, Any] = { | ||||
|         "installer": {"name": "pip", "version": __version__}, | ||||
|         "python": platform.python_version(), | ||||
|         "implementation": { | ||||
|             "name": platform.python_implementation(), | ||||
|         }, | ||||
|     } | ||||
|  | ||||
|     if data["implementation"]["name"] == "CPython": | ||||
|         data["implementation"]["version"] = platform.python_version() | ||||
|     elif data["implementation"]["name"] == "PyPy": | ||||
|         pypy_version_info = sys.pypy_version_info  # type: ignore | ||||
|         if pypy_version_info.releaselevel == "final": | ||||
|             pypy_version_info = pypy_version_info[:3] | ||||
|         data["implementation"]["version"] = ".".join( | ||||
|             [str(x) for x in pypy_version_info] | ||||
|         ) | ||||
|     elif data["implementation"]["name"] == "Jython": | ||||
|         # Complete Guess | ||||
|         data["implementation"]["version"] = platform.python_version() | ||||
|     elif data["implementation"]["name"] == "IronPython": | ||||
|         # Complete Guess | ||||
|         data["implementation"]["version"] = platform.python_version() | ||||
|  | ||||
|     if sys.platform.startswith("linux"): | ||||
|         from pip._vendor import distro | ||||
|  | ||||
|         linux_distribution = distro.name(), distro.version(), distro.codename() | ||||
|         distro_infos: Dict[str, Any] = dict( | ||||
|             filter( | ||||
|                 lambda x: x[1], | ||||
|                 zip(["name", "version", "id"], linux_distribution), | ||||
|             ) | ||||
|         ) | ||||
|         libc = dict( | ||||
|             filter( | ||||
|                 lambda x: x[1], | ||||
|                 zip(["lib", "version"], libc_ver()), | ||||
|             ) | ||||
|         ) | ||||
|         if libc: | ||||
|             distro_infos["libc"] = libc | ||||
|         if distro_infos: | ||||
|             data["distro"] = distro_infos | ||||
|  | ||||
|     if sys.platform.startswith("darwin") and platform.mac_ver()[0]: | ||||
|         data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} | ||||
|  | ||||
|     if platform.system(): | ||||
|         data.setdefault("system", {})["name"] = platform.system() | ||||
|  | ||||
|     if platform.release(): | ||||
|         data.setdefault("system", {})["release"] = platform.release() | ||||
|  | ||||
|     if platform.machine(): | ||||
|         data["cpu"] = platform.machine() | ||||
|  | ||||
|     if has_tls(): | ||||
|         import _ssl as ssl | ||||
|  | ||||
|         data["openssl_version"] = ssl.OPENSSL_VERSION | ||||
|  | ||||
|     setuptools_dist = get_default_environment().get_distribution("setuptools") | ||||
|     if setuptools_dist is not None: | ||||
|         data["setuptools_version"] = str(setuptools_dist.version) | ||||
|  | ||||
|     if shutil.which("rustc") is not None: | ||||
|         # If for any reason `rustc --version` fails, silently ignore it | ||||
|         try: | ||||
|             rustc_output = subprocess.check_output( | ||||
|                 ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 | ||||
|             ) | ||||
|         except Exception: | ||||
|             pass | ||||
|         else: | ||||
|             if rustc_output.startswith(b"rustc "): | ||||
|                 # The format of `rustc --version` is: | ||||
|                 # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'` | ||||
|                 # We extract just the middle (1.52.1) part | ||||
|                 data["rustc_version"] = rustc_output.split(b" ")[1].decode() | ||||
|  | ||||
|     # Use None rather than False so as not to give the impression that | ||||
|     # pip knows it is not being run under CI.  Rather, it is a null or | ||||
|     # inconclusive result.  Also, we include some value rather than no | ||||
|     # value to make it easier to know that the check has been run. | ||||
|     data["ci"] = True if looks_like_ci() else None | ||||
|  | ||||
|     user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") | ||||
|     if user_data is not None: | ||||
|         data["user_data"] = user_data | ||||
|  | ||||
|     return "{data[installer][name]}/{data[installer][version]} {json}".format( | ||||
|         data=data, | ||||
|         json=json.dumps(data, separators=(",", ":"), sort_keys=True), | ||||
|     ) | ||||
|  | ||||
|  | ||||
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that serves ``file://`` URLs from the local disk.

    Mounting this on a requests session lets pip fetch local files through
    the same Response-based code path it uses for HTTP(S) downloads.
    """

    def send(
        self,
        request: PreparedRequest,
        stream: bool = False,
        timeout: Optional[Union[float, Tuple[float, float]]] = None,
        verify: Union[bool, str] = True,
        cert: Optional[Union[str, Tuple[str, str]]] = None,
        proxies: Optional[Mapping[str, str]] = None,
    ) -> Response:
        """Build a Response for the local file named by ``request.url``.

        A missing or unreadable file is reported as a synthetic 404 whose
        body carries the original OS error text.
        """
        pathname = url_to_path(request.url)

        response = Response()
        response.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # Surface the OS-level failure as an HTTP-ish error: a 404 whose
            # body is the formatted exception, so callers see a useful message.
            response.status_code = 404
            response.reason = type(exc).__name__
            response.raw = io.BytesIO(f"{response.reason}: {exc}".encode("utf8"))
        else:
            response.status_code = 200
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            response.headers = CaseInsensitiveDict(
                {
                    "Content-Type": content_type,
                    "Content-Length": stats.st_size,
                    "Last-Modified": email.utils.formatdate(
                        stats.st_mtime, usegmt=True
                    ),
                }
            )

            response.raw = open(pathname, "rb")
            response.close = response.raw.close

        return response

    def close(self) -> None:
        """Nothing to release: no pooled connections are held."""
|  | ||||
|  | ||||
class _SSLContextAdapterMixin:
    """Mixin adding an ``ssl_context`` constructor argument to HTTP adapters.

    The context is forwarded straight to the underlying pool manager, which
    lets the caller decide at runtime which SSL store to use (e.g. the
    optional ``truststore`` backend).
    """

    def __init__(
        self,
        *,
        ssl_context: Optional["SSLContext"] = None,
        **kwargs: Any,
    ) -> None:
        self._ssl_context = ssl_context
        super().__init__(**kwargs)

    def init_poolmanager(
        self,
        connections: int,
        maxsize: int,
        block: bool = DEFAULT_POOLBLOCK,
        **pool_kwargs: Any,
    ) -> "PoolManager":
        # Inject the stored context only when one was supplied; an explicit
        # ``ssl_context`` already present in pool_kwargs always wins.
        context = self._ssl_context
        if context is not None:
            pool_kwargs.setdefault("ssl_context", context)
        return super().init_poolmanager(  # type: ignore[misc]
            connections=connections,
            maxsize=maxsize,
            block=block,
            **pool_kwargs,
        )
|  | ||||
|  | ||||
class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
    """requests HTTP adapter that additionally accepts an ``ssl_context``."""
|  | ||||
|  | ||||
class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
    """Caching HTTP adapter that additionally accepts an ``ssl_context``."""
|  | ||||
|  | ||||
class InsecureHTTPAdapter(HTTPAdapter):
    """HTTP adapter that skips TLS certificate verification entirely."""

    def cert_verify(
        self,
        conn: ConnectionPool,
        url: str,
        verify: Union[bool, str],
        cert: Optional[Union[str, Tuple[str, str]]],
    ) -> None:
        # Deliberately discard the caller's ``verify`` value and force it off.
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
|  | ||||
|  | ||||
class InsecureCacheControlAdapter(CacheControlAdapter):
    """Caching HTTP adapter that skips TLS certificate verification."""

    def cert_verify(
        self,
        conn: ConnectionPool,
        url: str,
        verify: Union[bool, str],
        cert: Optional[Union[str, Tuple[str, str]]],
    ) -> None:
        # Deliberately discard the caller's ``verify`` value and force it off.
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
|  | ||||
|  | ||||
class PipSession(requests.Session):
    """A requests.Session tailored for pip.

    Adds retry handling, response caching restricted to securely fetched
    origins, multi-domain index authentication, ``file://`` URL support, and
    per-host TLS-trust overrides (``--trusted-host``).
    """

    # Session-wide default timeout in seconds; ``None`` defers to requests.
    timeout: Optional[int] = None

    def __init__(
        self,
        *args: Any,
        retries: int = 0,
        cache: Optional[str] = None,
        trusted_hosts: Sequence[str] = (),
        index_urls: Optional[List[str]] = None,
        ssl_context: Optional["SSLContext"] = None,
        **kwargs: Any,
    ) -> None:
        """
        :param retries: Total number of retries allowed per request.
        :param cache: Directory for the HTTP response cache; caching is
            disabled when ``None``.
        :param trusted_hosts: Domains not to emit warnings for when not using
            HTTPS.
        :param index_urls: Index URLs handed to the authentication handler.
        :param ssl_context: Optional SSLContext forwarded to the secure
            adapters (used by the optional truststore backend).
        """
        super().__init__(*args, **kwargs)

        # Namespace the attribute with "pip_" just in case to prevent
        # possible conflicts with the base class.
        self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,
            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],
            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )  # type: ignore

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching so we'll use it for all http:// URLs.
        # If caching is disabled, we will also use it for
        # https:// hosts that we've marked as ignoring
        # TLS errors for (trusted-hosts).
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        # We want to _only_ cache responses on securely fetched origins or when
        # the host is specified as trusted. We do this because
        # we can't validate the response of an insecurely/untrusted fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
                ssl_context=ssl_context,
            )
            self._trusted_host_adapter = InsecureCacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
            self._trusted_host_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        for host in trusted_hosts:
            self.add_trusted_host(host, suppress_logging=True)

    def update_index_urls(self, new_index_urls: List[str]) -> None:
        """
        :param new_index_urls: New index urls to update the authentication
            handler with.
        """
        self.auth.index_urls = new_index_urls

    def add_trusted_host(
        self, host: str, source: Optional[str] = None, suppress_logging: bool = False
    ) -> None:
        """Register ``host`` so that non-HTTPS access to it is not warned about.

        :param host: It is okay to provide a host that has previously been
            added.
        :param source: An optional source string, for logging where the host
            string came from.
        :raises ValueError: if ``host`` does not contain a host part.
        """
        if not suppress_logging:
            msg = f"adding trusted host: {host!r}"
            if source is not None:
                msg += f" (from {source})"
            logger.info(msg)

        parsed_host, parsed_port = parse_netloc(host)
        if parsed_host is None:
            raise ValueError(f"Trusted host URL must include a host part: {host!r}")
        if (parsed_host, parsed_port) not in self.pip_trusted_origins:
            self.pip_trusted_origins.append((parsed_host, parsed_port))

        # Mount the trusted-host adapter for every URL form of this host
        # (http and https, with and without an explicit port).
        self.mount(
            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
        )
        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
        if not parsed_port:
            self.mount(
                build_url_from_netloc(host, scheme="http") + ":",
                self._trusted_host_adapter,
            )
            # Mount wildcard ports for the same host.
            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)

    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
        """Yield the built-in secure origins, then a wildcard origin for each
        user-registered trusted host."""
        yield from SECURE_ORIGINS
        for host, port in self.pip_trusted_origins:
            yield ("*", host, "*" if port is None else port)

    def is_secure_origin(self, location: Link) -> bool:
        """Return True if ``location`` matches any secure or trusted origin."""
        # Determine if this url used a secure transport mechanism
        parsed = urllib.parse.urlparse(str(location))
        origin_protocol, origin_host, origin_port = (
            parsed.scheme,
            parsed.hostname,
            parsed.port,
        )

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        origin_protocol = origin_protocol.rsplit("+", 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in self.iter_secure_origins():
            secure_protocol, secure_host, secure_port = secure_origin
            if origin_protocol != secure_protocol and secure_protocol != "*":
                continue

            try:
                addr = ipaddress.ip_address(origin_host or "")
                network = ipaddress.ip_network(secure_host)
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (
                    origin_host
                    and origin_host.lower() != secure_host.lower()
                    and secure_host != "*"
                ):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches.
            if (
                origin_port != secure_port
                and secure_port != "*"
                and secure_port is not None
            ):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            origin_host,
            origin_host,
        )

        return False

    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
        """Send a request, supplying the session-level ``timeout`` and
        ``proxies`` defaults when the caller did not specify them."""
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)
        # Allow setting a default proxies on a session
        kwargs.setdefault("proxies", self.proxies)

        # Dispatch the actual request
        return super().request(method, url, *args, **kwargs)
| @ -0,0 +1,96 @@ | ||||
| from typing import Dict, Generator | ||||
|  | ||||
| from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response | ||||
|  | ||||
| from pip._internal.exceptions import NetworkConnectionError | ||||
|  | ||||
| # The following comments and HTTP headers were originally added by | ||||
| # Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. | ||||
| # | ||||
| # We use Accept-Encoding: identity here because requests defaults to | ||||
| # accepting compressed responses. This breaks in a variety of ways | ||||
| # depending on how the server is configured. | ||||
| # - Some servers will notice that the file isn't a compressible file | ||||
| #   and will leave the file alone and with an empty Content-Encoding | ||||
| # - Some servers will notice that the file is already compressed and | ||||
| #   will leave the file alone, adding a Content-Encoding: gzip header | ||||
| # - Some servers won't notice anything at all and will take a file | ||||
| #   that's already been compressed and compress it again, and set | ||||
| #   the Content-Encoding: gzip header | ||||
| # By setting this to request only the identity encoding we're hoping | ||||
| # to eliminate the third case.  Hopefully there does not exist a server | ||||
| # which when given a file will notice it is already compressed and that | ||||
| # you're not asking for a compressed file and will then decompress it | ||||
| # before sending because if that's the case I don't think it'll ever be | ||||
| # possible to make this work. | ||||
# Ask servers for the stored bytes verbatim; see the rationale above.
HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
|  | ||||
|  | ||||
def raise_for_status(resp: Response) -> None:
    """Raise NetworkConnectionError for 4xx/5xx responses.

    Mirrors requests' ``Response.raise_for_status`` but raises pip's own
    exception type, carrying the response for later inspection.
    """
    reason = resp.reason
    if isinstance(reason, bytes):
        # Some servers localize their reason strings, so try utf-8 first and
        # fall back to iso-8859-1 for everything else.
        try:
            reason = reason.decode("utf-8")
        except UnicodeDecodeError:
            reason = reason.decode("iso-8859-1")

    if 400 <= resp.status_code < 500:
        kind = "Client Error"
    elif 500 <= resp.status_code < 600:
        kind = "Server Error"
    else:
        # 1xx/2xx/3xx: nothing to raise.
        return

    raise NetworkConnectionError(
        f"{resp.status_code} {kind}: {reason} for url: {resp.url}", response=resp
    )
|  | ||||
|  | ||||
def response_chunks(
    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
) -> Generator[bytes, None, None]:
    """Given a requests Response, provide the data chunks.

    Prefers urllib3's ``raw.stream`` with ``decode_content=False`` so the
    yielded bytes are exactly what the server sent; falls back to plain
    ``read`` for ordinary file-like objects.
    """
    try:
        # Special case for urllib3.
        #
        # decode_content=False: never let urllib3 decompress. If the server
        # set a Content-Encoding header (e.g. gzip applied to an artifact
        # that is itself compressed), decompressing here would make any
        # checksum verification run over the wrong bytes — the checksum is
        # of the file as stored, not of its transfer encoding.
        yield from response.raw.stream(chunk_size, decode_content=False)
    except AttributeError:
        # Standard file-like object without ``stream``: read until EOF.
        while True:
            chunk = response.raw.read(chunk_size)
            if not chunk:
                break
            yield chunk
| @ -0,0 +1,60 @@ | ||||
| """xmlrpclib.Transport implementation | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| import urllib.parse | ||||
| import xmlrpc.client | ||||
| from typing import TYPE_CHECKING, Tuple | ||||
|  | ||||
| from pip._internal.exceptions import NetworkConnectionError | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.network.utils import raise_for_status | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from xmlrpc.client import _HostType, _Marshallable | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class PipXmlrpcTransport(xmlrpc.client.Transport):
    """An ``xmlrpclib.Transport`` that performs its HTTP over a ``PipSession``.

    Routing XML-RPC traffic through pip's session means it picks up the
    session's proxies, retries, authentication, and trusted-host handling.
    """

    def __init__(
        self, index_url: str, session: PipSession, use_datetime: bool = False
    ) -> None:
        super().__init__(use_datetime)
        # Only the scheme of the index URL is kept; host and handler are
        # supplied per request.
        self._scheme = urllib.parse.urlparse(index_url).scheme
        self._session = session

    def request(
        self,
        host: "_HostType",
        handler: str,
        request_body: bytes,
        verbose: bool = False,
    ) -> Tuple["_Marshallable", ...]:
        """POST ``request_body`` to ``host``/``handler`` and parse the reply.

        Logs and re-raises NetworkConnectionError on HTTP-level failure.
        """
        assert isinstance(host, str)
        url = urllib.parse.urlunparse((self._scheme, host, handler, None, None, None))
        try:
            response = self._session.post(
                url,
                data=request_body,
                headers={"Content-Type": "text/xml"},
                stream=True,
            )
            raise_for_status(response)
            self.verbose = verbose
            return self.parse_response(response.raw)
        except NetworkConnectionError as exc:
            assert exc.response
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code,
                url,
            )
            raise
| @ -0,0 +1,124 @@ | ||||
| import contextlib | ||||
| import hashlib | ||||
| import logging | ||||
| import os | ||||
| from types import TracebackType | ||||
| from typing import Dict, Generator, Optional, Set, Type, Union | ||||
|  | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
    """Temporarily apply ``changes`` to ``os.environ``.

    On exit, every touched variable is restored to its previous value, and
    variables that did not exist beforehand are removed again.
    """
    env = os.environ

    # Snapshot the prior state of each variable before overwriting it.
    # A unique sentinel distinguishes "was unset" from any real value.
    absent = object()
    previous: Dict[str, Union[object, str]] = {
        name: env.get(name, absent) for name in changes
    }
    env.update(changes)

    try:
        yield
    finally:
        # Roll the environment back, even if the body raised.
        for name, value in previous.items():
            if value is absent:
                del env[name]
            else:
                assert isinstance(value, str)  # for mypy
                env[name] = value
|  | ||||
|  | ||||
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
    """Yield a BuildTracker rooted at ``$PIP_BUILD_TRACKER``.

    When the variable is unset, a temporary root directory is created and
    exported through the environment for the duration of the context, so
    that subprocesses (build backends re-invoking pip) share the tracker.
    """
    root = os.environ.get("PIP_BUILD_TRACKER")
    with contextlib.ExitStack() as ctx:
        if root is None:
            # No tracker configured: create a managed temp dir and advertise
            # it to child processes via the environment variable.
            tracker_dir = ctx.enter_context(TempDirectory(kind="build-tracker"))
            root = tracker_dir.path
            ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with BuildTracker(root) as tracker:
            yield tracker
|  | ||||
|  | ||||
class BuildTracker:
    """Tracks requirements currently being built, via marker files under a
    shared root directory.

    Each tracked requirement gets a marker file named after the hash of its
    link; trying to track the same link twice raises ``LookupError``, which
    guards against recursive builds (a sdist whose build requires itself).
    """

    def __init__(self, root: str) -> None:
        # Directory holding the marker files (shared with subprocesses).
        self._root = root
        # Requirements added by this tracker, so cleanup() can undo them.
        self._entries: Set[InstallRequirement] = set()
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        self.cleanup()

    def _entry_path(self, link: Link) -> str:
        """Return the marker-file path for ``link`` (hashed to keep the
        filename filesystem-safe)."""
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req: InstallRequirement) -> None:
        """Add an InstallRequirement to build tracking.

        :raises LookupError: if the requirement's link is already being built.
        """
        assert req.link
        # Get the file to write information about this requirement.
        entry_path = self._entry_path(req.link)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except FileNotFoundError:
            pass
        else:
            raise LookupError(f"{req.link} is already being built: {contents}")

        # If we're here, req should really not be building already.
        assert req not in self._entries

        # Start tracking this requirement.
        with open(entry_path, "w", encoding="utf-8") as fp:
            fp.write(str(req))
        self._entries.add(req)

        logger.debug("Added %s to build tracker %r", req, self._root)

    def remove(self, req: InstallRequirement) -> None:
        """Remove an InstallRequirement from build tracking."""
        assert req.link
        # Delete the created file and the corresponding entry.
        os.unlink(self._entry_path(req.link))
        self._entries.remove(req)

        logger.debug("Removed %s from build tracker %r", req, self._root)

    def cleanup(self) -> None:
        """Untrack every requirement this tracker added."""
        for req in set(self._entries):
            self.remove(req)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement) -> Generator[None, None, None]:
        """Track ``req`` for the duration of the context.

        Uses try/finally so the marker file and entry are removed even when
        the build raises; previously a failed build left a stale entry
        behind, so retrying the same link with this tracker raised a
        spurious "already being built" LookupError.
        """
        self.add(req)
        try:
            yield
        finally:
            self.remove(req)
| @ -0,0 +1,39 @@ | ||||
| """Metadata generation logic for source distributions. | ||||
| """ | ||||
|  | ||||
| import os | ||||
|  | ||||
| from pip._vendor.pyproject_hooks import BuildBackendHookCaller | ||||
|  | ||||
| from pip._internal.build_env import BuildEnvironment | ||||
| from pip._internal.exceptions import ( | ||||
|     InstallationSubprocessError, | ||||
|     MetadataGenerationFailed, | ||||
| ) | ||||
| from pip._internal.utils.subprocess import runner_with_spinner_message | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
|  | ||||
|  | ||||
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    :param build_env: isolated environment in which the backend hook runs.
    :param backend: the PEP 517 hook caller for the project being built.
    :param details: human-readable package description used in error reports.
    :return: path of the generated metadata (``.dist-info``) directory.
    :raises MetadataGenerationFailed: when the backend hook subprocess fails.
    """
    tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
    metadata_dir = tmpdir.path

    with build_env:
        # BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so the hook below always exists.
        spinner_runner = runner_with_spinner_message(
            "Preparing metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(spinner_runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
| @ -0,0 +1,41 @@ | ||||
"""Metadata generation logic for editable installs (PEP 660).
"""
|  | ||||
| import os | ||||
|  | ||||
| from pip._vendor.pyproject_hooks import BuildBackendHookCaller | ||||
|  | ||||
| from pip._internal.build_env import BuildEnvironment | ||||
| from pip._internal.exceptions import ( | ||||
|     InstallationSubprocessError, | ||||
|     MetadataGenerationFailed, | ||||
| ) | ||||
| from pip._internal.utils.subprocess import runner_with_spinner_message | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
|  | ||||
|  | ||||
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    :param build_env: isolated environment in which the backend hook runs.
    :param backend: the PEP 660 hook caller for the project being built.
    :param details: human-readable package description used in error reports.
    :return: path of the generated metadata (``.dist-info``) directory.
    :raises MetadataGenerationFailed: when the backend hook subprocess fails.
    """
    tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
    metadata_dir = tmpdir.path

    with build_env:
        # BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_editable, so the hook below always
        # exists (it may delegate to the build_wheel variant).
        spinner_runner = runner_with_spinner_message(
            "Preparing editable metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(spinner_runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
| @ -0,0 +1,74 @@ | ||||
| """Metadata generation logic for legacy source distributions. | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| import os | ||||
|  | ||||
| from pip._internal.build_env import BuildEnvironment | ||||
| from pip._internal.cli.spinners import open_spinner | ||||
| from pip._internal.exceptions import ( | ||||
|     InstallationError, | ||||
|     InstallationSubprocessError, | ||||
|     MetadataGenerationFailed, | ||||
| ) | ||||
| from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args | ||||
| from pip._internal.utils.subprocess import call_subprocess | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| def _find_egg_info(directory: str) -> str: | ||||
|     """Find an .egg-info subdirectory in `directory`.""" | ||||
|     filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")] | ||||
|  | ||||
|     if not filenames: | ||||
|         raise InstallationError(f"No .egg-info directory found in {directory}") | ||||
|  | ||||
|     if len(filenames) > 1: | ||||
|         raise InstallationError( | ||||
|             "More than one .egg-info directory found in {}".format(directory) | ||||
|         ) | ||||
|  | ||||
|     return os.path.join(directory, filenames[0]) | ||||
|  | ||||
|  | ||||
def generate_metadata(
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory.
    """
    logger.debug(
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    # Write the egg-info into a globally-managed temporary directory.
    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    command = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env, open_spinner("Preparing metadata (setup.py)") as spinner:
        try:
            call_subprocess(
                command,
                cwd=source_dir,
                command_desc="python setup.py egg_info",
                spinner=spinner,
            )
        except InstallationSubprocessError as error:
            raise MetadataGenerationFailed(package_details=details) from error

    # The egg_info command writes a single .egg-info directory; return it.
    return _find_egg_info(egg_info_dir)
| @ -0,0 +1,37 @@ | ||||
| import logging | ||||
| import os | ||||
| from typing import Optional | ||||
|  | ||||
| from pip._vendor.pyproject_hooks import BuildBackendHookCaller | ||||
|  | ||||
| from pip._internal.utils.subprocess import runner_with_spinner_message | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def build_wheel_pep517(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        # Route the backend's subprocess output through a spinner runner.
        spin_runner = runner_with_spinner_message(
            f"Building wheel for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(spin_runner):
            wheel_name = backend.build_wheel(
                tempd, metadata_directory=metadata_directory
            )
    except Exception:
        # Report but do not propagate; the caller treats None as "failed".
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
| @ -0,0 +1,46 @@ | ||||
| import logging | ||||
| import os | ||||
| from typing import Optional | ||||
|  | ||||
| from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing | ||||
|  | ||||
| from pip._internal.utils.subprocess import runner_with_spinner_message | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def build_wheel_editable(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 660 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        spin_runner = runner_with_spinner_message(
            f"Building editable for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(spin_runner):
            try:
                wheel_name = backend.build_editable(
                    tempd, metadata_directory=metadata_directory
                )
            except HookMissing as e:
                # PEP 660 hooks are optional; report which one is absent.
                logger.error(
                    "Cannot build editable %s because the build "
                    "backend does not have the %s hook",
                    name,
                    e,
                )
                return None
    except Exception:
        logger.error("Failed building editable for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
| @ -0,0 +1,102 @@ | ||||
| import logging | ||||
| import os.path | ||||
| from typing import List, Optional | ||||
|  | ||||
| from pip._internal.cli.spinners import open_spinner | ||||
| from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args | ||||
| from pip._internal.utils.subprocess import call_subprocess, format_command_args | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def format_command_result(
    command_args: List[str],
    command_output: str,
) -> str:
    """Format command information for logging."""
    text = f"Command arguments: {format_command_args(command_args)}\n"

    if not command_output:
        text += "Command output: None"
    elif logger.getEffectiveLevel() > logging.DEBUG:
        # Full output is only shown at DEBUG (i.e. with --verbose).
        text += "Command output: [use --verbose to show]"
    else:
        output = command_output
        if not output.endswith("\n"):
            output += "\n"
        text += f"Command output:\n{output}"

    return text
|  | ||||
|  | ||||
| def get_legacy_build_wheel_path( | ||||
|     names: List[str], | ||||
|     temp_dir: str, | ||||
|     name: str, | ||||
|     command_args: List[str], | ||||
|     command_output: str, | ||||
| ) -> Optional[str]: | ||||
|     """Return the path to the wheel in the temporary build directory.""" | ||||
|     # Sort for determinism. | ||||
|     names = sorted(names) | ||||
|     if not names: | ||||
|         msg = ("Legacy build of wheel for {!r} created no files.\n").format(name) | ||||
|         msg += format_command_result(command_args, command_output) | ||||
|         logger.warning(msg) | ||||
|         return None | ||||
|  | ||||
|     if len(names) > 1: | ||||
|         msg = ( | ||||
|             "Legacy build of wheel for {!r} created more than one file.\n" | ||||
|             "Filenames (choosing first): {}\n" | ||||
|         ).format(name, names) | ||||
|         msg += format_command_result(command_args, command_output) | ||||
|         logger.warning(msg) | ||||
|  | ||||
|     return os.path.join(temp_dir, names[0]) | ||||
|  | ||||
|  | ||||
def build_wheel_legacy(
    name: str,
    setup_py_path: str,
    source_dir: str,
    global_options: List[str],
    build_options: List[str],
    tempd: str,
) -> Optional[str]:
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    bdist_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    with open_spinner(f"Building wheel for {name} (setup.py)") as spinner:
        logger.debug("Destination directory: %s", tempd)

        try:
            output = call_subprocess(
                bdist_args,
                command_desc="python setup.py bdist_wheel",
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            # Mark the spinner as failed before reporting the error.
            spinner.finish("error")
            logger.error("Failed building wheel for %s", name)
            return None

        # Pick the wheel out of whatever the build dropped into tempd.
        return get_legacy_build_wheel_path(
            names=os.listdir(tempd),
            temp_dir=tempd,
            name=name,
            command_args=bdist_args,
            command_output=output,
        )
| @ -0,0 +1,187 @@ | ||||
| """Validation of dependencies of packages | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple | ||||
|  | ||||
| from pip._vendor.packaging.requirements import Requirement | ||||
| from pip._vendor.packaging.specifiers import LegacySpecifier | ||||
| from pip._vendor.packaging.utils import NormalizedName, canonicalize_name | ||||
| from pip._vendor.packaging.version import LegacyVersion | ||||
|  | ||||
| from pip._internal.distributions import make_distribution_for_install_requirement | ||||
| from pip._internal.metadata import get_default_environment | ||||
| from pip._internal.metadata.base import DistributionVersion | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
| from pip._internal.utils.deprecation import deprecated | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class PackageDetails(NamedTuple):
    """Installed version and declared dependencies of a single package."""

    # Version of the installed distribution.
    version: DistributionVersion
    # Requirements this distribution declares (its dependencies).
    dependencies: List[Requirement]


# Shorthands
PackageSet = Dict[NormalizedName, PackageDetails]
# (dependency name, the requirement that wants it but is unsatisfied)
Missing = Tuple[NormalizedName, Requirement]
# (dependency name, installed version, the requirement it violates)
Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]

MissingDict = Dict[NormalizedName, List[Missing]]
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
CheckResult = Tuple[MissingDict, ConflictingDict]
ConflictDetails = Tuple[PackageSet, CheckResult]
|  | ||||
|  | ||||
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
    """Converts a list of distributions into a PackageSet."""
    package_set: PackageSet = {}
    problems = False
    environment = get_default_environment()
    for dist in environment.iter_installed_distributions(local_only=False, skip=()):
        name = dist.canonical_name
        try:
            # Reading version/dependencies parses on-disk metadata, which
            # may be absent or malformed for broken installs.
            package_set[name] = PackageDetails(
                dist.version, list(dist.iter_dependencies())
            )
        except (OSError, ValueError) as e:
            # Don't crash on unreadable or broken metadata.
            logger.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems
|  | ||||
|  | ||||
def check_package_set(
    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """

    warn_legacy_versions_and_specifiers(package_set)

    missing = {}
    conflicting = {}

    for pkg_name, pkg_details in package_set.items():
        # Unsatisfied / conflicting dependencies of pkg_name.
        missing_deps: Set[Missing] = set()
        conflicting_deps: Set[Conflicting] = set()

        if should_ignore is not None and should_ignore(pkg_name):
            continue

        for requirement in pkg_details.dependencies:
            dep_name = canonicalize_name(requirement.name)

            if dep_name not in package_set:
                # An absent dependency only counts as missing when its
                # environment marker applies (or there is no marker).
                applicable = (
                    requirement.marker.evaluate({"extra": ""})
                    if requirement.marker is not None
                    else True
                )
                if applicable:
                    missing_deps.add((dep_name, requirement))
                continue

            # Installed, but possibly at a version outside the specifier.
            installed_version = package_set[dep_name].version
            if not requirement.specifier.contains(installed_version, prereleases=True):
                conflicting_deps.add((dep_name, installed_version, requirement))

        if missing_deps:
            missing[pkg_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[pkg_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
|  | ||||
|  | ||||
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Overlay the candidate installs onto that state (mutates package_set).
    newly_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(newly_installed, package_set)

    def _ignore(pkg: str) -> bool:
        return pkg not in whitelist

    return package_set, check_package_set(package_set, should_ignore=_ignore)
|  | ||||
|  | ||||
def _simulate_installation_of(
    to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
    """Computes the version of packages after installing to_install."""
    # Names of packages "installed" by this simulation.
    installed: Set[NormalizedName] = set()

    # Mutate package_set as installing requirement_set would
    # (assuming no errors).
    for inst_req in to_install:
        dist = make_distribution_for_install_requirement(
            inst_req
        ).get_metadata_distribution()
        canonical = dist.canonical_name
        package_set[canonical] = PackageDetails(
            dist.version, list(dist.iter_dependencies())
        )
        installed.add(canonical)

    return installed
|  | ||||
|  | ||||
| def _create_whitelist( | ||||
|     would_be_installed: Set[NormalizedName], package_set: PackageSet | ||||
| ) -> Set[NormalizedName]: | ||||
|     packages_affected = set(would_be_installed) | ||||
|  | ||||
|     for package_name in package_set: | ||||
|         if package_name in packages_affected: | ||||
|             continue | ||||
|  | ||||
|         for req in package_set[package_name].dependencies: | ||||
|             if canonicalize_name(req.name) in packages_affected: | ||||
|                 packages_affected.add(package_name) | ||||
|                 break | ||||
|  | ||||
|     return packages_affected | ||||
|  | ||||
|  | ||||
| def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: | ||||
|     for project_name, package_details in package_set.items(): | ||||
|         if isinstance(package_details.version, LegacyVersion): | ||||
|             deprecated( | ||||
|                 reason=( | ||||
|                     f"{project_name} {package_details.version} " | ||||
|                     f"has a non-standard version number." | ||||
|                 ), | ||||
|                 replacement=( | ||||
|                     f"to upgrade to a newer version of {project_name} " | ||||
|                     f"or contact the author to suggest that they " | ||||
|                     f"release a version with a conforming version number" | ||||
|                 ), | ||||
|                 issue=12063, | ||||
|                 gone_in="23.3", | ||||
|             ) | ||||
|         for dep in package_details.dependencies: | ||||
|             if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): | ||||
|                 deprecated( | ||||
|                     reason=( | ||||
|                         f"{project_name} {package_details.version} " | ||||
|                         f"has a non-standard dependency specifier {dep}." | ||||
|                     ), | ||||
|                     replacement=( | ||||
|                         f"to upgrade to a newer version of {project_name} " | ||||
|                         f"or contact the author to suggest that they " | ||||
|                         f"release a version with a conforming dependency specifiers" | ||||
|                     ), | ||||
|                     issue=12063, | ||||
|                     gone_in="23.3", | ||||
|                 ) | ||||
| @ -0,0 +1,255 @@ | ||||
| import collections | ||||
| import logging | ||||
| import os | ||||
| from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
| from pip._vendor.packaging.version import Version | ||||
|  | ||||
| from pip._internal.exceptions import BadCommand, InstallationError | ||||
| from pip._internal.metadata import BaseDistribution, get_environment | ||||
| from pip._internal.req.constructors import ( | ||||
|     install_req_from_editable, | ||||
|     install_req_from_line, | ||||
| ) | ||||
| from pip._internal.req.req_file import COMMENT_RE | ||||
| from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
class _EditableInfo(NamedTuple):
    """Requirement string and comment lines for one editable install."""

    # Requirement (or filesystem location) to emit in freeze output.
    requirement: str
    # "#"-prefixed lines to emit above the requirement.
    comments: List[str]
|  | ||||
|  | ||||
def freeze(
    requirement: Optional[List[str]] = None,
    local_only: bool = False,
    user_only: bool = False,
    paths: Optional[List[str]] = None,
    isolated: bool = False,
    exclude_editable: bool = False,
    skip: Container[str] = (),
) -> Generator[str, None, None]:
    """Yield the lines of `pip freeze` output.

    When `requirement` files are given, lines that match those files are
    yielded in the files' order first, followed by any remaining installed
    packages under a "## The following requirements..." header.

    :param requirement: paths of requirements files to echo/match against.
    :param local_only: passed through to the environment's distribution iterator.
    :param user_only: passed through to the environment's distribution iterator.
    :param paths: import paths defining the environment to inspect.
    :param isolated: passed to InstallRequirement construction.
    :param exclude_editable: drop editable installs from the output.
    :param skip: canonical names never emitted in the trailing section.
    """
    # Map of canonical name -> FrozenRequirement for everything installed.
    installations: Dict[str, FrozenRequirement] = {}

    dists = get_environment(paths).iter_installed_distributions(
        local_only=local_only,
        skip=(),
        user_only=user_only,
    )
    for dist in dists:
        req = FrozenRequirement.from_dist(dist)
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options: Set[str] = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files: Dict[str, List[str]] = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Blank lines, comments, and pass-through option lines
                    # are echoed verbatim (deduplicated across files).
                    if (
                        not line.strip()
                        or line.strip().startswith("#")
                        or line.startswith(
                            (
                                "-r",
                                "--requirement",
                                "-f",
                                "--find-links",
                                "-i",
                                "--index-url",
                                "--pre",
                                "--trusted-host",
                                "--process-dependency-links",
                                "--extra-index-url",
                                "--use-feature",
                            )
                        )
                    ):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    # Parse the line into an InstallRequirement, stripping
                    # the editable flag or any trailing comment first.
                    if line.startswith("-e") or line.startswith("--editable"):
                        if line.startswith("-e"):
                            line = line[2:].strip()
                        else:
                            line = line[len("--editable") :].strip().lstrip("=")
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub("", line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        # Unnamed requirement (e.g. a bare URL): can't be
                        # matched against installed packages, so skip it.
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path,
                            line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub("", line).strip(),
                                    line_req.name,
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            # Emit the installed version and remove it so the
                            # trailing section doesn't repeat it.
                            yield str(installations[line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in req_files.items():
            if len(files) > 1:
                logger.warning(
                    "Requirement %s included multiple times [%s]",
                    name,
                    ", ".join(sorted(set(files))),
                )

        yield ("## The following requirements were added by pip freeze:")
    # Everything installed but not matched above, sorted by name.
    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()
|  | ||||
|  | ||||
def _format_as_name_version(dist: BaseDistribution) -> str:
    """Render ``name==version`` for a distribution.

    Uses ``===`` when the version is not a standard (PEP 440) Version.
    """
    version = dist.version
    operator = "==" if isinstance(version, Version) else "==="
    return f"{dist.raw_name}{operator}{version}"
|  | ||||
|  | ||||
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    """
    Compute and return values (req, comments) for use in
    FrozenRequirement.from_dist().
    """
    editable_project_location = dist.editable_project_location
    assert editable_project_location
    location = os.path.normcase(os.path.abspath(editable_project_location))

    # Imported lazily to avoid a module-level import cycle with the vcs
    # package — TODO confirm that is the reason for the local import.
    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs

    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        # Not under version control: fall back to the bare location.
        display = _format_as_name_version(dist)
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r',
            display,
            location,
        )
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable install with no version control ({display})"],
        )

    vcs_name = type(vcs_backend).__name__

    try:
        req = vcs_backend.get_src_requirement(location, dist.raw_name)
    except RemoteNotFoundError:
        # Repo exists but has no remote to reference; emit the location.
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
        )
    except RemoteNotValidError as ex:
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[
                f"# Editable {vcs_name} install ({display}) with either a deleted "
                f"local remote or invalid URI:",
                f"# '{ex.url}'",
            ],
        )
    except BadCommand:
        # The VCS executable itself is missing from PATH.
        logger.warning(
            "cannot determine version of editable source in %s "
            "(%s command not found in path)",
            location,
            vcs_backend.name,
        )
        return _EditableInfo(requirement=location, comments=[])
    except InstallationError as exc:
        # Fall through to the generic "could not determine" result below.
        logger.warning("Error when trying to get requirement for VCS system %s", exc)
    else:
        # Success: a proper VCS requirement string was produced.
        return _EditableInfo(requirement=req, comments=[])

    logger.warning("Could not determine repository location of %s", location)

    return _EditableInfo(
        requirement=location,
        comments=["## !! Could not determine repository location"],
    )
|  | ||||
|  | ||||
| class FrozenRequirement: | ||||
|     def __init__( | ||||
|         self, | ||||
|         name: str, | ||||
|         req: str, | ||||
|         editable: bool, | ||||
|         comments: Iterable[str] = (), | ||||
|     ) -> None: | ||||
|         self.name = name | ||||
|         self.canonical_name = canonicalize_name(name) | ||||
|         self.req = req | ||||
|         self.editable = editable | ||||
|         self.comments = comments | ||||
|  | ||||
|     @classmethod | ||||
|     def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement": | ||||
|         editable = dist.editable | ||||
|         if editable: | ||||
|             req, comments = _get_editable_info(dist) | ||||
|         else: | ||||
|             comments = [] | ||||
|             direct_url = dist.direct_url | ||||
|             if direct_url: | ||||
|                 # if PEP 610 metadata is present, use it | ||||
|                 req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name) | ||||
|             else: | ||||
|                 # name==version requirement | ||||
|                 req = _format_as_name_version(dist) | ||||
|  | ||||
|         return cls(dist.raw_name, req, editable, comments=comments) | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         req = self.req | ||||
|         if self.editable: | ||||
|             req = f"-e {req}" | ||||
|         return "\n".join(list(self.comments) + [str(req)]) + "\n" | ||||
| @ -0,0 +1,2 @@ | ||||
| """For modules related to installing packages. | ||||
| """ | ||||
| @ -0,0 +1,46 @@ | ||||
| """Legacy editable installation process, i.e. `setup.py develop`. | ||||
| """ | ||||
| import logging | ||||
| from typing import Optional, Sequence | ||||
|  | ||||
| from pip._internal.build_env import BuildEnvironment | ||||
| from pip._internal.utils.logging import indent_log | ||||
| from pip._internal.utils.setuptools_build import make_setuptools_develop_args | ||||
| from pip._internal.utils.subprocess import call_subprocess | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def install_editable(
    *,
    global_options: Sequence[str],
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
    name: str,
    setup_py_path: str,
    isolated: bool,
    build_env: "BuildEnvironment",
    unpacked_source_directory: str,
) -> None:
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools.
    """
    logger.info("Running setup.py develop for %s", name)

    develop_args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
        use_user_site=use_user_site,
    )

    # Indent the subprocess output under pip's own log, and run inside the
    # build environment so the build dependencies are importable.
    with indent_log(), build_env:
        call_subprocess(
            develop_args,
            command_desc="python setup.py develop",
            cwd=unpacked_source_directory,
        )
| @ -0,0 +1,740 @@ | ||||
| """Support for installing and building the "wheel" binary package format. | ||||
| """ | ||||
|  | ||||
| import collections | ||||
| import compileall | ||||
| import contextlib | ||||
| import csv | ||||
| import importlib | ||||
| import logging | ||||
| import os.path | ||||
| import re | ||||
| import shutil | ||||
| import sys | ||||
| import warnings | ||||
| from base64 import urlsafe_b64encode | ||||
| from email.message import Message | ||||
| from itertools import chain, filterfalse, starmap | ||||
| from typing import ( | ||||
|     IO, | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     BinaryIO, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     Generator, | ||||
|     Iterable, | ||||
|     Iterator, | ||||
|     List, | ||||
|     NewType, | ||||
|     Optional, | ||||
|     Sequence, | ||||
|     Set, | ||||
|     Tuple, | ||||
|     Union, | ||||
|     cast, | ||||
| ) | ||||
| from zipfile import ZipFile, ZipInfo | ||||
|  | ||||
| from pip._vendor.distlib.scripts import ScriptMaker | ||||
| from pip._vendor.distlib.util import get_export_entry | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.exceptions import InstallationError | ||||
| from pip._internal.locations import get_major_minor_version | ||||
| from pip._internal.metadata import ( | ||||
|     BaseDistribution, | ||||
|     FilesystemWheel, | ||||
|     get_wheel_distribution, | ||||
| ) | ||||
| from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl | ||||
| from pip._internal.models.scheme import SCHEME_KEYS, Scheme | ||||
| from pip._internal.utils.filesystem import adjacent_tmp_file, replace | ||||
| from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition | ||||
| from pip._internal.utils.unpacking import ( | ||||
|     current_umask, | ||||
|     is_within_directory, | ||||
|     set_extracted_file_to_default_mode_plus_executable, | ||||
|     zip_item_is_executable, | ||||
| ) | ||||
| from pip._internal.utils.wheel import parse_wheel | ||||
|  | ||||
if TYPE_CHECKING:
    from typing import Protocol

    # Structural (duck) type for the objects installed by _install_wheel;
    # implemented by ZipBackedFile and ScriptFile below.
    class File(Protocol):
        src_record_path: "RecordPath"
        dest_path: str
        changed: bool

        def save(self) -> None:
            pass


logger = logging.getLogger(__name__)

# A path as it appears in a wheel's RECORD file ("/"-separated).
RecordPath = NewType("RecordPath", str)
# One RECORD row: (path, hash, size) — see PEP 376 / PEP 427.
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
|  | ||||
|  | ||||
def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    hasher, file_length = hash_file(path, blocksize)
    # RECORD uses the PEP 376 form: urlsafe base64, padding stripped.
    encoded = urlsafe_b64encode(hasher.digest()).decode("latin1").rstrip("=")
    return (f"sha256={encoded}", str(file_length))
|  | ||||
|  | ||||
def csv_io_kwargs(mode: str) -> Dict[str, Any]:
    """Return keyword arguments to properly open a CSV file
    in the given mode.
    """
    # RECORD is UTF-8 CSV; newline="" hands line-ending translation to the
    # csv module, as the csv documentation requires.
    return dict(mode=mode, newline="", encoding="utf-8")
|  | ||||
|  | ||||
def fix_script(path: str) -> bool:
    """Replace #!python with #!/path/to/python
    Return True if file was changed.
    """
    # XXX RECORD hashes will need to be updated
    assert os.path.isfile(path)

    with open(path, "rb") as script:
        shebang = script.readline()
        if not shebang.startswith(b"#!python"):
            return False
        remainder = script.read()

    interpreter = sys.executable.encode(sys.getfilesystemencoding())
    with open(path, "wb") as script:
        script.write(b"#!" + interpreter + os.linesep.encode("ascii"))
        script.write(remainder)
    return True
|  | ||||
|  | ||||
def wheel_root_is_purelib(metadata: Message) -> bool:
    """Return True when the WHEEL metadata marks the root as purelib."""
    flag = metadata.get("Root-Is-Purelib", "")
    return flag.lower() == "true"
|  | ||||
|  | ||||
def get_entrypoints(dist: "BaseDistribution") -> Tuple[Dict[str, str], Dict[str, str]]:
    """Split the distribution's entry points into (console, gui) script maps."""
    console: Dict[str, str] = {}
    gui: Dict[str, str] = {}
    buckets = {"console_scripts": console, "gui_scripts": gui}
    for entry_point in dist.iter_entry_points():
        bucket = buckets.get(entry_point.group)
        if bucket is not None:
            bucket[entry_point.name] = entry_point.value
    return console, gui
|  | ||||
|  | ||||
def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group scripts by the directory they were installed in.
    grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
    for destfile in scripts:
        parent_dir = os.path.dirname(destfile)
        script_name = os.path.basename(destfile)
        grouped_by_dir[parent_dir].add(script_name)

    # We don't want to warn for directories that are on PATH.
    not_warn_dirs = [
        os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
        for i in os.environ.get("PATH", "").split(os.pathsep)
    ]
    # If an executable sits with sys.executable, we don't warn for it.
    # This covers the case of venv invocations without activating the venv.
    not_warn_dirs.append(
        os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
    )
    warn_for: Dict[str, Set[str]] = {
        parent_dir: dir_scripts
        for parent_dir, dir_scripts in grouped_by_dir.items()
        if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
    }
    if not warn_for:
        return None

    # Format one message line per offending directory.
    # NOTE: converted from str.format to f-strings for consistency with the
    # rest of this module; output text is unchanged.
    msg_lines = []
    for parent_dir, dir_scripts in warn_for.items():
        sorted_scripts: List[str] = sorted(dir_scripts)
        if len(sorted_scripts) == 1:
            start_text = f"script {sorted_scripts[0]} is"
        else:
            names = ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
            start_text = f"scripts {names} are"

        msg_lines.append(
            f"The {start_text} installed in '{parent_dir}' which is not on PATH."
        )

    target = "this directory" if len(msg_lines) == 1 else "these directories"
    msg_lines.append(
        f"Consider adding {target} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )

    # Add a note if any PATH entry starts with `~`, which not every
    # application expands.
    warn_for_tilde = any(
        i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
    )
    if warn_for_tilde:
        msg_lines.append(
            "NOTE: The current PATH contains path(s) starting with `~`, "
            "which may not be expanded by all applications."
        )

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
|  | ||||
|  | ||||
| def _normalized_outrows( | ||||
|     outrows: Iterable[InstalledCSVRow], | ||||
| ) -> List[Tuple[str, str, str]]: | ||||
|     """Normalize the given rows of a RECORD file. | ||||
|  | ||||
|     Items in each row are converted into str. Rows are then sorted to make | ||||
|     the value more predictable for tests. | ||||
|  | ||||
|     Each row is a 3-tuple (path, hash, size) and corresponds to a record of | ||||
|     a RECORD file (see PEP 376 and PEP 427 for details).  For the rows | ||||
|     passed to this function, the size can be an integer as an int or string, | ||||
|     or the empty string. | ||||
|     """ | ||||
|     # Normally, there should only be one row per path, in which case the | ||||
|     # second and third elements don't come into play when sorting. | ||||
|     # However, in cases in the wild where a path might happen to occur twice, | ||||
|     # we don't want the sort operation to trigger an error (but still want | ||||
|     # determinism).  Since the third element can be an int or string, we | ||||
|     # coerce each element to a string to avoid a TypeError in this case. | ||||
|     # For additional background, see-- | ||||
|     # https://github.com/pypa/pip/issues/5868 | ||||
|     return sorted( | ||||
|         (record_path, hash_, str(size)) for record_path, hash_, size in outrows | ||||
|     ) | ||||
|  | ||||
|  | ||||
def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
    """Resolve a "/"-separated RECORD path to a filesystem path under lib_dir."""
    return os.path.join(lib_dir, record_path)
|  | ||||
|  | ||||
| def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath: | ||||
|     # On Windows, do not handle relative paths if they belong to different | ||||
|     # logical disks | ||||
|     if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower(): | ||||
|         path = os.path.relpath(path, lib_dir) | ||||
|  | ||||
|     path = path.replace(os.path.sep, "/") | ||||
|     return cast("RecordPath", path) | ||||
|  | ||||
|  | ||||
def get_csv_rows_for_installed(
    old_csv_rows: List[List[str]],
    installed: Dict["RecordPath", "RecordPath"],
    changed: Set["RecordPath"],
    generated: List[str],
    lib_dir: str,
) -> List["InstalledCSVRow"]:
    """
    :param installed: A map from archive RECORD path to installation RECORD
        path.
    """
    rows: List["InstalledCSVRow"] = []
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning("RECORD line has more than three elements: %s", row)
        src_path = cast("RecordPath", row[0])
        dest_path = installed.pop(src_path, src_path)
        if dest_path in changed:
            # The file was modified during install (e.g. shebang rewrite),
            # so its recorded hash/size must be recomputed.
            digest, length = rehash(_record_to_fs_path(dest_path, lib_dir))
        else:
            digest = row[1] if len(row) > 1 else ""
            length = row[2] if len(row) > 2 else ""
        rows.append((dest_path, digest, length))

    # Files pip itself generated (scripts, INSTALLER, ...) get fresh hashes.
    for generated_path in generated:
        record_path = _fs_to_record_path(generated_path, lib_dir)
        digest, length = rehash(generated_path)
        rows.append((record_path, digest, length))

    # Anything left in `installed` had no entry in the wheel's RECORD.
    rows.extend((leftover, "", "") for leftover in installed.values())
    return rows
|  | ||||
|  | ||||
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
    """
    Given the mapping from entrypoint name to callable, return the relevant
    console script specs.
    """
    # Don't mutate caller's version
    console = console.copy()

    specs: List[str] = []

    # Special case pip and setuptools to generate versioned wrappers.
    #
    # pip and setuptools bake "versioned" entry points (pip3.3, pipX.Y, ...)
    # into their wheel metadata at build time, so installing that wheel under
    # a *different* Python would produce wrongly-versioned scripts. Until the
    # metadata can describe versioned entry points, we drop the baked-in ones
    # and regenerate correct wrappers here.
    #
    # To support ensurepip, the ENSUREPIP_OPTIONS environment variable
    # controls which versioned scripts get installed:
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed
    #     (i.e. any value other than altinstall)
    # DEFAULT (unset)
    #   - pip, pipX, pipX.Y, easy_install and easy_install-X.Y
    pip_script = console.pop("pip", None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            specs.append(f"pip = {pip_script}")

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            specs.append(f"pip{sys.version_info[0]} = {pip_script}")

        specs.append(f"pip{get_major_minor_version()} = {pip_script}")
        # Delete any other versioned pip entry points
        for key in [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]:
            del console[key]

    easy_install_script = console.pop("easy_install", None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            specs.append(f"easy_install = {easy_install_script}")

        specs.append(
            f"easy_install-{get_major_minor_version()} = {easy_install_script}"
        )
        # Delete any other versioned easy_install entry points
        for key in [
            k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
        ]:
            del console[key]

    # Generate the console entry points specified in the wheel
    specs.extend(f"{name} = {target}" for name, target in console.items())

    return specs
|  | ||||
|  | ||||
class ZipBackedFile:
    """A single wheel member, copied from the zip to its final location."""

    def __init__(
        self, src_record_path: "RecordPath", dest_path: str, zip_file: ZipFile
    ) -> None:
        self.src_record_path = src_record_path
        self.dest_path = dest_path
        self._zip_file = zip_file
        self.changed = False

    def _getinfo(self) -> ZipInfo:
        return self._zip_file.getinfo(self.src_record_path)

    def save(self) -> None:
        # directory creation is lazy and after file filtering
        # to ensure we don't install empty dirs; empty dirs can't be
        # uninstalled.
        ensure_dir(os.path.dirname(self.dest_path))

        # Opening the destination for writing truncates any existing file.
        # That is usually fine, but if pip has mmap'd a shared object from
        # this path (e.g. pyopenssl via vendored urllib3), truncation can
        # segfault the process. Unlinking first lets the old copy live on
        # while the new contents are written.
        if os.path.exists(self.dest_path):
            os.unlink(self.dest_path)

        member = self._getinfo()

        with self._zip_file.open(member) as src:
            with open(self.dest_path, "wb") as dst:
                shutil.copyfileobj(src, dst)

        if zip_item_is_executable(member):
            set_extracted_file_to_default_mode_plus_executable(self.dest_path)
|  | ||||
|  | ||||
class ScriptFile:
    """Wrap another File and fix its ``#!python`` shebang after saving."""

    def __init__(self, file: "File") -> None:
        self._file = file
        self.src_record_path = file.src_record_path
        self.dest_path = file.dest_path
        self.changed = False

    def save(self) -> None:
        self._file.save()
        # A rewritten shebang counts as a change for RECORD re-hashing.
        self.changed = fix_script(self.dest_path)
|  | ||||
|  | ||||
class MissingCallableSuffix(InstallationError):
    """Raised for a script entry point that names no callable suffix."""

    def __init__(self, entry_point: str) -> None:
        message = (
            "Invalid script entry point: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information.".format(entry_point)
        )
        super().__init__(message)
|  | ||||
|  | ||||
def _raise_for_invalid_entrypoint(specification: str) -> None:
    """Reject entry-point specs that name a module without a callable."""
    entry = get_export_entry(specification)
    if entry is None:
        return
    if entry.suffix is None:
        raise MissingCallableSuffix(str(entry))
|  | ||||
|  | ||||
class PipScriptMaker(ScriptMaker):
    # ScriptMaker subclass that validates entry-point specs before
    # generating script wrappers.
    def make(
        self, specification: str, options: Optional[Dict[str, Any]] = None
    ) -> List[str]:
        """Generate script(s) for *specification*, failing early (with a
        helpful error) on an entry point without a callable suffix instead
        of writing a broken script. Returns the list of generated paths."""
        _raise_for_invalid_entrypoint(specification)
        return super().make(specification, options)
|  | ||||
|  | ||||
| def _install_wheel( | ||||
|     name: str, | ||||
|     wheel_zip: ZipFile, | ||||
|     wheel_path: str, | ||||
|     scheme: Scheme, | ||||
|     pycompile: bool = True, | ||||
|     warn_script_location: bool = True, | ||||
|     direct_url: Optional[DirectUrl] = None, | ||||
|     requested: bool = False, | ||||
| ) -> None: | ||||
|     """Install a wheel. | ||||
|  | ||||
|     :param name: Name of the project to install | ||||
|     :param wheel_zip: open ZipFile for wheel being installed | ||||
|     :param scheme: Distutils scheme dictating the install directories | ||||
|     :param req_description: String used in place of the requirement, for | ||||
|         logging | ||||
|     :param pycompile: Whether to byte-compile installed Python files | ||||
|     :param warn_script_location: Whether to check that scripts are installed | ||||
|         into a directory on PATH | ||||
|     :raises UnsupportedWheel: | ||||
|         * when the directory holds an unpacked wheel with incompatible | ||||
|           Wheel-Version | ||||
|         * when the .dist-info dir does not match the wheel | ||||
|     """ | ||||
|     info_dir, metadata = parse_wheel(wheel_zip, name) | ||||
|  | ||||
|     if wheel_root_is_purelib(metadata): | ||||
|         lib_dir = scheme.purelib | ||||
|     else: | ||||
|         lib_dir = scheme.platlib | ||||
|  | ||||
|     # Record details of the files moved | ||||
|     #   installed = files copied from the wheel to the destination | ||||
|     #   changed = files changed while installing (scripts #! line typically) | ||||
|     #   generated = files newly generated during the install (script wrappers) | ||||
|     installed: Dict[RecordPath, RecordPath] = {} | ||||
|     changed: Set[RecordPath] = set() | ||||
|     generated: List[str] = [] | ||||
|  | ||||
|     def record_installed( | ||||
|         srcfile: RecordPath, destfile: str, modified: bool = False | ||||
|     ) -> None: | ||||
|         """Map archive RECORD paths to installation RECORD paths.""" | ||||
|         newpath = _fs_to_record_path(destfile, lib_dir) | ||||
|         installed[srcfile] = newpath | ||||
|         if modified: | ||||
|             changed.add(newpath) | ||||
|  | ||||
|     def is_dir_path(path: RecordPath) -> bool: | ||||
|         return path.endswith("/") | ||||
|  | ||||
|     def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None: | ||||
|         if not is_within_directory(dest_dir_path, target_path): | ||||
|             message = ( | ||||
|                 "The wheel {!r} has a file {!r} trying to install" | ||||
|                 " outside the target directory {!r}" | ||||
|             ) | ||||
|             raise InstallationError( | ||||
|                 message.format(wheel_path, target_path, dest_dir_path) | ||||
|             ) | ||||
|  | ||||
|     def root_scheme_file_maker( | ||||
|         zip_file: ZipFile, dest: str | ||||
|     ) -> Callable[[RecordPath], "File"]: | ||||
|         def make_root_scheme_file(record_path: RecordPath) -> "File": | ||||
|             normed_path = os.path.normpath(record_path) | ||||
|             dest_path = os.path.join(dest, normed_path) | ||||
|             assert_no_path_traversal(dest, dest_path) | ||||
|             return ZipBackedFile(record_path, dest_path, zip_file) | ||||
|  | ||||
|         return make_root_scheme_file | ||||
|  | ||||
|     def data_scheme_file_maker( | ||||
|         zip_file: ZipFile, scheme: Scheme | ||||
|     ) -> Callable[[RecordPath], "File"]: | ||||
|         scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS} | ||||
|  | ||||
|         def make_data_scheme_file(record_path: RecordPath) -> "File": | ||||
|             normed_path = os.path.normpath(record_path) | ||||
|             try: | ||||
|                 _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) | ||||
|             except ValueError: | ||||
|                 message = ( | ||||
|                     "Unexpected file in {}: {!r}. .data directory contents" | ||||
|                     " should be named like: '<scheme key>/<path>'." | ||||
|                 ).format(wheel_path, record_path) | ||||
|                 raise InstallationError(message) | ||||
|  | ||||
|             try: | ||||
|                 scheme_path = scheme_paths[scheme_key] | ||||
|             except KeyError: | ||||
|                 valid_scheme_keys = ", ".join(sorted(scheme_paths)) | ||||
|                 message = ( | ||||
|                     "Unknown scheme key used in {}: {} (for file {!r}). .data" | ||||
|                     " directory contents should be in subdirectories named" | ||||
|                     " with a valid scheme key ({})" | ||||
|                 ).format(wheel_path, scheme_key, record_path, valid_scheme_keys) | ||||
|                 raise InstallationError(message) | ||||
|  | ||||
|             dest_path = os.path.join(scheme_path, dest_subpath) | ||||
|             assert_no_path_traversal(scheme_path, dest_path) | ||||
|             return ZipBackedFile(record_path, dest_path, zip_file) | ||||
|  | ||||
|         return make_data_scheme_file | ||||
|  | ||||
|     def is_data_scheme_path(path: RecordPath) -> bool: | ||||
|         return path.split("/", 1)[0].endswith(".data") | ||||
|  | ||||
|     paths = cast(List[RecordPath], wheel_zip.namelist()) | ||||
|     file_paths = filterfalse(is_dir_path, paths) | ||||
|     root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths) | ||||
|  | ||||
|     make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir) | ||||
|     files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths) | ||||
|  | ||||
|     def is_script_scheme_path(path: RecordPath) -> bool: | ||||
|         parts = path.split("/", 2) | ||||
|         return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" | ||||
|  | ||||
|     other_scheme_paths, script_scheme_paths = partition( | ||||
|         is_script_scheme_path, data_scheme_paths | ||||
|     ) | ||||
|  | ||||
|     make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) | ||||
|     other_scheme_files = map(make_data_scheme_file, other_scheme_paths) | ||||
|     files = chain(files, other_scheme_files) | ||||
|  | ||||
|     # Get the defined entry points | ||||
|     distribution = get_wheel_distribution( | ||||
|         FilesystemWheel(wheel_path), | ||||
|         canonicalize_name(name), | ||||
|     ) | ||||
|     console, gui = get_entrypoints(distribution) | ||||
|  | ||||
|     def is_entrypoint_wrapper(file: "File") -> bool: | ||||
|         # EP, EP.exe and EP-script.py are scripts generated for | ||||
|         # entry point EP by setuptools | ||||
|         path = file.dest_path | ||||
|         name = os.path.basename(path) | ||||
|         if name.lower().endswith(".exe"): | ||||
|             matchname = name[:-4] | ||||
|         elif name.lower().endswith("-script.py"): | ||||
|             matchname = name[:-10] | ||||
|         elif name.lower().endswith(".pya"): | ||||
|             matchname = name[:-4] | ||||
|         else: | ||||
|             matchname = name | ||||
|         # Ignore setuptools-generated scripts | ||||
|         return matchname in console or matchname in gui | ||||
|  | ||||
|     script_scheme_files: Iterator[File] = map( | ||||
|         make_data_scheme_file, script_scheme_paths | ||||
|     ) | ||||
|     script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files) | ||||
|     script_scheme_files = map(ScriptFile, script_scheme_files) | ||||
|     files = chain(files, script_scheme_files) | ||||
|  | ||||
|     for file in files: | ||||
|         file.save() | ||||
|         record_installed(file.src_record_path, file.dest_path, file.changed) | ||||
|  | ||||
|     def pyc_source_file_paths() -> Generator[str, None, None]: | ||||
|         # We de-duplicate installation paths, since there can be overlap (e.g. | ||||
|         # file in .data maps to same location as file in wheel root). | ||||
|         # Sorting installation paths makes it easier to reproduce and debug | ||||
|         # issues related to permissions on existing files. | ||||
|         for installed_path in sorted(set(installed.values())): | ||||
|             full_installed_path = os.path.join(lib_dir, installed_path) | ||||
|             if not os.path.isfile(full_installed_path): | ||||
|                 continue | ||||
|             if not full_installed_path.endswith(".py"): | ||||
|                 continue | ||||
|             yield full_installed_path | ||||
|  | ||||
|     def pyc_output_path(path: str) -> str: | ||||
|         """Return the path the pyc file would have been written to.""" | ||||
|         return importlib.util.cache_from_source(path) | ||||
|  | ||||
|     # Compile all of the pyc files for the installed files | ||||
|     if pycompile: | ||||
|         with captured_stdout() as stdout: | ||||
|             with warnings.catch_warnings(): | ||||
|                 warnings.filterwarnings("ignore") | ||||
|                 for path in pyc_source_file_paths(): | ||||
|                     success = compileall.compile_file(path, force=True, quiet=True) | ||||
|                     if success: | ||||
|                         pyc_path = pyc_output_path(path) | ||||
|                         assert os.path.exists(pyc_path) | ||||
|                         pyc_record_path = cast( | ||||
|                             "RecordPath", pyc_path.replace(os.path.sep, "/") | ||||
|                         ) | ||||
|                         record_installed(pyc_record_path, pyc_path) | ||||
|         logger.debug(stdout.getvalue()) | ||||
|  | ||||
|     maker = PipScriptMaker(None, scheme.scripts) | ||||
|  | ||||
|     # Ensure old scripts are overwritten. | ||||
|     # See https://github.com/pypa/pip/issues/1800 | ||||
|     maker.clobber = True | ||||
|  | ||||
|     # Ensure we don't generate any variants for scripts because this is almost | ||||
|     # never what somebody wants. | ||||
|     # See https://bitbucket.org/pypa/distlib/issue/35/ | ||||
|     maker.variants = {""} | ||||
|  | ||||
|     # This is required because otherwise distlib creates scripts that are not | ||||
|     # executable. | ||||
|     # See https://bitbucket.org/pypa/distlib/issue/32/ | ||||
|     maker.set_mode = True | ||||
|  | ||||
|     # Generate the console and GUI entry points specified in the wheel | ||||
|     scripts_to_generate = get_console_script_specs(console) | ||||
|  | ||||
|     gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items())) | ||||
|  | ||||
|     generated_console_scripts = maker.make_multiple(scripts_to_generate) | ||||
|     generated.extend(generated_console_scripts) | ||||
|  | ||||
|     generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) | ||||
|  | ||||
|     if warn_script_location: | ||||
|         msg = message_about_scripts_not_on_PATH(generated_console_scripts) | ||||
|         if msg is not None: | ||||
|             logger.warning(msg) | ||||
|  | ||||
|     generated_file_mode = 0o666 & ~current_umask() | ||||
|  | ||||
|     @contextlib.contextmanager | ||||
|     def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: | ||||
|         with adjacent_tmp_file(path, **kwargs) as f: | ||||
|             yield f | ||||
|         os.chmod(f.name, generated_file_mode) | ||||
|         replace(f.name, path) | ||||
|  | ||||
|     dest_info_dir = os.path.join(lib_dir, info_dir) | ||||
|  | ||||
|     # Record pip as the installer | ||||
|     installer_path = os.path.join(dest_info_dir, "INSTALLER") | ||||
|     with _generate_file(installer_path) as installer_file: | ||||
|         installer_file.write(b"pip\n") | ||||
|     generated.append(installer_path) | ||||
|  | ||||
|     # Record the PEP 610 direct URL reference | ||||
|     if direct_url is not None: | ||||
|         direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) | ||||
|         with _generate_file(direct_url_path) as direct_url_file: | ||||
|             direct_url_file.write(direct_url.to_json().encode("utf-8")) | ||||
|         generated.append(direct_url_path) | ||||
|  | ||||
|     # Record the REQUESTED file | ||||
|     if requested: | ||||
|         requested_path = os.path.join(dest_info_dir, "REQUESTED") | ||||
|         with open(requested_path, "wb"): | ||||
|             pass | ||||
|         generated.append(requested_path) | ||||
|  | ||||
|     record_text = distribution.read_text("RECORD") | ||||
|     record_rows = list(csv.reader(record_text.splitlines())) | ||||
|  | ||||
|     rows = get_csv_rows_for_installed( | ||||
|         record_rows, | ||||
|         installed=installed, | ||||
|         changed=changed, | ||||
|         generated=generated, | ||||
|         lib_dir=lib_dir, | ||||
|     ) | ||||
|  | ||||
|     # Record details of all files installed | ||||
|     record_path = os.path.join(dest_info_dir, "RECORD") | ||||
|  | ||||
|     with _generate_file(record_path, **csv_io_kwargs("w")) as record_file: | ||||
|         # Explicitly cast to typing.IO[str] as a workaround for the mypy error: | ||||
|         # "writer" has incompatible type "BinaryIO"; expected "_Writer" | ||||
|         writer = csv.writer(cast("IO[str]", record_file)) | ||||
|         writer.writerows(_normalized_outrows(rows)) | ||||
|  | ||||
|  | ||||
@contextlib.contextmanager
def req_error_context(req_description: str) -> Generator[None, None, None]:
    """Re-raise any InstallationError with the requirement description prepended."""
    try:
        yield
    except InstallationError as exc:
        raise InstallationError(
            "For req: {}. {}".format(req_description, exc.args[0])
        ) from exc
|  | ||||
|  | ||||
def install_wheel(
    name: str,
    wheel_path: str,
    scheme: Scheme,
    req_description: str,
    pycompile: bool = True,
    warn_script_location: bool = True,
    direct_url: Optional[DirectUrl] = None,
    requested: bool = False,
) -> None:
    """Unpack the wheel at ``wheel_path`` and install it into ``scheme``.

    Any InstallationError raised during installation is re-raised with
    ``req_description`` prepended for context.
    """
    with ZipFile(wheel_path, allowZip64=True) as wheel_zip, req_error_context(
        req_description
    ):
        _install_wheel(
            name=name,
            wheel_zip=wheel_zip,
            wheel_path=wheel_path,
            scheme=scheme,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=direct_url,
            requested=requested,
        )
| @ -0,0 +1,743 @@ | ||||
| """Prepares a distribution for installation | ||||
| """ | ||||
|  | ||||
| # The following comment should be removed at some point in the future. | ||||
| # mypy: strict-optional=False | ||||
|  | ||||
| import logging | ||||
| import mimetypes | ||||
| import os | ||||
| import shutil | ||||
| from typing import Dict, Iterable, List, Optional | ||||
|  | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
|  | ||||
| from pip._internal.distributions import make_distribution_for_install_requirement | ||||
| from pip._internal.distributions.installed import InstalledDistribution | ||||
| from pip._internal.exceptions import ( | ||||
|     DirectoryUrlHashUnsupported, | ||||
|     HashMismatch, | ||||
|     HashUnpinned, | ||||
|     InstallationError, | ||||
|     MetadataInconsistent, | ||||
|     NetworkConnectionError, | ||||
|     PreviousBuildDirError, | ||||
|     VcsHashUnsupported, | ||||
| ) | ||||
| from pip._internal.index.package_finder import PackageFinder | ||||
| from pip._internal.metadata import BaseDistribution, get_metadata_distribution | ||||
| from pip._internal.models.direct_url import ArchiveInfo | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.models.wheel import Wheel | ||||
| from pip._internal.network.download import BatchDownloader, Downloader | ||||
| from pip._internal.network.lazy_wheel import ( | ||||
|     HTTPRangeRequestUnsupported, | ||||
|     dist_from_wheel_url, | ||||
| ) | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.operations.build.build_tracker import BuildTracker | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
| from pip._internal.utils.direct_url_helpers import ( | ||||
|     direct_url_for_editable, | ||||
|     direct_url_from_link, | ||||
| ) | ||||
| from pip._internal.utils.hashes import Hashes, MissingHashes | ||||
| from pip._internal.utils.logging import indent_log | ||||
| from pip._internal.utils.misc import ( | ||||
|     display_path, | ||||
|     hash_file, | ||||
|     hide_url, | ||||
|     is_installable_dir, | ||||
| ) | ||||
| from pip._internal.utils.temp_dir import TempDirectory | ||||
| from pip._internal.utils.unpacking import unpack_file | ||||
| from pip._internal.vcs import vcs | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
def _get_prepared_distribution(
    req: InstallRequirement,
    build_tracker: BuildTracker,
    finder: PackageFinder,
    build_isolation: bool,
    check_build_deps: bool,
) -> BaseDistribution:
    """Prepare a distribution for installation and return its metadata."""
    dist = make_distribution_for_install_requirement(req)
    # Track the build so concurrent pip processes don't build the same
    # requirement simultaneously.
    with build_tracker.track(req):
        dist.prepare_distribution_metadata(finder, build_isolation, check_build_deps)
    return dist.get_metadata_distribution()
|  | ||||
|  | ||||
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
    """Check out the VCS URL in ``link`` into ``location`` via its backend."""
    backend = vcs.get_backend_for_scheme(link.scheme)
    assert backend is not None
    backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
|  | ||||
|  | ||||
class File:
    """A local file path paired with its (possibly guessed) content type."""

    def __init__(self, path: str, content_type: Optional[str]) -> None:
        self.path = path
        # When no explicit content type is given, fall back to guessing
        # from the file extension (may still be None for unknown types).
        if content_type is None:
            content_type = mimetypes.guess_type(path)[0]
        self.content_type = content_type
|  | ||||
|  | ||||
def get_http_url(
    link: Link,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> File:
    """Return the local file for ``link``, downloading it if necessary."""
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # Reuse a previously downloaded copy if one exists in download_dir and
    # passes the hash check.
    already_downloaded = (
        _check_download_dir(link, download_dir, hashes) if download_dir else None
    )
    if already_downloaded:
        return File(already_downloaded, None)

    # Otherwise download into the globally-managed temporary directory.
    from_path, content_type = download(link, temp_dir.path)
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, content_type)
|  | ||||
|  | ||||
def get_file_url(
    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
    """Get file and optionally check its hash."""
    # Prefer a valid, previously downloaded copy when a download dir is given.
    already_downloaded = (
        _check_download_dir(link, download_dir, hashes) if download_dir else None
    )
    from_path = already_downloaded or link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, None)
|  | ||||
|  | ||||
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # Non-editable VCS URLs are checked out directly; no archive to return.
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    assert not link.is_existing_dir()

    # Obtain the archive: from disk for file: URLs, otherwise over HTTP.
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)
    else:
        file = get_http_url(link, download, download_dir, hashes=hashes)

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
|  | ||||
|  | ||||
def _check_download_dir(
    link: Link,
    download_dir: str,
    hashes: Optional[Hashes],
    warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
    """Return the path of a previously downloaded, hash-valid file, or None.

    A stale file whose hash does not match is deleted so it can be
    re-fetched by the caller.
    """
    download_path = os.path.join(download_dir, link.filename)
    if not os.path.exists(download_path):
        return None

    logger.info("File was already downloaded %s", download_path)
    if not hashes:
        return download_path

    try:
        hashes.check_against_path(download_path)
    except HashMismatch:
        if warn_on_hash_mismatch:
            logger.warning(
                "Previously-downloaded file %s has bad hash. Re-downloading.",
                download_path,
            )
        # Remove the bad file so a fresh download can replace it.
        os.unlink(download_path)
        return None
    return download_path
|  | ||||
|  | ||||
| class RequirementPreparer: | ||||
|     """Prepares a Requirement""" | ||||
|  | ||||
    def __init__(
        self,
        build_dir: str,
        download_dir: Optional[str],
        src_dir: str,
        build_isolation: bool,
        check_build_deps: bool,
        build_tracker: BuildTracker,
        session: PipSession,
        progress_bar: str,
        finder: PackageFinder,
        require_hashes: bool,
        use_user_site: bool,
        lazy_wheel: bool,
        verbosity: int,
        legacy_resolver: bool,
    ) -> None:
        """Store preparation settings and build the download helpers.

        This constructor only records configuration; no network or
        filesystem work happens here.
        """
        super().__init__()

        # Working directories and collaborators used during preparation.
        self.src_dir = src_dir
        self.build_dir = build_dir
        self.build_tracker = build_tracker
        self._session = session
        self._download = Downloader(session, progress_bar)
        self._batch_download = BatchDownloader(session, progress_bar)
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should check build dependencies?
        self.check_build_deps = check_build_deps

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

        # Should wheels be downloaded lazily?
        self.use_lazy_wheel = lazy_wheel

        # How verbose should underlying tooling be?
        self.verbosity = verbosity

        # Are we using the legacy resolver?
        self.legacy_resolver = legacy_resolver

        # Memoized downloaded files, as mapping of url: path.
        self._downloaded: Dict[str, str] = {}

        # Previous "header" printed for a link-based InstallRequirement
        self._previous_requirement_header = ("", "")
|  | ||||
|     def _log_preparing_link(self, req: InstallRequirement) -> None: | ||||
|         """Provide context for the requirement being prepared.""" | ||||
|         if req.link.is_file and not req.is_wheel_from_cache: | ||||
|             message = "Processing %s" | ||||
|             information = str(display_path(req.link.file_path)) | ||||
|         else: | ||||
|             message = "Collecting %s" | ||||
|             information = str(req.req or req) | ||||
|  | ||||
|         # If we used req.req, inject requirement source if available (this | ||||
|         # would already be included if we used req directly) | ||||
|         if req.req and req.comes_from: | ||||
|             if isinstance(req.comes_from, str): | ||||
|                 comes_from: Optional[str] = req.comes_from | ||||
|             else: | ||||
|                 comes_from = req.comes_from.from_path() | ||||
|             if comes_from: | ||||
|                 information += f" (from {comes_from})" | ||||
|  | ||||
|         if (message, information) != self._previous_requirement_header: | ||||
|             self._previous_requirement_header = (message, information) | ||||
|             logger.info(message, information) | ||||
|  | ||||
|         if req.is_wheel_from_cache: | ||||
|             with indent_log(): | ||||
|                 logger.info("Using cached %s", req.link.filename) | ||||
|  | ||||
|     def _ensure_link_req_src_dir( | ||||
|         self, req: InstallRequirement, parallel_builds: bool | ||||
|     ) -> None: | ||||
|         """Ensure source_dir of a linked InstallRequirement.""" | ||||
|         # Since source_dir is only set for editable requirements. | ||||
|         if req.link.is_wheel: | ||||
|             # We don't need to unpack wheels, so no need for a source | ||||
|             # directory. | ||||
|             return | ||||
|         assert req.source_dir is None | ||||
|         if req.link.is_existing_dir(): | ||||
|             # build local directories in-tree | ||||
|             req.source_dir = req.link.file_path | ||||
|             return | ||||
|  | ||||
|         # We always delete unpacked sdists after pip runs. | ||||
|         req.ensure_has_source_dir( | ||||
|             self.build_dir, | ||||
|             autodelete=True, | ||||
|             parallel_builds=parallel_builds, | ||||
|         ) | ||||
|  | ||||
|         # If a checkout exists, it's unwise to keep going.  version | ||||
|         # inconsistencies are logged later, but do not fail the | ||||
|         # installation. | ||||
|         # FIXME: this won't upgrade when there's an existing | ||||
|         # package unpacked in `req.source_dir` | ||||
|         # TODO: this check is now probably dead code | ||||
|         if is_installable_dir(req.source_dir): | ||||
|             raise PreviousBuildDirError( | ||||
|                 "pip can't proceed with requirements '{}' due to a" | ||||
|                 "pre-existing build directory ({}). This is likely " | ||||
|                 "due to a previous installation that failed . pip is " | ||||
|                 "being responsible and not assuming it can delete this. " | ||||
|                 "Please delete it and try again.".format(req, req.source_dir) | ||||
|             ) | ||||
|  | ||||
    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
        """Return the Hashes the artifact downloaded for ``req`` must match.

        Without --require-hashes, any hash embedded in the requirement's URL
        is trusted. With --require-hashes, unhashable requirement kinds (VCS
        URLs, local directories) and unpinned requirements are rejected
        up-front with targeted exceptions, and a requirement with no known
        hashes gets a MissingHashes facade that will fail with the computed
        hash displayed.
        """
        # By the time this is called, the requirement's link should have
        # been checked so we can tell what kind of requirements req is
        # and raise some more informative errors than otherwise.
        # (For example, we can raise VcsHashUnsupported for a VCS URL
        # rather than HashMissing.)
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # We could check these first 2 conditions inside unpack_url
        # and save repetition of conditions, but then we would
        # report less-useful error messages for unhashable
        # requirements, complaining that there's no hash provided.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded.  This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        # file:/// URLs aren't pinnable, so don't complain about them
        # not being pinned.
        if not req.is_direct and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement,
        # shim it with a facade object that will provoke hash
        # computation and then raise a HashMissing exception
        # showing the user what the hash should be.
        return req.hashes(trust_internet=False) or MissingHashes()
|  | ||||
|     def _fetch_metadata_only( | ||||
|         self, | ||||
|         req: InstallRequirement, | ||||
|     ) -> Optional[BaseDistribution]: | ||||
|         if self.legacy_resolver: | ||||
|             logger.debug( | ||||
|                 "Metadata-only fetching is not used in the legacy resolver", | ||||
|             ) | ||||
|             return None | ||||
|         if self.require_hashes: | ||||
|             logger.debug( | ||||
|                 "Metadata-only fetching is not used as hash checking is required", | ||||
|             ) | ||||
|             return None | ||||
|         # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable. | ||||
|         return self._fetch_metadata_using_link_data_attr( | ||||
|             req | ||||
|         ) or self._fetch_metadata_using_lazy_wheel(req.link) | ||||
|  | ||||
    def _fetch_metadata_using_link_data_attr(
        self,
        req: InstallRequirement,
    ) -> Optional[BaseDistribution]:
        """Fetch metadata from the data-dist-info-metadata attribute, if possible.

        Returns None when the index did not advertise a standalone metadata
        file for this link. Raises MetadataInconsistent when the fetched
        metadata's name does not match the requirement's name.
        """
        # (1) Get the link to the metadata file, if provided by the backend.
        metadata_link = req.link.metadata_link()
        if metadata_link is None:
            return None
        assert req.req is not None
        logger.info(
            "Obtaining dependency information for %s from %s",
            req.req,
            metadata_link,
        )
        # (2) Download the contents of the METADATA file, separate from the dist itself.
        # The metadata link carries its own hashes, which are verified here.
        metadata_file = get_http_url(
            metadata_link,
            self._download,
            hashes=metadata_link.as_hashes(),
        )
        with open(metadata_file.path, "rb") as f:
            metadata_contents = f.read()
        # (3) Generate a dist just from those file contents.
        metadata_dist = get_metadata_distribution(
            metadata_contents,
            req.link.filename,
            req.req.name,
        )
        # (4) Ensure the Name: field from the METADATA file matches the name from the
        #     install requirement.
        #
        #     NB: raw_name will fall back to the name from the install requirement if
        #     the Name: field is not present, but it's noted in the raw_name docstring
        #     that that should NEVER happen anyway.
        if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
            raise MetadataInconsistent(
                req, "Name", req.req.name, metadata_dist.raw_name
            )
        return metadata_dist
|  | ||||
|     def _fetch_metadata_using_lazy_wheel( | ||||
|         self, | ||||
|         link: Link, | ||||
|     ) -> Optional[BaseDistribution]: | ||||
|         """Fetch metadata using lazy wheel, if possible.""" | ||||
|         # --use-feature=fast-deps must be provided. | ||||
|         if not self.use_lazy_wheel: | ||||
|             return None | ||||
|         if link.is_file or not link.is_wheel: | ||||
|             logger.debug( | ||||
|                 "Lazy wheel is not used as %r does not point to a remote wheel", | ||||
|                 link, | ||||
|             ) | ||||
|             return None | ||||
|  | ||||
|         wheel = Wheel(link.filename) | ||||
|         name = canonicalize_name(wheel.name) | ||||
|         logger.info( | ||||
|             "Obtaining dependency information from %s %s", | ||||
|             name, | ||||
|             wheel.version, | ||||
|         ) | ||||
|         url = link.url.split("#", 1)[0] | ||||
|         try: | ||||
|             return dist_from_wheel_url(name, url, self._session) | ||||
|         except HTTPRangeRequestUnsupported: | ||||
|             logger.debug("%s does not support range requests", url) | ||||
|             return None | ||||
|  | ||||
    def _complete_partial_requirements(
        self,
        partially_downloaded_reqs: Iterable[InstallRequirement],
        parallel_builds: bool = False,
    ) -> None:
        """Download any requirements which were only fetched by metadata.

        All outstanding links are handed to BatchDownloader at once, then
        each requirement is fully prepared from its downloaded file.
        """
        # Download to a temporary directory. These will be copied over as
        # needed for downstream 'download', 'wheel', and 'install' commands.
        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path

        # Map each link to the requirement that owns it. This allows us to set
        # `req.local_file_path` on the appropriate requirement after passing
        # all the links at once into BatchDownloader.
        links_to_fully_download: Dict[Link, InstallRequirement] = {}
        for req in partially_downloaded_reqs:
            assert req.link
            links_to_fully_download[req.link] = req

        batch_download = self._batch_download(
            links_to_fully_download.keys(),
            temp_dir,
        )
        for link, (filepath, _) in batch_download:
            logger.debug("Downloading link %s to %s", link, filepath)
            req = links_to_fully_download[link]
            req.local_file_path = filepath
            # TODO: This needs fixing for sdists
            # This is an emergency fix for #11847, which reports that
            # distributions get downloaded twice when metadata is loaded
            # from a PEP 658 standalone metadata file. Setting _downloaded
            # fixes this for wheels, but breaks the sdist case (tests
            # test_download_metadata). As PyPI is currently only serving
            # metadata for wheels, this is not an immediate issue.
            # Fixing the problem properly looks like it will require a
            # complete refactoring of the `prepare_linked_requirements_more`
            # logic, and I haven't a clue where to start on that, so for now
            # I have fixed the issue *just* for wheels.
            if req.is_wheel:
                self._downloaded[req.link.url] = filepath

        # This step is necessary to ensure all lazy wheels are processed
        # successfully by the 'download', 'wheel', and 'install' commands.
        for req in partially_downloaded_reqs:
            self._prepare_linked_requirement(req, parallel_builds)
|  | ||||
    def prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool = False
    ) -> BaseDistribution:
        """Prepare a requirement to be obtained from req.link.

        Tries two shortcuts before a full download: an already-downloaded
        wheel in ``self.download_dir``, and a metadata-only fetch (in which
        case ``req.needs_more_preparation`` is set so the actual download
        happens later in ``prepare_linked_requirements_more``).
        """
        assert req.link
        self._log_preparing_link(req)
        with indent_log():
            # Check if the relevant file is already available
            # in the download directory
            file_path = None
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(
                    req.link,
                    self.download_dir,
                    hashes,
                    # When a locally built wheel has been found in cache, we don't warn
                    # about re-downloading when the already downloaded wheel hash does
                    # not match. This is because the hash must be checked against the
                    # original link, not the cached link. It that case the already
                    # downloaded file will be removed and re-fetched from cache (which
                    # implies a hash check against the cache entry's origin.json).
                    warn_on_hash_mismatch=not req.is_wheel_from_cache,
                )

            if file_path is not None:
                # The file is already available, so mark it as downloaded
                self._downloaded[req.link.url] = file_path
            else:
                # The file is not available, attempt to fetch only metadata
                metadata_dist = self._fetch_metadata_only(req)
                if metadata_dist is not None:
                    req.needs_more_preparation = True
                    return metadata_dist

            # None of the optimizations worked, fully prepare the requirement
            return self._prepare_linked_requirement(req, parallel_builds)
|  | ||||
|     def prepare_linked_requirements_more( | ||||
|         self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False | ||||
|     ) -> None: | ||||
|         """Prepare linked requirements more, if needed.""" | ||||
|         reqs = [req for req in reqs if req.needs_more_preparation] | ||||
|         for req in reqs: | ||||
|             # Determine if any of these requirements were already downloaded. | ||||
|             if self.download_dir is not None and req.link.is_wheel: | ||||
|                 hashes = self._get_linked_req_hashes(req) | ||||
|                 file_path = _check_download_dir(req.link, self.download_dir, hashes) | ||||
|                 if file_path is not None: | ||||
|                     self._downloaded[req.link.url] = file_path | ||||
|                     req.needs_more_preparation = False | ||||
|  | ||||
|         # Prepare requirements we found were already downloaded for some | ||||
|         # reason. The other downloads will be completed separately. | ||||
|         partially_downloaded_reqs: List[InstallRequirement] = [] | ||||
|         for req in reqs: | ||||
|             if req.needs_more_preparation: | ||||
|                 partially_downloaded_reqs.append(req) | ||||
|             else: | ||||
|                 self._prepare_linked_requirement(req, parallel_builds) | ||||
|  | ||||
|         # TODO: separate this part out from RequirementPreparer when the v1 | ||||
|         # resolver can be removed! | ||||
|         self._complete_partial_requirements( | ||||
|             partially_downloaded_reqs, | ||||
|             parallel_builds=parallel_builds, | ||||
|         ) | ||||
|  | ||||
    def _prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> BaseDistribution:
        """Fetch/unpack the artifact behind ``req.link`` and return its metadata.

        Three cases are handled: an existing local directory (nothing to
        download), a URL not yet downloaded in this run (unpacked via
        ``unpack_url``), and a file already downloaded earlier (hash-checked
        and reused).
        """
        assert req.link
        link = req.link

        hashes = self._get_linked_req_hashes(req)

        if hashes and req.is_wheel_from_cache:
            assert req.download_info is not None
            assert link.is_wheel
            assert link.is_file
            # We need to verify hashes, and we have found the requirement in the cache
            # of locally built wheels.
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and req.download_info.info.hashes
                and hashes.has_one_of(req.download_info.info.hashes)
            ):
                # At this point we know the requirement was built from a hashable source
                # artifact, and we verified that the cache entry's hash of the original
                # artifact matches one of the hashes we expect. We don't verify hashes
                # against the cached wheel, because the wheel is not the original.
                hashes = None
            else:
                logger.warning(
                    "The hashes of the source archive found in cache entry "
                    "don't match, ignoring cached built wheel "
                    "and re-downloading source."
                )
                # Fall back to the original source artifact the cached wheel
                # was built from, so it can be re-downloaded and re-verified.
                req.link = req.cached_wheel_source_link
                link = req.link

        self._ensure_link_req_src_dir(req, parallel_builds)

        if link.is_existing_dir():
            # Local project directory: there is no single file to download.
            local_file = None
        elif link.url not in self._downloaded:
            try:
                local_file = unpack_url(
                    link,
                    req.source_dir,
                    self._download,
                    self.verbosity,
                    self.download_dir,
                    hashes,
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    "Could not install requirement {} because of HTTP "
                    "error {} for URL {}".format(req, exc, link)
                )
        else:
            # Already downloaded earlier in this run; re-verify hashes if
            # hash-checking is in effect, then reuse the file.
            file_path = self._downloaded[link.url]
            if hashes:
                hashes.check_against_path(file_path)
            local_file = File(file_path, content_type=None)

        # If download_info is set, we got it from the wheel cache.
        if req.download_info is None:
            # Editables don't go through this function (see
            # prepare_editable_requirement).
            assert not req.editable
            req.download_info = direct_url_from_link(link, req.source_dir)
            # Make sure we have a hash in download_info. If we got it as part of the
            # URL, it will have been verified and we can rely on it. Otherwise we
            # compute it from the downloaded file.
            # FIXME: https://github.com/pypa/pip/issues/11943
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and not req.download_info.info.hashes
                and local_file
            ):
                hash = hash_file(local_file.path)[0].hexdigest()
                # We populate info.hash for backward compatibility.
                # This will automatically populate info.hashes.
                req.download_info.info.hash = f"sha256={hash}"

        # For use in later processing,
        # preserve the file path on the requirement.
        if local_file:
            req.local_file_path = local_file.path

        dist = _get_prepared_distribution(
            req,
            self.build_tracker,
            self.finder,
            self.build_isolation,
            self.check_build_deps,
        )
        return dist
|  | ||||
|     def save_linked_requirement(self, req: InstallRequirement) -> None: | ||||
|         assert self.download_dir is not None | ||||
|         assert req.link is not None | ||||
|         link = req.link | ||||
|         if link.is_vcs or (link.is_existing_dir() and req.editable): | ||||
|             # Make a .zip of the source_dir we already created. | ||||
|             req.archive(self.download_dir) | ||||
|             return | ||||
|  | ||||
|         if link.is_existing_dir(): | ||||
|             logger.debug( | ||||
|                 "Not copying link to destination directory " | ||||
|                 "since it is a directory: %s", | ||||
|                 link, | ||||
|             ) | ||||
|             return | ||||
|         if req.local_file_path is None: | ||||
|             # No distribution was downloaded for this requirement. | ||||
|             return | ||||
|  | ||||
|         download_location = os.path.join(self.download_dir, link.filename) | ||||
|         if not os.path.exists(download_location): | ||||
|             shutil.copy(req.local_file_path, download_location) | ||||
|             download_path = display_path(download_location) | ||||
|             logger.info("Saved %s", download_path) | ||||
|  | ||||
|     def prepare_editable_requirement( | ||||
|         self, | ||||
|         req: InstallRequirement, | ||||
|     ) -> BaseDistribution: | ||||
|         """Prepare an editable requirement.""" | ||||
|         assert req.editable, "cannot prepare a non-editable req as editable" | ||||
|  | ||||
|         logger.info("Obtaining %s", req) | ||||
|  | ||||
|         with indent_log(): | ||||
|             if self.require_hashes: | ||||
|                 raise InstallationError( | ||||
|                     "The editable requirement {} cannot be installed when " | ||||
|                     "requiring hashes, because there is no single file to " | ||||
|                     "hash.".format(req) | ||||
|                 ) | ||||
|             req.ensure_has_source_dir(self.src_dir) | ||||
|             req.update_editable() | ||||
|             assert req.source_dir | ||||
|             req.download_info = direct_url_for_editable(req.unpacked_source_directory) | ||||
|  | ||||
|             dist = _get_prepared_distribution( | ||||
|                 req, | ||||
|                 self.build_tracker, | ||||
|                 self.finder, | ||||
|                 self.build_isolation, | ||||
|                 self.check_build_deps, | ||||
|             ) | ||||
|  | ||||
|             req.check_if_exists(self.use_user_site) | ||||
|  | ||||
|         return dist | ||||
|  | ||||
|     def prepare_installed_requirement( | ||||
|         self, | ||||
|         req: InstallRequirement, | ||||
|         skip_reason: str, | ||||
|     ) -> BaseDistribution: | ||||
|         """Prepare an already-installed requirement.""" | ||||
|         assert req.satisfied_by, "req should have been satisfied but isn't" | ||||
|         assert skip_reason is not None, ( | ||||
|             "did not get skip reason skipped but req.satisfied_by " | ||||
|             "is set to {}".format(req.satisfied_by) | ||||
|         ) | ||||
|         logger.info( | ||||
|             "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version | ||||
|         ) | ||||
|         with indent_log(): | ||||
|             if self.require_hashes: | ||||
|                 logger.debug( | ||||
|                     "Since it is already installed, we are trusting this " | ||||
|                     "package without checking its hash. To ensure a " | ||||
|                     "completely repeatable environment, install into an " | ||||
|                     "empty virtualenv." | ||||
|                 ) | ||||
|             return InstalledDistribution(req).get_metadata_distribution() | ||||
| @ -0,0 +1,179 @@ | ||||
| import importlib.util | ||||
| import os | ||||
| from collections import namedtuple | ||||
| from typing import Any, List, Optional | ||||
|  | ||||
| from pip._vendor import tomli | ||||
| from pip._vendor.packaging.requirements import InvalidRequirement, Requirement | ||||
|  | ||||
| from pip._internal.exceptions import ( | ||||
|     InstallationError, | ||||
|     InvalidPyProjectBuildRequires, | ||||
|     MissingPyProjectBuildRequires, | ||||
| ) | ||||
|  | ||||
|  | ||||
| def _is_list_of_str(obj: Any) -> bool: | ||||
|     return isinstance(obj, list) and all(isinstance(item, str) for item in obj) | ||||
|  | ||||
|  | ||||
def make_pyproject_path(unpacked_source_directory: str) -> str:
    """Return the path of the ``pyproject.toml`` inside the given source tree."""
    return os.path.join(unpacked_source_directory, "pyproject.toml")
|  | ||||
|  | ||||
# Result of load_pyproject_toml(): the PEP 518 build requirements, the
# PEP 517 backend name, extra requirements to check after the build
# environment is set up, and the backend-path entries (relative to the
# project root).
BuildSystemDetails = namedtuple(
    "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
)
|  | ||||
|  | ||||
def load_pyproject_toml(
    use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
) -> Optional[BuildSystemDetails]:
    """Load the pyproject.toml file.

    Parameters:
        use_pep517 - Has the user requested PEP 517 processing? None
                     means the user hasn't explicitly specified.
        pyproject_toml - Location of the project's pyproject.toml file
        setup_py - Location of the project's setup.py file
        req_name - The name of the requirement we're processing (for
                   error reporting)

    Returns:
        None if we should use the legacy code path, otherwise a tuple
        (
            requirements from pyproject.toml,
            name of PEP 517 backend,
            requirements we should check are installed after setting
                up the build environment
            directory paths to import the backend from (backend-path),
                relative to the project root.
        )

    Raises:
        InstallationError - neither file exists, or PEP 517 was disabled
            in a situation where it is mandatory.
        MissingPyProjectBuildRequires / InvalidPyProjectBuildRequires -
            the [build-system] table violates PEP 518.
    """
    has_pyproject = os.path.isfile(pyproject_toml)
    has_setup = os.path.isfile(setup_py)

    if not has_pyproject and not has_setup:
        raise InstallationError(
            f"{req_name} does not appear to be a Python project: "
            f"neither 'setup.py' nor 'pyproject.toml' found."
        )

    if has_pyproject:
        # Parse with the vendored TOML reader; "build-system" is the PEP 518 table.
        with open(pyproject_toml, encoding="utf-8") as f:
            pp_toml = tomli.loads(f.read())
        build_system = pp_toml.get("build-system")
    else:
        build_system = None

    # The following cases must use PEP 517
    # We check for use_pep517 being non-None and falsey because that means
    # the user explicitly requested --no-use-pep517.  The value 0 as
    # opposed to False can occur when the value is provided via an
    # environment variable or config file option (due to the quirk of
    # strtobool() returning an integer in pip's configuration code).
    if has_pyproject and not has_setup:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project does not have a setup.py"
            )
        use_pep517 = True
    elif build_system and "build-backend" in build_system:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project specifies a build backend of {} "
                "in pyproject.toml".format(build_system["build-backend"])
            )
        use_pep517 = True

    # If we haven't worked out whether to use PEP 517 yet,
    # and the user hasn't explicitly stated a preference,
    # we do so if the project has a pyproject.toml file
    # or if we cannot import setuptools or wheels.

    # We fallback to PEP 517 when without setuptools or without the wheel package,
    # so setuptools can be installed as a default build backend.
    # For more info see:
    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
    # https://github.com/pypa/pip/issues/8559
    elif use_pep517 is None:
        use_pep517 = (
            has_pyproject
            or not importlib.util.find_spec("setuptools")
            or not importlib.util.find_spec("wheel")
        )

    # At this point, we know whether we're going to use PEP 517.
    assert use_pep517 is not None

    # If we're using the legacy code path, there is nothing further
    # for us to do here.
    if not use_pep517:
        return None

    if build_system is None:
        # Either the user has a pyproject.toml with no build-system
        # section, or the user has no pyproject.toml, but has opted in
        # explicitly via --use-pep517.
        # In the absence of any explicit backend specification, we
        # assume the setuptools backend that most closely emulates the
        # traditional direct setup.py execution, and require wheel and
        # a version of setuptools that supports that backend.

        build_system = {
            "requires": ["setuptools>=40.8.0", "wheel"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }

    # If we're using PEP 517, we have build system information (either
    # from pyproject.toml, or defaulted by the code above).
    # Note that at this point, we do not know if the user has actually
    # specified a backend, though.
    assert build_system is not None

    # Ensure that the build-system section in pyproject.toml conforms
    # to PEP 518.

    # Specifying the build-system table but not the requires key is invalid
    if "requires" not in build_system:
        raise MissingPyProjectBuildRequires(package=req_name)

    # Error out if requires is not a list of strings
    requires = build_system["requires"]
    if not _is_list_of_str(requires):
        raise InvalidPyProjectBuildRequires(
            package=req_name,
            reason="It is not a list of strings.",
        )

    # Each requirement must be valid as per PEP 508
    for requirement in requires:
        try:
            Requirement(requirement)
        except InvalidRequirement as error:
            raise InvalidPyProjectBuildRequires(
                package=req_name,
                reason=f"It contains an invalid requirement: {requirement!r}",
            ) from error

    backend = build_system.get("build-backend")
    backend_path = build_system.get("backend-path", [])
    check: List[str] = []
    if backend is None:
        # If the user didn't specify a backend, we assume they want to use
        # the setuptools backend. But we can't be sure they have included
        # a version of setuptools which supplies the backend. So we
        # make a note to check that this requirement is present once
        # we have set up the environment.
        # This is quite a lot of work to check for a very specific case. But
        # the problem is, that case is potentially quite common - projects that
        # adopted PEP 518 early for the ability to specify requirements to
        # execute setup.py, but never considered needing to mention the build
        # tools themselves. The original PEP 518 code had a similar check (but
        # implemented in a different way).
        backend = "setuptools.build_meta:__legacy__"
        check = ["setuptools>=40.8.0"]

    return BuildSystemDetails(requires, backend, check, backend_path)
| @ -0,0 +1,92 @@ | ||||
| import collections | ||||
| import logging | ||||
| from typing import Generator, List, Optional, Sequence, Tuple | ||||
|  | ||||
| from pip._internal.utils.logging import indent_log | ||||
|  | ||||
| from .req_file import parse_requirements | ||||
| from .req_install import InstallRequirement | ||||
| from .req_set import RequirementSet | ||||
|  | ||||
# Names re-exported as the public surface of this package.
__all__ = [
    "RequirementSet",
    "InstallRequirement",
    "parse_requirements",
    "install_given_reqs",
]

logger = logging.getLogger(__name__)
|  | ||||
|  | ||||
class InstallationResult:
    """Lightweight record naming one successfully installed requirement."""

    def __init__(self, name: str) -> None:
        # Canonical requirement name, as validated by _validate_requirements().
        self.name = name

    def __repr__(self) -> str:
        return "InstallationResult(name={!r})".format(self.name)
|  | ||||
|  | ||||
def _validate_requirements(
    requirements: List[InstallRequirement],
) -> Generator[Tuple[str, InstallRequirement], None, None]:
    """Yield ``(name, requirement)`` pairs, asserting each requirement is named."""
    for install_req in requirements:
        assert install_req.name, f"invalid to-be-installed requirement: {install_req}"
        yield install_req.name, install_req
|  | ||||
|  | ||||
def install_given_reqs(
    requirements: List[InstallRequirement],
    global_options: Sequence[str],
    root: Optional[str],
    home: Optional[str],
    prefix: Optional[str],
    warn_script_location: bool,
    use_user_site: bool,
    pycompile: bool,
) -> List[InstallationResult]:
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """
    to_install = collections.OrderedDict(_validate_requirements(requirements))

    if to_install:
        logger.info(
            "Installing collected packages: %s",
            ", ".join(to_install.keys()),
        )

    results: List[InstallationResult] = []

    with indent_log():
        for name, req in to_install.items():
            # When replacing an existing install, uninstall first and keep
            # the pathset around so a failed install can be rolled back.
            uninstalled = None
            if req.should_reinstall:
                logger.info("Attempting uninstall: %s", name)
                with indent_log():
                    uninstalled = req.uninstall(auto_confirm=True)

            try:
                req.install(
                    global_options,
                    root=root,
                    home=home,
                    prefix=prefix,
                    warn_script_location=warn_script_location,
                    use_user_site=use_user_site,
                    pycompile=pycompile,
                )
            except Exception:
                # if install did not succeed, rollback previous uninstall
                if uninstalled and not req.install_succeeded:
                    uninstalled.rollback()
                raise
            else:
                if uninstalled and req.install_succeeded:
                    uninstalled.commit()

            results.append(InstallationResult(name))

    return results
| @ -0,0 +1,506 @@ | ||||
| """Backing implementation for InstallRequirement's various constructors | ||||
|  | ||||
| The idea here is that these formed a major chunk of InstallRequirement's size | ||||
| so, moving them and support code dedicated to them outside of that class | ||||
| helps creates for better understandability for the rest of the code. | ||||
|  | ||||
| These are meant to be used elsewhere within pip to create instances of | ||||
| InstallRequirement. | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| from typing import Dict, List, Optional, Set, Tuple, Union | ||||
|  | ||||
| from pip._vendor.packaging.markers import Marker | ||||
| from pip._vendor.packaging.requirements import InvalidRequirement, Requirement | ||||
| from pip._vendor.packaging.specifiers import Specifier | ||||
|  | ||||
| from pip._internal.exceptions import InstallationError | ||||
| from pip._internal.models.index import PyPI, TestPyPI | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.models.wheel import Wheel | ||||
| from pip._internal.req.req_file import ParsedRequirement | ||||
| from pip._internal.req.req_install import InstallRequirement | ||||
| from pip._internal.utils.filetypes import is_archive_file | ||||
| from pip._internal.utils.misc import is_installable_dir | ||||
| from pip._internal.utils.packaging import get_requirement | ||||
| from pip._internal.utils.urls import path_to_url | ||||
| from pip._internal.vcs import is_url, vcs | ||||
|  | ||||
| __all__ = [ | ||||
|     "install_req_from_editable", | ||||
|     "install_req_from_line", | ||||
|     "parse_editable", | ||||
| ] | ||||
|  | ||||
logger = logging.getLogger(__name__)
# All comparison operators accepted by version specifiers ("==", "~=", ">=",
# ...); used to detect a lone "=" typo in requirement strings.
operators = Specifier._operators.keys()
|  | ||||
|  | ||||
| def _strip_extras(path: str) -> Tuple[str, Optional[str]]: | ||||
|     m = re.match(r"^(.+)(\[[^\]]+\])$", path) | ||||
|     extras = None | ||||
|     if m: | ||||
|         path_no_extras = m.group(1) | ||||
|         extras = m.group(2) | ||||
|     else: | ||||
|         path_no_extras = path | ||||
|  | ||||
|     return path_no_extras, extras | ||||
|  | ||||
|  | ||||
def convert_extras(extras: Optional[str]) -> Set[str]:
    """Turn a bracketed extras string (e.g. ``"[Foo,bar]"``) into a set of names."""
    if extras:
        # Parse via a placeholder requirement so extras get normalized
        # exactly as packaging would normalize them.
        return get_requirement("placeholder" + extras.lower()).extras
    return set()
|  | ||||
|  | ||||
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]

    :raises InstallationError: if the string is neither a local project path
        nor a recognized VCS URL, or a VCS URL lacks an ``#egg=`` name.
    """

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith("file:"):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                get_requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, set()

    # Prefix bare "scheme:" URLs (e.g. "git:...") with the VCS name so the
    # link below is recognized as a VCS URL ("git+git:...").
    for version_control in vcs:
        if url.lower().startswith(f"{version_control}:"):
            url = f"{version_control}+{url}"
            break

    link = Link(url)

    if not link.is_vcs:
        backends = ", ".join(vcs.all_schemes)
        raise InstallationError(
            f"{editable_req} is not a valid editable requirement. "
            f"It should either be a path to a local project or a VCS URL "
            f"(beginning with {backends})."
        )

    # For VCS editables the name must come from the #egg= fragment.
    package_name = link.egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '{}', please specify one "
            "with #egg=your_package_name".format(editable_req)
        )
    return package_name, url, set()
|  | ||||
|  | ||||
def check_first_requirement_in_file(filename: str) -> None:
    """Check if file is parsable as a requirements file.

    This is heavily based on ``pkg_resources.parse_requirements``, but
    simplified to just check the first meaningful line.

    Returns ``None`` both when the first line parses and when the file has
    no meaningful lines at all.

    :raises InvalidRequirement: If the first meaningful line cannot be parsed
        as an requirement.
    """
    with open(filename, encoding="utf-8", errors="ignore") as f:
        # Create a steppable iterator, so we can handle \-continuations.
        lines = (
            line
            for line in (line.strip() for line in f)
            if line and not line.startswith("#")  # Skip blank lines/comments.
        )

        for line in lines:
            # Drop comments -- a hash without a space may be in a URL.
            if " #" in line:
                line = line[: line.find(" #")]
            # If there is a line continuation, drop it, and append the next line.
            # NOTE(review): ``[:-2]`` drops the character before the backslash
            # as well as the backslash itself; this mirrors the pkg_resources
            # code this is based on -- confirm before changing.
            if line.endswith("\\"):
                line = line[:-2].strip() + next(lines, "")
            Requirement(line)
            return
|  | ||||
|  | ||||
def deduce_helpful_msg(req: str) -> str:
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    if not os.path.exists(req):
        return f" File '{req}' does not exist."

    msg = " The path does exist. "
    # Try to parse and check if it is a requirements file.
    try:
        check_first_requirement_in_file(req)
    except InvalidRequirement:
        logger.debug("Cannot parse '%s' as requirements file", req)
    else:
        msg += (
            f"The argument you provided ({req}) appears to be a"
            " requirements file. If that is the case, use the '-r' flag to"
            " install the packages specified within it."
        )
    return msg
|  | ||||
|  | ||||
class RequirementParts:
    """Container for the components parsed out of a single requirement line."""

    def __init__(
        self,
        requirement: Optional[Requirement],
        link: Optional[Link],
        markers: Optional[Marker],
        extras: Set[str],
    ):
        # PEP 508 requirement (name/specifier), if one could be derived.
        self.requirement = requirement
        # Direct link (URL, local path, or VCS) to the distribution, if any.
        self.link = link
        # Environment markers attached to the line, if any.
        self.markers = markers
        # Requested extras; may be an empty set.
        self.extras = extras
|  | ||||
|  | ||||
def parse_req_from_editable(editable_req: str) -> RequirementParts:
    """Parse an editable requirement string into its RequirementParts.

    :raises InstallationError: when the detected name is not a valid
        PEP 508 requirement.
    """
    name, url, extras_override = parse_editable(editable_req)

    req: Optional[Requirement] = None
    if name is not None:
        try:
            req = Requirement(name)
        except InvalidRequirement:
            raise InstallationError(f"Invalid requirement: '{name}'")

    return RequirementParts(req, Link(url), None, extras_override)
|  | ||||
|  | ||||
| # ---- The actual constructors follow ---- | ||||
|  | ||||
|  | ||||
def install_req_from_editable(
    editable_req: str,
    comes_from: Optional[Union[InstallRequirement, str]] = None,
    *,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    constraint: bool = False,
    user_supplied: bool = False,
    permit_editable_wheels: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Build an editable InstallRequirement from a ``-e ...`` style string."""
    parsed = parse_req_from_editable(editable_req)

    return InstallRequirement(
        parsed.requirement,
        comes_from=comes_from,
        user_supplied=user_supplied,
        editable=True,
        permit_editable_wheels=permit_editable_wheels,
        link=parsed.link,
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        global_options=global_options,
        hash_options=hash_options,
        config_settings=config_settings,
        extras=parsed.extras,
    )
|  | ||||
|  | ||||
| def _looks_like_path(name: str) -> bool: | ||||
|     """Checks whether the string "looks like" a path on the filesystem. | ||||
|  | ||||
|     This does not check whether the target actually exists, only judge from the | ||||
|     appearance. | ||||
|  | ||||
|     Returns true if any of the following conditions is true: | ||||
|     * a path separator is found (either os.path.sep or os.path.altsep); | ||||
|     * a dot is found (which represents the current directory). | ||||
|     """ | ||||
|     if os.path.sep in name: | ||||
|         return True | ||||
|     if os.path.altsep is not None and os.path.altsep in name: | ||||
|         return True | ||||
|     if name.startswith("."): | ||||
|         return True | ||||
|     return False | ||||
|  | ||||
|  | ||||
def _get_url_from_path(path: str, name: str) -> Optional[str]:
    """
    First, it checks whether a provided path is an installable directory. If it
    is, returns the path.

    If false, check if the path is an archive file (such as a .whl).
    The function checks if the path is a file. If false, if the path has
    an @, it will treat it as a PEP 440 URL requirement and return the path.
    """
    if _looks_like_path(name) and os.path.isdir(path):
        if is_installable_dir(path):
            return path_to_url(path)
        # TODO: The is_installable_dir test here might not be necessary
        #       now that it is done in load_pyproject_toml too.
        raise InstallationError(
            f"Directory {name!r} is not installable. Neither 'setup.py' "
            "nor 'pyproject.toml' found."
        )

    if not is_archive_file(path):
        return None

    if os.path.isfile(path):
        return path_to_url(path)

    prefix, at_sign, _ = name.partition("@")
    if at_sign and not _looks_like_path(prefix):
        # If the path contains '@' and the part before it does not look
        # like a path, try to treat it as a PEP 440 URL req instead.
        return None

    logger.warning(
        "Requirement %r looks like a filename, but the file does not exist",
        name,
    )
    return path_to_url(path)
|  | ||||
|  | ||||
def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
    """Break a raw requirement line into its RequirementParts.

    ``name`` may be a PEP 508 specifier, a URL, a local directory, or an
    archive/wheel path.  ``line_source`` (e.g. "line 3 of reqs.txt") is only
    used to enrich error messages.

    :raises InstallationError: if the requirement string is invalid, or if
        extras appear after the version specifier.
    """
    # Require a space before ';' when the name is a URL — presumably because
    # URLs can contain bare ';' characters themselves (TODO confirm).
    if is_url(name):
        marker_sep = "; "
    else:
        marker_sep = ";"
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    # Absolute-path interpretation of the name, used for file/dir detection.
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        # Split any trailing "[extra1,extra2]" off before probing the
        # filesystem for a directory or archive.
        p, extras_as_string = _strip_extras(path)
        url = _get_url_from_path(p, name)
        if url is not None:
            link = Link(url)

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == "file" and re.search(r"\.\./", link.url):
            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            # Pin the exact name==version encoded in the wheel filename.
            req_as_string = f"{wheel.name}=={wheel.version}"
        else:
            # set the req to the egg fragment.  when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    extras = convert_extras(extras_as_string)

    def with_source(text: str) -> str:
        # Append "(from <line_source>)" to error text when the source is known.
        if not line_source:
            return text
        return f"{text} (from {line_source})"

    def _parse_req_string(req_as_string: str) -> Requirement:
        # Parse the specifier string, converting parse failures into
        # InstallationError with a helpful hint where possible.
        try:
            req = get_requirement(req_as_string)
        except InvalidRequirement:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif "=" in req_as_string and not any(
                op in req_as_string for op in operators
            ):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ""
            msg = with_source(f"Invalid requirement: {req_as_string!r}")
            if add_msg:
                msg += f"\nHint: {add_msg}"
            raise InstallationError(msg)
        else:
            # Deprecate extras after specifiers: "name>=1.0[extras]"
            # This currently works by accident because _strip_extras() parses
            # any extras in the end of the string and those are saved in
            # RequirementParts
            for spec in req.specifier:
                spec_str = str(spec)
                if spec_str.endswith("]"):
                    msg = f"Extras after version '{spec_str}'."
                    raise InstallationError(msg)
        return req

    if req_as_string is not None:
        req: Optional[Requirement] = _parse_req_string(req_as_string)
    else:
        req = None

    return RequirementParts(req, link, markers, extras)
|  | ||||
|  | ||||
def install_req_from_line(
    name: str,
    comes_from: Optional[Union[str, InstallRequirement]] = None,
    *,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    constraint: bool = False,
    line_source: Optional[str] = None,
    user_supplied: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Build an InstallRequirement from a single requirement line.

    ``name`` may be a PEP 508 requirement string, a directory containing
    'setup.py', an archive filename, or a URL.

    :param line_source: An optional string describing where the line is from,
        for logging purposes in case of an error.
    """
    # Split the raw line into requirement/link/marker/extras parts, then
    # hand everything to the InstallRequirement constructor.
    parts = parse_req_from_line(name, line_source)
    return InstallRequirement(
        parts.requirement,
        comes_from,
        link=parts.link,
        markers=parts.markers,
        extras=parts.extras,
        use_pep517=use_pep517,
        isolated=isolated,
        global_options=global_options,
        hash_options=hash_options,
        config_settings=config_settings,
        constraint=constraint,
        user_supplied=user_supplied,
    )
|  | ||||
|  | ||||
def install_req_from_req_string(
    req_string: str,
    comes_from: Optional[InstallRequirement] = None,
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
) -> InstallRequirement:
    """Create an InstallRequirement from a PEP 508 requirement string.

    :param comes_from: the requirement that depends on this one, if any;
        used to forbid PyPI-hosted packages from depending on URLs that are
        not also hosted on PyPI.
    :raises InstallationError: if ``req_string`` is not a valid requirement,
        or if a PyPI-hosted package declares a non-PyPI URL dependency.
    """
    try:
        req = get_requirement(req_string)
    except InvalidRequirement as exc:
        # Use !r for consistency with parse_req_from_line's error message,
        # and chain the original parse error for context.
        raise InstallationError(f"Invalid requirement: {req_string!r}") from exc

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if (
        req.url
        and comes_from
        and comes_from.link
        and comes_from.link.netloc in domains_not_allowed
    ):
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "{} depends on {} ".format(comes_from.name, req)
        )

    return InstallRequirement(
        req,
        comes_from,
        isolated=isolated,
        use_pep517=use_pep517,
        user_supplied=user_supplied,
    )
|  | ||||
|  | ||||
def install_req_from_parsed_requirement(
    parsed_req: ParsedRequirement,
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Convert a ParsedRequirement (from a requirements file) into an
    InstallRequirement, dispatching on whether the line was editable."""
    if parsed_req.is_editable:
        return install_req_from_editable(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            constraint=parsed_req.constraint,
            isolated=isolated,
            user_supplied=user_supplied,
            config_settings=config_settings,
        )

    # Per-requirement options (hashes, global options) only exist for
    # non-editable lines; fall back to empty containers when absent.
    opts = parsed_req.options or {}
    return install_req_from_line(
        parsed_req.requirement,
        comes_from=parsed_req.comes_from,
        use_pep517=use_pep517,
        isolated=isolated,
        global_options=opts.get("global_options", []),
        hash_options=opts.get("hashes", {}),
        constraint=parsed_req.constraint,
        line_source=parsed_req.line_source,
        user_supplied=user_supplied,
        config_settings=config_settings,
    )
|  | ||||
|  | ||||
def install_req_from_link_and_ireq(
    link: Link, ireq: InstallRequirement
) -> InstallRequirement:
    """Return a new InstallRequirement copying *ireq*'s settings but
    pointing at *link* instead of ireq's own link."""
    return InstallRequirement(
        req=ireq.req,
        comes_from=ireq.comes_from,
        editable=ireq.editable,
        link=link,  # the only field that differs from the source ireq
        markers=ireq.markers,
        use_pep517=ireq.use_pep517,
        isolated=ireq.isolated,
        global_options=ireq.global_options,
        hash_options=ireq.hash_options,
        config_settings=ireq.config_settings,
        user_supplied=ireq.user_supplied,
    )
| @ -0,0 +1,552 @@ | ||||
| """ | ||||
| Requirements file parsing | ||||
| """ | ||||
|  | ||||
| import logging | ||||
| import optparse | ||||
| import os | ||||
| import re | ||||
| import shlex | ||||
| import urllib.parse | ||||
| from optparse import Values | ||||
| from typing import ( | ||||
|     TYPE_CHECKING, | ||||
|     Any, | ||||
|     Callable, | ||||
|     Dict, | ||||
|     Generator, | ||||
|     Iterable, | ||||
|     List, | ||||
|     Optional, | ||||
|     Tuple, | ||||
| ) | ||||
|  | ||||
| from pip._internal.cli import cmdoptions | ||||
| from pip._internal.exceptions import InstallationError, RequirementsFileParseError | ||||
| from pip._internal.models.search_scope import SearchScope | ||||
| from pip._internal.network.session import PipSession | ||||
| from pip._internal.network.utils import raise_for_status | ||||
| from pip._internal.utils.encoding import auto_decode | ||||
| from pip._internal.utils.urls import get_url_scheme | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     # NoReturn introduced in 3.6.2; imported only for type checking to maintain | ||||
|     # pip compatibility with older patch versions of Python 3.6 | ||||
|     from typing import NoReturn | ||||
|  | ||||
|     from pip._internal.index.package_finder import PackageFinder | ||||
|  | ||||
| __all__ = ["parse_requirements"] | ||||
|  | ||||
# A preprocessed requirements file: (line_number, line_text) pairs.
ReqFileLines = Iterable[Tuple[int, str]]

# Parses one raw line into its positional args and parsed options.
LineParser = Callable[[str], Tuple[str, Values]]

# URL schemes for which the file content is fetched via the network session.
SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
# A '#' comment: whole-line, or preceded by at least one whitespace char.
COMMENT_RE = re.compile(r"(^|\s+)#.*$")

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

# Options valid anywhere in a requirements file; they affect the finder
# and session rather than an individual requirement.
SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
    cmdoptions.index_url,
    cmdoptions.extra_index_url,
    cmdoptions.no_index,
    cmdoptions.constraints,
    cmdoptions.requirements,
    cmdoptions.editable,
    cmdoptions.find_links,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.prefer_binary,
    cmdoptions.require_hashes,
    cmdoptions.pre,
    cmdoptions.trusted_host,
    cmdoptions.use_new_feature,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
    cmdoptions.global_options,
    cmdoptions.hash,
    cmdoptions.config_settings,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]

logger = logging.getLogger(__name__)
|  | ||||
|  | ||||
class ParsedRequirement:
    """A single requirement line lifted out of a requirements file.

    Carries the raw requirement text plus the per-line metadata needed
    later to build an InstallRequirement from it.
    """

    def __init__(
        self,
        requirement: str,
        is_editable: bool,
        comes_from: str,
        constraint: bool,
        options: Optional[Dict[str, Any]] = None,
        line_source: Optional[str] = None,
    ) -> None:
        # Raw requirement string (or editable target) from the file.
        self.requirement = requirement
        self.is_editable = is_editable
        # Human-readable description of the originating file/line.
        self.comes_from = comes_from
        self.constraint = constraint
        # Per-requirement options (hashes, global options, ...), if any.
        self.options = options
        self.line_source = line_source
|  | ||||
|  | ||||
class ParsedLine:
    """One logical line of a requirements file, after option parsing.

    Classifies the line as a plain requirement, an editable (-e)
    requirement, or an options-only line.
    """

    def __init__(
        self,
        filename: str,
        lineno: int,
        args: str,
        opts: Values,
        constraint: bool,
    ) -> None:
        self.filename = filename
        self.lineno = lineno
        self.opts = opts
        self.constraint = constraint

        if args:
            # Positional text on the line: a plain requirement specifier.
            self.is_requirement = True
            self.is_editable = False
            self.requirement = args
            return
        if opts.editables:
            # No positional args, but -e/--editable was given.  Only one
            # -e per line is supported; take the first.
            self.is_requirement = True
            self.is_editable = True
            self.requirement = opts.editables[0]
            return
        # Options-only line (e.g. --index-url): carries no requirement.
        self.is_requirement = False
|  | ||||
|  | ||||
def parse_requirements(
    filename: str,
    session: PipSession,
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    constraint: bool = False,
) -> Generator[ParsedRequirement, None, None]:
    """Parse a requirements file and yield ParsedRequirement instances.

    :param filename:    Path or url of requirements file.
    :param session:     PipSession instance.
    :param finder:      Instance of pip.index.PackageFinder.
    :param options:     cli options.
    :param constraint:  If true, parsing a constraint file rather than
        requirements file.
    """
    parser = RequirementsFileParser(session, get_line_parser(finder))

    for line in parser.parse(filename, constraint):
        # Option-only lines update finder/session and yield None.
        req = handle_line(line, options=options, finder=finder, session=session)
        if req is not None:
            yield req
|  | ||||
|  | ||||
def preprocess(content: str) -> ReqFileLines:
    """Turn raw file content into an iterator of (line_number, line) pairs.

    Continuation lines are joined, comments and empty lines dropped, and
    ``${VAR}`` environment references expanded.

    :param content: the content of the requirements file
    """
    numbered: ReqFileLines = enumerate(content.splitlines(), start=1)
    for transform in (join_lines, ignore_comments, expand_env_variables):
        numbered = transform(numbered)
    return numbered
|  | ||||
|  | ||||
def handle_requirement_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
) -> ParsedRequirement:
    """Convert a requirement-carrying ParsedLine into a ParsedRequirement."""
    assert line.is_requirement

    # Description of the file/line this requirement came from; preserved
    # for the nested code path.
    line_comes_from = "{} {} (line {})".format(
        "-c" if line.constraint else "-r", line.filename, line.lineno
    )

    if line.is_editable:
        # Editable requirements accept no per-requirement options.
        return ParsedRequirement(
            requirement=line.requirement,
            is_editable=True,
            comes_from=line_comes_from,
            constraint=line.constraint,
        )

    # Collect only the parsed options that apply per-requirement and
    # actually carry a value.
    req_options = {}
    for dest in SUPPORTED_OPTIONS_REQ_DEST:
        value = line.opts.__dict__.get(dest)
        if value:
            req_options[dest] = value

    return ParsedRequirement(
        requirement=line.requirement,
        is_editable=False,
        comes_from=line_comes_from,
        constraint=line.constraint,
        options=req_options,
        line_source=f"line {line.lineno} of {line.filename}",
    )
|  | ||||
|  | ||||
def handle_option_line(
    opts: Values,
    filename: str,
    lineno: int,
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    session: Optional[PipSession] = None,
) -> None:
    """Apply an options-only requirements-file line.

    Global flags percolate upward into ``options``, index/find-links
    settings reconfigure ``finder``'s search scope, and index URLs and
    trusted hosts are registered on ``session``.

    :param opts:     the parsed options from this line.
    :param filename: the requirements file being processed (for messages).
    :param lineno:   the line number within that file.
    """
    if opts.hashes:
        # --hash only makes sense attached to a requirement.
        logger.warning(
            "%s line %s has --hash but no requirement, and will be ignored.",
            filename,
            lineno,
        )

    if options:
        # percolate options upward
        if opts.require_hashes:
            options.require_hashes = opts.require_hashes
        if opts.features_enabled:
            options.features_enabled.extend(
                f for f in opts.features_enabled if f not in options.features_enabled
            )

    # set finder options
    if finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        no_index = finder.search_scope.no_index
        if opts.no_index is True:
            no_index = True
            index_urls = []
        if opts.index_url and not no_index:
            index_urls = [opts.index_url]
        if opts.extra_index_urls and not no_index:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            find_links.append(value)

        if session:
            # We need to update the auth urls in session
            session.update_index_urls(index_urls)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
            no_index=no_index,
        )
        finder.search_scope = search_scope

        if opts.pre:
            finder.set_allow_all_prereleases()

        if opts.prefer_binary:
            finder.set_prefer_binary()

        if session:
            for host in opts.trusted_hosts or []:
                # Fix: report the actual requirements file rather than the
                # hard-coded "(unknown)" placeholder in the trusted-host
                # source description.
                source = f"line {lineno} of {filename}"
                session.add_trusted_host(host, source=source)
|  | ||||
|  | ||||
def handle_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
    finder: Optional["PackageFinder"] = None,
    session: Optional[PipSession] = None,
) -> Optional[ParsedRequirement]:
    """Process one parsed line from a requirements file.

    :param line:        The parsed line to be processed.
    :param options:     CLI options.
    :param finder:      The finder - updated by non-requirement lines.
    :param session:     The session - updated by non-requirement lines.

    Returns a ParsedRequirement for requirement lines, otherwise None.

    For lines that contain requirements, only options from
    SUPPORTED_OPTIONS_REQ have an effect, scoped to that requirement;
    any SUPPORTED_OPTIONS present are ignored.

    For lines without requirements, only options from SUPPORTED_OPTIONS
    have an effect; SUPPORTED_OPTIONS_REQ options are ignored.  Such
    lines may contain multiple options (although our docs imply only one
    is supported), and all are parsed and affect the finder.
    """
    if line.is_requirement:
        return handle_requirement_line(line, options)

    # Option-only line: update finder/session/options in place.
    handle_option_line(
        line.opts,
        line.filename,
        line.lineno,
        finder,
        options,
        session,
    )
    return None
|  | ||||
|  | ||||
class RequirementsFileParser:
    """Parses requirements files, transparently recursing into nested
    -r/--requirement and -c/--constraint files."""

    def __init__(
        self,
        session: PipSession,
        line_parser: LineParser,
    ) -> None:
        # Session used to fetch remote (http/https/file) requirement files.
        self._session = session
        # Callable that turns one raw line into (args, parsed options).
        self._line_parser = line_parser

    def parse(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        """Parse a given file, yielding parsed lines."""
        yield from self._parse_and_recurse(filename, constraint)

    def _parse_and_recurse(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        # Yield this file's lines, descending depth-first into any nested
        # requirements/constraints files referenced by option lines.
        for line in self._parse_file(filename, constraint):
            if not line.is_requirement and (
                line.opts.requirements or line.opts.constraints
            ):
                # parse a nested requirements file
                if line.opts.requirements:
                    req_path = line.opts.requirements[0]
                    nested_constraint = False
                else:
                    req_path = line.opts.constraints[0]
                    nested_constraint = True

                # original file is over http
                if SCHEME_RE.search(filename):
                    # do a url join so relative paths work
                    req_path = urllib.parse.urljoin(filename, req_path)
                # original file and nested file are paths
                elif not SCHEME_RE.search(req_path):
                    # do a join so relative paths work
                    req_path = os.path.join(
                        os.path.dirname(filename),
                        req_path,
                    )

                yield from self._parse_and_recurse(req_path, nested_constraint)
            else:
                yield line

    def _parse_file(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        # Fetch and decode the file, preprocess it into logical lines, and
        # parse each line; no recursion happens at this level.
        _, content = get_file_content(filename, self._session)

        lines_enum = preprocess(content)

        for line_number, line in lines_enum:
            try:
                args_str, opts = self._line_parser(line)
            except OptionParsingError as e:
                # add offending line
                msg = f"Invalid requirement: {line}\n{e.msg}"
                raise RequirementsFileParseError(msg)

            yield ParsedLine(
                filename,
                line_number,
                args_str,
                opts,
                constraint,
            )
|  | ||||
|  | ||||
def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
    """Return a callable that parses one requirements-file line into
    its positional args string and parsed options."""

    def parse_line(line: str) -> Tuple[str, Values]:
        # A fresh parser per line: appendable options would otherwise
        # accumulate values across lines.
        parser = build_parser()
        defaults = parser.get_default_values()
        defaults.index_url = None
        if finder:
            defaults.format_control = finder.format_control

        args_str, options_str = break_args_options(line)

        try:
            tokens = shlex.split(options_str)
        except ValueError as e:
            raise OptionParsingError(f"Could not split options: {options_str}") from e

        opts, _ = parser.parse_args(tokens, defaults)
        return args_str, opts

    return parse_line
|  | ||||
|  | ||||
def break_args_options(line: str) -> Tuple[str, str]:
    """Break up the line into an args and options string.

    We only want to shlex (and then optparse) the options, not the args:
    args can contain environment markers which shlex would corrupt.
    Everything from the first "-"-prefixed token onward counts as options.

    Note: the previous implementation also tested ``startswith("--")``,
    which is subsumed by ``startswith("-")``.
    """
    tokens = line.split(" ")
    for i, token in enumerate(tokens):
        if token.startswith("-"):
            return " ".join(tokens[:i]), " ".join(tokens[i:])
    return " ".join(tokens), ""
|  | ||||
|  | ||||
class OptionParsingError(Exception):
    """Raised when the options part of a requirements-file line cannot be
    parsed; ``msg`` carries the parser's error text."""

    def __init__(self, msg: str) -> None:
        self.msg = msg
|  | ||||
|  | ||||
def build_parser() -> optparse.OptionParser:
    """Create the optparse parser used for requirement-file lines."""
    parser = optparse.OptionParser(add_help_option=False)

    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self: Any, msg: str) -> "NoReturn":
        raise OptionParsingError(msg)

    # NOTE: mypy disallows assigning to a method
    #       https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser
|  | ||||
|  | ||||
def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
    """Merge backslash-continued lines into single logical lines.

    A line ending in '\\' is joined with the following line(s), unless it
    is a comment; the merged line keeps the number of its first physical
    line.
    """
    first_lineno = None
    pending: List[str] = []
    for lineno, text in lines_enum:
        is_comment = bool(COMMENT_RE.match(text))
        if text.endswith("\\") and not is_comment:
            # Start or extend a continuation run.
            if not pending:
                first_lineno = lineno
            pending.append(text.strip("\\"))
            continue
        if is_comment:
            # Prefix with a space so the comment is still matched later
            # (COMMENT_RE requires whitespace or line start before '#').
            text = " " + text
        if pending:
            pending.append(text)
            assert first_lineno is not None
            yield first_lineno, "".join(pending)
            pending = []
        else:
            yield lineno, text

    # The file ended while a continuation was still open.
    if pending:
        assert first_lineno is not None
        yield first_lineno, "".join(pending)

    # TODO: handle space after '\'.
|  | ||||
|  | ||||
def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
    """Strip comments and drop lines that end up empty."""
    for lineno, raw in lines_enum:
        stripped = COMMENT_RE.sub("", raw).strip()
        if stripped:
            yield lineno, stripped
|  | ||||
|  | ||||
def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
    """Substitute ``${MY_VARIABLE_1}`` references with environment values.

    Only the ``${NAME}`` form with uppercase letters, digits, and '_' is
    recognized, ensuring two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    References whose variable is unset or empty are left untouched.
    """
    for lineno, text in lines_enum:
        for placeholder, var_name in ENV_VAR_RE.findall(text):
            value = os.getenv(var_name)
            if value:
                text = text.replace(placeholder, value)
        yield lineno, text
|  | ||||
|  | ||||
def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
    """Fetch a requirements file's content as (location, unicode_text).

    ``url`` may be a filename, file: URL, or http(s): URL; the session
    handles file:// itself (LocalFSAdapter).  For local paths, any
    ``# -*- coding:`` declaration in the file is respected.

    :param url:         File path or url.
    :param session:     PipSession instance.
    """
    if get_url_scheme(url) in ["http", "https", "file"]:
        resp = session.get(url)
        raise_for_status(resp)
        return resp.url, resp.text

    # Otherwise treat it as a bare filesystem path.
    try:
        with open(url, "rb") as f:
            content = auto_decode(f.read())
    except OSError as exc:
        raise InstallationError(f"Could not open requirements file: {exc}")
    return url, content
| @ -0,0 +1,874 @@ | ||||
| # The following comment should be removed at some point in the future. | ||||
| # mypy: strict-optional=False | ||||
|  | ||||
| import functools | ||||
| import logging | ||||
| import os | ||||
| import shutil | ||||
| import sys | ||||
| import uuid | ||||
| import zipfile | ||||
| from optparse import Values | ||||
| from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union | ||||
|  | ||||
| from pip._vendor.packaging.markers import Marker | ||||
| from pip._vendor.packaging.requirements import Requirement | ||||
| from pip._vendor.packaging.specifiers import SpecifierSet | ||||
| from pip._vendor.packaging.utils import canonicalize_name | ||||
| from pip._vendor.packaging.version import Version | ||||
| from pip._vendor.packaging.version import parse as parse_version | ||||
| from pip._vendor.pyproject_hooks import BuildBackendHookCaller | ||||
|  | ||||
| from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment | ||||
| from pip._internal.exceptions import InstallationError | ||||
| from pip._internal.locations import get_scheme | ||||
| from pip._internal.metadata import ( | ||||
|     BaseDistribution, | ||||
|     get_default_environment, | ||||
|     get_directory_distribution, | ||||
|     get_wheel_distribution, | ||||
| ) | ||||
| from pip._internal.metadata.base import FilesystemWheel | ||||
| from pip._internal.models.direct_url import DirectUrl | ||||
| from pip._internal.models.link import Link | ||||
| from pip._internal.operations.build.metadata import generate_metadata | ||||
| from pip._internal.operations.build.metadata_editable import generate_editable_metadata | ||||
| from pip._internal.operations.build.metadata_legacy import ( | ||||
|     generate_metadata as generate_metadata_legacy, | ||||
| ) | ||||
| from pip._internal.operations.install.editable_legacy import ( | ||||
|     install_editable as install_editable_legacy, | ||||
| ) | ||||
| from pip._internal.operations.install.wheel import install_wheel | ||||
| from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path | ||||
| from pip._internal.req.req_uninstall import UninstallPathSet | ||||
| from pip._internal.utils.deprecation import deprecated | ||||
| from pip._internal.utils.hashes import Hashes | ||||
| from pip._internal.utils.misc import ( | ||||
|     ConfiguredBuildBackendHookCaller, | ||||
|     ask_path_exists, | ||||
|     backup_dir, | ||||
|     display_path, | ||||
|     hide_url, | ||||
|     redact_auth_from_url, | ||||
| ) | ||||
| from pip._internal.utils.packaging import safe_extra | ||||
| from pip._internal.utils.subprocess import runner_with_spinner_message | ||||
| from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds | ||||
| from pip._internal.utils.virtualenv import running_under_virtualenv | ||||
| from pip._internal.vcs import vcs | ||||
|  | ||||
| logger = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| class InstallRequirement: | ||||
|     """ | ||||
|     Represents something that may be installed later on, may have information | ||||
|     about where to fetch the relevant requirement and also contains logic for | ||||
|     installing the said requirement. | ||||
|     """ | ||||
|  | ||||
    def __init__(
        self,
        req: Optional[Requirement],
        comes_from: Optional[Union[str, "InstallRequirement"]],
        editable: bool = False,
        link: Optional[Link] = None,
        markers: Optional[Marker] = None,
        use_pep517: Optional[bool] = None,
        isolated: bool = False,
        *,
        global_options: Optional[List[str]] = None,
        hash_options: Optional[Dict[str, List[str]]] = None,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        constraint: bool = False,
        extras: Collection[str] = (),
        user_supplied: bool = False,
        permit_editable_wheels: bool = False,
    ) -> None:
        """Set up all bookkeeping state for a requirement.

        :param req: Parsed requirement, or None when only a link is known.
        :param comes_from: A requirements-file path (str) or the parent
            InstallRequirement that pulled this one in.
        :param editable: Whether to install in editable (development) mode.
        :param link: Direct location of the requirement, when known.
        :param markers: Environment markers; defaults to ``req.marker``.
        :param use_pep517: Force PEP 517 on/off; None means "decide after
            reading pyproject.toml".
        :param isolated: Whether to build in an isolated environment.
        :param global_options: Extra options forwarded to setup.py calls.
        :param hash_options: Known-good hashes supplied via ``--hash``.
        :param config_settings: PEP 517 config settings for the backend.
        :param constraint: True when this came from a constraints file.
        :param extras: Explicit extras overriding those parsed from *req*.
        :param user_supplied: True when requested directly by the user (CLI
            or requirements file), as opposed to being a dependency.
        :param permit_editable_wheels: Allow PEP 660 editable wheels.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.permit_editable_wheels = permit_editable_wheels

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir: Optional[str] = None
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

        # original_link is the direct URL that was provided by the user for the
        # requirement, either directly or via a constraints file.
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link

        # When this InstallRequirement is a wheel obtained from the cache of locally
        # built wheels, this is the source link corresponding to the cache entry, which
        # was used to download and build the cached wheel.
        self.cached_wheel_source_link: Optional[Link] = None

        # Information about the location of the artifact that was downloaded . This
        # property is guaranteed to be set in resolver results.
        self.download_info: Optional[DirectUrl] = None

        # Path to any downloaded or already-existing package.
        self.local_file_path: Optional[str] = None
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        # Extras come from the explicit argument when given, otherwise from
        # the parsed requirement itself.
        if extras:
            self.extras = extras
        elif req:
            self.extras = {safe_extra(extra) for extra in req.extras}
        else:
            self.extras = set()
        # Fall back to the requirement's own marker when none was passed.
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the Distribution object if this requirement is already installed.
        self.satisfied_by: Optional[BaseDistribution] = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir: Optional[TempDirectory] = None
        # Set to True after successful installation
        self.install_succeeded: Optional[bool] = None
        # Supplied options
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        self.config_settings = config_settings
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory: Optional[str] = None

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires: Optional[List[str]] = None

        # Build requirements that we will check are available
        self.requirements_to_check: List[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: Optional[BuildBackendHookCaller] = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False
|  | ||||
|     def __str__(self) -> str: | ||||
|         if self.req: | ||||
|             s = str(self.req) | ||||
|             if self.link: | ||||
|                 s += " from {}".format(redact_auth_from_url(self.link.url)) | ||||
|         elif self.link: | ||||
|             s = redact_auth_from_url(self.link.url) | ||||
|         else: | ||||
|             s = "<InstallRequirement>" | ||||
|         if self.satisfied_by is not None: | ||||
|             if self.satisfied_by.location is not None: | ||||
|                 location = display_path(self.satisfied_by.location) | ||||
|             else: | ||||
|                 location = "<memory>" | ||||
|             s += f" in {location}" | ||||
|         if self.comes_from: | ||||
|             if isinstance(self.comes_from, str): | ||||
|                 comes_from: Optional[str] = self.comes_from | ||||
|             else: | ||||
|                 comes_from = self.comes_from.from_path() | ||||
|             if comes_from: | ||||
|                 s += f" (from {comes_from})" | ||||
|         return s | ||||
|  | ||||
|     def __repr__(self) -> str: | ||||
|         return "<{} object: {} editable={!r}>".format( | ||||
|             self.__class__.__name__, str(self), self.editable | ||||
|         ) | ||||
|  | ||||
|     def format_debug(self) -> str: | ||||
|         """An un-tested helper for getting state, for debugging.""" | ||||
|         attributes = vars(self) | ||||
|         names = sorted(attributes) | ||||
|  | ||||
|         state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) | ||||
|         return "<{name} object: {{{state}}}>".format( | ||||
|             name=self.__class__.__name__, | ||||
|             state=", ".join(state), | ||||
|         ) | ||||
|  | ||||
|     # Things that are valid for all kinds of requirements? | ||||
|     @property | ||||
|     def name(self) -> Optional[str]: | ||||
|         if self.req is None: | ||||
|             return None | ||||
|         return self.req.name | ||||
|  | ||||
|     @functools.lru_cache()  # use cached_property in python 3.8+ | ||||
|     def supports_pyproject_editable(self) -> bool: | ||||
|         if not self.use_pep517: | ||||
|             return False | ||||
|         assert self.pep517_backend | ||||
|         with self.build_env: | ||||
|             runner = runner_with_spinner_message( | ||||
|                 "Checking if build backend supports build_editable" | ||||
|             ) | ||||
|             with self.pep517_backend.subprocess_runner(runner): | ||||
|                 return "build_editable" in self.pep517_backend._supported_features() | ||||
|  | ||||
|     @property | ||||
|     def specifier(self) -> SpecifierSet: | ||||
|         return self.req.specifier | ||||
|  | ||||
|     @property | ||||
|     def is_direct(self) -> bool: | ||||
|         """Whether this requirement was specified as a direct URL.""" | ||||
|         return self.original_link is not None | ||||
|  | ||||
|     @property | ||||
|     def is_pinned(self) -> bool: | ||||
|         """Return whether I am pinned to an exact version. | ||||
|  | ||||
|         For example, some-package==1.2 is pinned; some-package>1.2 is not. | ||||
|         """ | ||||
|         specifiers = self.specifier | ||||
|         return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} | ||||
|  | ||||
|     def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: | ||||
|         if not extras_requested: | ||||
|             # Provide an extra to safely evaluate the markers | ||||
|             # without matching any extra | ||||
|             extras_requested = ("",) | ||||
|         if self.markers is not None: | ||||
|             return any( | ||||
|                 self.markers.evaluate({"extra": extra}) for extra in extras_requested | ||||
|             ) | ||||
|         else: | ||||
|             return True | ||||
|  | ||||
|     @property | ||||
|     def has_hash_options(self) -> bool: | ||||
|         """Return whether any known-good hashes are specified as options. | ||||
|  | ||||
|         These activate --require-hashes mode; hashes specified as part of a | ||||
|         URL do not. | ||||
|  | ||||
|         """ | ||||
|         return bool(self.hash_options) | ||||
|  | ||||
|     def hashes(self, trust_internet: bool = True) -> Hashes: | ||||
|         """Return a hash-comparer that considers my option- and URL-based | ||||
|         hashes to be known-good. | ||||
|  | ||||
|         Hashes in URLs--ones embedded in the requirements file, not ones | ||||
|         downloaded from an index server--are almost peers with ones from | ||||
|         flags. They satisfy --require-hashes (whether it was implicitly or | ||||
|         explicitly activated) but do not activate it. md5 and sha224 are not | ||||
|         allowed in flags, which should nudge people toward good algos. We | ||||
|         always OR all hashes together, even ones from URLs. | ||||
|  | ||||
|         :param trust_internet: Whether to trust URL-based (#md5=...) hashes | ||||
|             downloaded from the internet, as by populate_link() | ||||
|  | ||||
|         """ | ||||
|         good_hashes = self.hash_options.copy() | ||||
|         if trust_internet: | ||||
|             link = self.link | ||||
|         elif self.is_direct and self.user_supplied: | ||||
|             link = self.original_link | ||||
|         else: | ||||
|             link = None | ||||
|         if link and link.hash: | ||||
|             good_hashes.setdefault(link.hash_name, []).append(link.hash) | ||||
|         return Hashes(good_hashes) | ||||
|  | ||||
|     def from_path(self) -> Optional[str]: | ||||
|         """Format a nice indicator to show where this "comes from" """ | ||||
|         if self.req is None: | ||||
|             return None | ||||
|         s = str(self.req) | ||||
|         if self.comes_from: | ||||
|             if isinstance(self.comes_from, str): | ||||
|                 comes_from = self.comes_from | ||||
|             else: | ||||
|                 comes_from = self.comes_from.from_path() | ||||
|             if comes_from: | ||||
|                 s += "->" + comes_from | ||||
|         return s | ||||
|  | ||||
|     def ensure_build_location( | ||||
|         self, build_dir: str, autodelete: bool, parallel_builds: bool | ||||
|     ) -> str: | ||||
|         assert build_dir is not None | ||||
|         if self._temp_build_dir is not None: | ||||
|             assert self._temp_build_dir.path | ||||
|             return self._temp_build_dir.path | ||||
|         if self.req is None: | ||||
|             # Some systems have /tmp as a symlink which confuses custom | ||||
|             # builds (such as numpy). Thus, we ensure that the real path | ||||
|             # is returned. | ||||
|             self._temp_build_dir = TempDirectory( | ||||
|                 kind=tempdir_kinds.REQ_BUILD, globally_managed=True | ||||
|             ) | ||||
|  | ||||
|             return self._temp_build_dir.path | ||||
|  | ||||
|         # This is the only remaining place where we manually determine the path | ||||
|         # for the temporary directory. It is only needed for editables where | ||||
|         # it is the value of the --src option. | ||||
|  | ||||
|         # When parallel builds are enabled, add a UUID to the build directory | ||||
|         # name so multiple builds do not interfere with each other. | ||||
|         dir_name: str = canonicalize_name(self.name) | ||||
|         if parallel_builds: | ||||
|             dir_name = f"{dir_name}_{uuid.uuid4().hex}" | ||||
|  | ||||
|         # FIXME: Is there a better place to create the build_dir? (hg and bzr | ||||
|         # need this) | ||||
|         if not os.path.exists(build_dir): | ||||
|             logger.debug("Creating directory %s", build_dir) | ||||
|             os.makedirs(build_dir) | ||||
|         actual_build_dir = os.path.join(build_dir, dir_name) | ||||
|         # `None` indicates that we respect the globally-configured deletion | ||||
|         # settings, which is what we actually want when auto-deleting. | ||||
|         delete_arg = None if autodelete else False | ||||
|         return TempDirectory( | ||||
|             path=actual_build_dir, | ||||
|             delete=delete_arg, | ||||
|             kind=tempdir_kinds.REQ_BUILD, | ||||
|             globally_managed=True, | ||||
|         ).path | ||||
|  | ||||
|     def _set_requirement(self) -> None: | ||||
|         """Set requirement after generating metadata.""" | ||||
|         assert self.req is None | ||||
|         assert self.metadata is not None | ||||
|         assert self.source_dir is not None | ||||
|  | ||||
|         # Construct a Requirement object from the generated metadata | ||||
|         if isinstance(parse_version(self.metadata["Version"]), Version): | ||||
|             op = "==" | ||||
|         else: | ||||
|             op = "===" | ||||
|  | ||||
|         self.req = Requirement( | ||||
|             "".join( | ||||
|                 [ | ||||
|                     self.metadata["Name"], | ||||
|                     op, | ||||
|                     self.metadata["Version"], | ||||
|                 ] | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def warn_on_mismatching_name(self) -> None: | ||||
|         metadata_name = canonicalize_name(self.metadata["Name"]) | ||||
|         if canonicalize_name(self.req.name) == metadata_name: | ||||
|             # Everything is fine. | ||||
|             return | ||||
|  | ||||
|         # If we're here, there's a mismatch. Log a warning about it. | ||||
|         logger.warning( | ||||
|             "Generating metadata for package %s " | ||||
|             "produced metadata for project name %s. Fix your " | ||||
|             "#egg=%s fragments.", | ||||
|             self.name, | ||||
|             metadata_name, | ||||
|             self.name, | ||||
|         ) | ||||
|         self.req = Requirement(metadata_name) | ||||
|  | ||||
|     def check_if_exists(self, use_user_site: bool) -> None: | ||||
|         """Find an installed distribution that satisfies or conflicts | ||||
|         with this requirement, and set self.satisfied_by or | ||||
|         self.should_reinstall appropriately. | ||||
|         """ | ||||
|         if self.req is None: | ||||
|             return | ||||
|         existing_dist = get_default_environment().get_distribution(self.req.name) | ||||
|         if not existing_dist: | ||||
|             return | ||||
|  | ||||
|         version_compatible = self.req.specifier.contains( | ||||
|             existing_dist.version, | ||||
|             prereleases=True, | ||||
|         ) | ||||
|         if not version_compatible: | ||||
|             self.satisfied_by = None | ||||
|             if use_user_site: | ||||
|                 if existing_dist.in_usersite: | ||||
|                     self.should_reinstall = True | ||||
|                 elif running_under_virtualenv() and existing_dist.in_site_packages: | ||||
|                     raise InstallationError( | ||||
|                         f"Will not install to the user site because it will " | ||||
|                         f"lack sys.path precedence to {existing_dist.raw_name} " | ||||
|                         f"in {existing_dist.location}" | ||||
|                     ) | ||||
|             else: | ||||
|                 self.should_reinstall = True | ||||
|         else: | ||||
|             if self.editable: | ||||
|                 self.should_reinstall = True | ||||
|                 # when installing editables, nothing pre-existing should ever | ||||
|                 # satisfy | ||||
|                 self.satisfied_by = None | ||||
|             else: | ||||
|                 self.satisfied_by = existing_dist | ||||
|  | ||||
|     # Things valid for wheels | ||||
|     @property | ||||
|     def is_wheel(self) -> bool: | ||||
|         if not self.link: | ||||
|             return False | ||||
|         return self.link.is_wheel | ||||
|  | ||||
|     @property | ||||
|     def is_wheel_from_cache(self) -> bool: | ||||
|         # When True, it means that this InstallRequirement is a local wheel file in the | ||||
|         # cache of locally built wheels. | ||||
|         return self.cached_wheel_source_link is not None | ||||
|  | ||||
|     # Things valid for sdists | ||||
|     @property | ||||
|     def unpacked_source_directory(self) -> str: | ||||
|         return os.path.join( | ||||
|             self.source_dir, self.link and self.link.subdirectory_fragment or "" | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def setup_py_path(self) -> str: | ||||
|         assert self.source_dir, f"No source dir for {self}" | ||||
|         setup_py = os.path.join(self.unpacked_source_directory, "setup.py") | ||||
|  | ||||
|         return setup_py | ||||
|  | ||||
|     @property | ||||
|     def setup_cfg_path(self) -> str: | ||||
|         assert self.source_dir, f"No source dir for {self}" | ||||
|         setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg") | ||||
|  | ||||
|         return setup_cfg | ||||
|  | ||||
|     @property | ||||
|     def pyproject_toml_path(self) -> str: | ||||
|         assert self.source_dir, f"No source dir for {self}" | ||||
|         return make_pyproject_path(self.unpacked_source_directory) | ||||
|  | ||||
|     def load_pyproject_toml(self) -> None: | ||||
|         """Load the pyproject.toml file. | ||||
|  | ||||
|         After calling this routine, all of the attributes related to PEP 517 | ||||
|         processing for this requirement have been set. In particular, the | ||||
|         use_pep517 attribute can be used to determine whether we should | ||||
|         follow the PEP 517 or legacy (setup.py) code path. | ||||
|         """ | ||||
|         pyproject_toml_data = load_pyproject_toml( | ||||
|             self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self) | ||||
|         ) | ||||
|  | ||||
|         if pyproject_toml_data is None: | ||||
|             if self.config_settings: | ||||
|                 deprecated( | ||||
|                     reason=f"Config settings are ignored for project {self}.", | ||||
|                     replacement=( | ||||
|                         "to use --use-pep517 or add a " | ||||
|                         "pyproject.toml file to the project" | ||||
|                     ), | ||||
|                     gone_in="23.3", | ||||
|                 ) | ||||
|             self.use_pep517 = False | ||||
|             return | ||||
|  | ||||
|         self.use_pep517 = True | ||||
|         requires, backend, check, backend_path = pyproject_toml_data | ||||
|         self.requirements_to_check = check | ||||
|         self.pyproject_requires = requires | ||||
|         self.pep517_backend = ConfiguredBuildBackendHookCaller( | ||||
|             self, | ||||
|             self.unpacked_source_directory, | ||||
|             backend, | ||||
|             backend_path=backend_path, | ||||
|         ) | ||||
|  | ||||
|     def isolated_editable_sanity_check(self) -> None: | ||||
|         """Check that an editable requirement if valid for use with PEP 517/518. | ||||
|  | ||||
|         This verifies that an editable that has a pyproject.toml either supports PEP 660 | ||||
|         or as a setup.py or a setup.cfg | ||||
|         """ | ||||
|         if ( | ||||
|             self.editable | ||||
|             and self.use_pep517 | ||||
|             and not self.supports_pyproject_editable() | ||||
|             and not os.path.isfile(self.setup_py_path) | ||||
|             and not os.path.isfile(self.setup_cfg_path) | ||||
|         ): | ||||
|             raise InstallationError( | ||||
|                 f"Project {self} has a 'pyproject.toml' and its build " | ||||
|                 f"backend is missing the 'build_editable' hook. Since it does not " | ||||
|                 f"have a 'setup.py' nor a 'setup.cfg', " | ||||
|                 f"it cannot be installed in editable mode. " | ||||
|                 f"Consider using a build backend that supports PEP 660." | ||||
|             ) | ||||
|  | ||||
|     def prepare_metadata(self) -> None: | ||||
|         """Ensure that project metadata is available. | ||||
|  | ||||
|         Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. | ||||
|         Under legacy processing, call setup.py egg-info. | ||||
|         """ | ||||
|         assert self.source_dir | ||||
|         details = self.name or f"from {self.link}" | ||||
|  | ||||
|         if self.use_pep517: | ||||
|             assert self.pep517_backend is not None | ||||
|             if ( | ||||
|                 self.editable | ||||
|                 and self.permit_editable_wheels | ||||
|                 and self.supports_pyproject_editable() | ||||
|             ): | ||||
|                 self.metadata_directory = generate_editable_metadata( | ||||
|                     build_env=self.build_env, | ||||
|                     backend=self.pep517_backend, | ||||
|                     details=details, | ||||
|                 ) | ||||
|             else: | ||||
|                 self.metadata_directory = generate_metadata( | ||||
|                     build_env=self.build_env, | ||||
|                     backend=self.pep517_backend, | ||||
|                     details=details, | ||||
|                 ) | ||||
|         else: | ||||
|             self.metadata_directory = generate_metadata_legacy( | ||||
|                 build_env=self.build_env, | ||||
|                 setup_py_path=self.setup_py_path, | ||||
|                 source_dir=self.unpacked_source_directory, | ||||
|                 isolated=self.isolated, | ||||
|                 details=details, | ||||
|             ) | ||||
|  | ||||
|         # Act on the newly generated metadata, based on the name and version. | ||||
|         if not self.name: | ||||
|             self._set_requirement() | ||||
|         else: | ||||
|             self.warn_on_mismatching_name() | ||||
|  | ||||
|         self.assert_source_matches_version() | ||||
|  | ||||
|     @property | ||||
|     def metadata(self) -> Any: | ||||
|         if not hasattr(self, "_metadata"): | ||||
|             self._metadata = self.get_dist().metadata | ||||
|  | ||||
|         return self._metadata | ||||
|  | ||||
|     def get_dist(self) -> BaseDistribution: | ||||
|         if self.metadata_directory: | ||||
|             return get_directory_distribution(self.metadata_directory) | ||||
|         elif self.local_file_path and self.is_wheel: | ||||
|             return get_wheel_distribution( | ||||
|                 FilesystemWheel(self.local_file_path), canonicalize_name(self.name) | ||||
|             ) | ||||
|         raise AssertionError( | ||||
|             f"InstallRequirement {self} has no metadata directory and no wheel: " | ||||
|             f"can't make a distribution." | ||||
|         ) | ||||
|  | ||||
|     def assert_source_matches_version(self) -> None: | ||||
|         assert self.source_dir | ||||
|         version = self.metadata["version"] | ||||
|         if self.req.specifier and version not in self.req.specifier: | ||||
|             logger.warning( | ||||
|                 "Requested %s, but installing version %s", | ||||
|                 self, | ||||
|                 version, | ||||
|             ) | ||||
|         else: | ||||
|             logger.debug( | ||||
|                 "Source in %s has version %s, which satisfies requirement %s", | ||||
|                 display_path(self.source_dir), | ||||
|                 version, | ||||
|                 self, | ||||
|             ) | ||||
|  | ||||
|     # For both source distributions and editables | ||||
|     def ensure_has_source_dir( | ||||
|         self, | ||||
|         parent_dir: str, | ||||
|         autodelete: bool = False, | ||||
|         parallel_builds: bool = False, | ||||
|     ) -> None: | ||||
|         """Ensure that a source_dir is set. | ||||
|  | ||||
|         This will create a temporary build dir if the name of the requirement | ||||
|         isn't known yet. | ||||
|  | ||||
|         :param parent_dir: The ideal pip parent_dir for the source_dir. | ||||
|             Generally src_dir for editables and build_dir for sdists. | ||||
|         :return: self.source_dir | ||||
|         """ | ||||
|         if self.source_dir is None: | ||||
|             self.source_dir = self.ensure_build_location( | ||||
|                 parent_dir, | ||||
|                 autodelete=autodelete, | ||||
|                 parallel_builds=parallel_builds, | ||||
|             ) | ||||
|  | ||||
|     # For editable installations | ||||
|     def update_editable(self) -> None: | ||||
|         if not self.link: | ||||
|             logger.debug( | ||||
|                 "Cannot update repository at %s; repository location is unknown", | ||||
|                 self.source_dir, | ||||
|             ) | ||||
|             return | ||||
|         assert self.editable | ||||
|         assert self.source_dir | ||||
|         if self.link.scheme == "file": | ||||
|             # Static paths don't get updated | ||||
|             return | ||||
|         vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) | ||||
|         # Editable requirements are validated in Requirement constructors. | ||||
|         # So here, if it's neither a path nor a valid VCS URL, it's a bug. | ||||
|         assert vcs_backend, f"Unsupported VCS URL {self.link.url}" | ||||
|         hidden_url = hide_url(self.link.url) | ||||
|         vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0) | ||||
|  | ||||
|     # Top-level Actions | ||||
|     def uninstall( | ||||
|         self, auto_confirm: bool = False, verbose: bool = False | ||||
|     ) -> Optional[UninstallPathSet]: | ||||
|         """ | ||||
|         Uninstall the distribution currently satisfying this requirement. | ||||
|  | ||||
|         Prompts before removing or modifying files unless | ||||
|         ``auto_confirm`` is True. | ||||
|  | ||||
|         Refuses to delete or modify files outside of ``sys.prefix`` - | ||||
|         thus uninstallation within a virtual environment can only | ||||
|         modify that virtual environment, even if the virtualenv is | ||||
|         linked to global site-packages. | ||||
|  | ||||
|         """ | ||||
|         assert self.req | ||||
|         dist = get_default_environment().get_distribution(self.req.name) | ||||
|         if not dist: | ||||
|             logger.warning("Skipping %s as it is not installed.", self.name) | ||||
|             return None | ||||
|         logger.info("Found existing installation: %s", dist) | ||||
|  | ||||
|         uninstalled_pathset = UninstallPathSet.from_dist(dist) | ||||
|         uninstalled_pathset.remove(auto_confirm, verbose) | ||||
|         return uninstalled_pathset | ||||
|  | ||||
|     def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: | ||||
|         def _clean_zip_name(name: str, prefix: str) -> str: | ||||
|             assert name.startswith( | ||||
|                 prefix + os.path.sep | ||||
|             ), f"name {name!r} doesn't start with prefix {prefix!r}" | ||||
|             name = name[len(prefix) + 1 :] | ||||
|             name = name.replace(os.path.sep, "/") | ||||
|             return name | ||||
|  | ||||
|         path = os.path.join(parentdir, path) | ||||
|         name = _clean_zip_name(path, rootdir) | ||||
|         return self.name + "/" + name | ||||
|  | ||||
|     def archive(self, build_dir: Optional[str]) -> None: | ||||
|         """Saves archive to provided build_dir. | ||||
|  | ||||
|         Used for saving downloaded VCS requirements as part of `pip download`. | ||||
|         """ | ||||
|         assert self.source_dir | ||||
|         if build_dir is None: | ||||
|             return | ||||
|  | ||||
|         create_archive = True | ||||
|         archive_name = "{}-{}.zip".format(self.name, self.metadata["version"]) | ||||
|         archive_path = os.path.join(build_dir, archive_name) | ||||
|  | ||||
|         if os.path.exists(archive_path): | ||||
|             response = ask_path_exists( | ||||
|                 "The file {} exists. (i)gnore, (w)ipe, " | ||||
|                 "(b)ackup, (a)bort ".format(display_path(archive_path)), | ||||
|                 ("i", "w", "b", "a"), | ||||
|             ) | ||||
|             if response == "i": | ||||
|                 create_archive = False | ||||
|             elif response == "w": | ||||
|                 logger.warning("Deleting %s", display_path(archive_path)) | ||||
|                 os.remove(archive_path) | ||||
|             elif response == "b": | ||||
|                 dest_file = backup_dir(archive_path) | ||||
|                 logger.warning( | ||||
|                     "Backing up %s to %s", | ||||
|                     display_path(archive_path), | ||||
|                     display_path(dest_file), | ||||
|                 ) | ||||
|                 shutil.move(archive_path, dest_file) | ||||
|             elif response == "a": | ||||
|                 sys.exit(-1) | ||||
|  | ||||
|         if not create_archive: | ||||
|             return | ||||
|  | ||||
|         zip_output = zipfile.ZipFile( | ||||
|             archive_path, | ||||
|             "w", | ||||
|             zipfile.ZIP_DEFLATED, | ||||
|             allowZip64=True, | ||||
|         ) | ||||
|         with zip_output: | ||||
|             dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) | ||||
|             for dirpath, dirnames, filenames in os.walk(dir): | ||||
|                 for dirname in dirnames: | ||||
|                     dir_arcname = self._get_archive_name( | ||||
|                         dirname, | ||||
|                         parentdir=dirpath, | ||||
|                         rootdir=dir, | ||||
|                     ) | ||||
|                     zipdir = zipfile.ZipInfo(dir_arcname + "/") | ||||
|                     zipdir.external_attr = 0x1ED << 16  # 0o755 | ||||
|                     zip_output.writestr(zipdir, "") | ||||
|                 for filename in filenames: | ||||
|                     file_arcname = self._get_archive_name( | ||||
|                         filename, | ||||
|                         parentdir=dirpath, | ||||
|                         rootdir=dir, | ||||
|                     ) | ||||
|                     filename = os.path.join(dirpath, filename) | ||||
|                     zip_output.write(filename, file_arcname) | ||||
|  | ||||
|         logger.info("Saved %s", display_path(archive_path)) | ||||
|  | ||||
|     def install( | ||||
|         self, | ||||
|         global_options: Optional[Sequence[str]] = None, | ||||
|         root: Optional[str] = None, | ||||
|         home: Optional[str] = None, | ||||
|         prefix: Optional[str] = None, | ||||
|         warn_script_location: bool = True, | ||||
|         use_user_site: bool = False, | ||||
|         pycompile: bool = True, | ||||
|     ) -> None: | ||||
|         scheme = get_scheme( | ||||
|             self.name, | ||||
|             user=use_user_site, | ||||
|             home=home, | ||||
|             root=root, | ||||
|             isolated=self.isolated, | ||||
|             prefix=prefix, | ||||
|         ) | ||||
|  | ||||
|         if self.editable and not self.is_wheel: | ||||
|             install_editable_legacy( | ||||
|                 global_options=global_options if global_options is not None else [], | ||||
|                 prefix=prefix, | ||||
|                 home=home, | ||||
|                 use_user_site=use_user_site, | ||||
|                 name=self.name, | ||||
|                 setup_py_path=self.setup_py_path, | ||||
|                 isolated=self.isolated, | ||||
|                 build_env=self.build_env, | ||||
|                 unpacked_source_directory=self.unpacked_source_directory, | ||||
|             ) | ||||
|             self.install_succeeded = True | ||||
|             return | ||||
|  | ||||
|         assert self.is_wheel | ||||
|         assert self.local_file_path | ||||
|  | ||||
|         install_wheel( | ||||
|             self.name, | ||||
|             self.local_file_path, | ||||
|             scheme=scheme, | ||||
|             req_description=str(self.req), | ||||
|             pycompile=pycompile, | ||||
|             warn_script_location=warn_script_location, | ||||
|             direct_url=self.download_info if self.is_direct else None, | ||||
|             requested=self.user_supplied, | ||||
|         ) | ||||
|         self.install_succeeded = True | ||||
|  | ||||
|  | ||||
def check_invalid_constraint_type(req: InstallRequirement) -> str:
    """Return why *req* is not usable as a constraint, or "" if it is.

    Unsupported constraint forms additionally emit a deprecation warning.
    """
    # Ordered checks — the first matching condition supplies the message.
    unsupported_forms = (
        (not req.name, "Unnamed requirements are not allowed as constraints"),
        (req.editable, "Editable requirements are not allowed as constraints"),
        (bool(req.extras), "Constraints cannot have extras"),
    )
    problem = next((message for condition, message in unsupported_forms if condition), "")

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement="replacing the constraint with a requirement",
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210,
        )

    return problem
|  | ||||
|  | ||||
def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
    """Return True when *option* is set globally or on any individual req."""
    if getattr(options, option, None):
        return True
    return any(getattr(req, option, None) for req in reqs)
|  | ||||
|  | ||||
def check_legacy_setup_py_options(
    options: Values,
    reqs: List[InstallRequirement],
) -> None:
    """Warn about deprecated --build-option / --global-option usage.

    When either option is present anywhere, also force source builds by
    disallowing binary distributions.
    """
    uses_legacy_options = any(
        _has_option(options, reqs, option_name)
        for option_name in ("build_options", "global_options")
    )
    if not uses_legacy_options:
        return
    deprecated(
        reason="--build-option and --global-option are deprecated.",
        issue=11859,
        replacement="to use --config-settings",
        gone_in="23.3",
    )
    logger.warning(
        "Implying --no-binary=:all: due to the presence of "
        "--build-option / --global-option. "
    )
    options.format_control.disallow_binaries()
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user