+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000000000000000000000000000000000000..a6bd15e73b881d086ecca6fe53229262b168de09
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000000000000000000000000000000000000..e3753d584943948433e08779ccd389db98a6db77
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/pac.iml b/.idea/pac.iml
new file mode 100644
index 0000000000000000000000000000000000000000..75a180478b62802d744022867cf209b9c10ec2d8
--- /dev/null
+++ b/.idea/pac.iml
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000000000000000000000000000000000000..35eb1ddfbbc029bcab630581847471d7f238ec53
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
new file mode 100644
index 0000000000000000000000000000000000000000..0926e1c8299ddefff2fae56bcb86cb4a49ae954c
--- /dev/null
+++ b/.idea/workspace.xml
@@ -0,0 +1,84 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1735501782328
+
+
+ 1735501782328
+
+
+
+
+ 1735502948132
+
+
+
+ 1735502948132
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.venv/.gitignore b/.venv/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..4c3f3e8ac229f085dc25577e25776800fece6f32
--- /dev/null
+++ b/.venv/.gitignore
@@ -0,0 +1,2 @@
+# created by virtualenv automatically
+*
diff --git a/.venv/Lib/site-packages/pip/__pip-runner__.py b/.venv/Lib/site-packages/pip/__pip-runner__.py
new file mode 100644
index 0000000000000000000000000000000000000000..49a148a097e9cc06c165571e0bffaf7cae17dc5b
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/__pip-runner__.py
@@ -0,0 +1,50 @@
+"""Execute exactly this copy of pip, within a different environment.
+
+This file is named as it is, to ensure that this module can't be imported via
+an import statement.
+"""
+
+# /!\ This version compatibility check section must be Python 2 compatible. /!\
+
+import sys
+
+# Copied from setup.py
+PYTHON_REQUIRES = (3, 7)
+
+
+def version_str(version): # type: ignore
+ return ".".join(str(v) for v in version)
+
+
+if sys.version_info[:2] < PYTHON_REQUIRES:
+ raise SystemExit(
+ "This version of pip does not support python {} (requires >={}).".format(
+ version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
+ )
+ )
+
+# From here on, we can use Python 3 features, but the syntax must remain
+# Python 2 compatible.
+
+import runpy # noqa: E402
+from importlib.machinery import PathFinder # noqa: E402
+from os.path import dirname # noqa: E402
+
+PIP_SOURCES_ROOT = dirname(dirname(__file__))
+
+
+class PipImportRedirectingFinder:
+ @classmethod
+ def find_spec(cls, fullname, path=None, target=None): # type: ignore
+ if fullname != "pip":
+ return None
+
+ spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
+ assert spec, (PIP_SOURCES_ROOT, fullname)
+ return spec
+
+
+sys.meta_path.insert(0, PipImportRedirectingFinder())
+
+assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
+runpy.run_module("pip", run_name="__main__", alter_sys=True)
diff --git a/.venv/Lib/site-packages/pip/_internal/__init__.py b/.venv/Lib/site-packages/pip/_internal/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6afb5c627ce3db6e61cbf46276f7ddd42552eb28
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/__init__.py
@@ -0,0 +1,19 @@
+from typing import List, Optional
+
+import pip._internal.utils.inject_securetransport # noqa
+from pip._internal.utils import _log
+
+# init_logging() must be called before any call to logging.getLogger()
+# which happens at import of most modules.
+_log.init_logging()
+
+
+def main(args: Optional[List[str]] = None) -> int:
+ """This is preserved for old console scripts that may still be referencing
+ it.
+
+ For additional details, see https://github.com/pypa/pip/issues/7498.
+ """
+ from pip._internal.utils.entrypoints import _wrapper
+
+ return _wrapper(args)
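The docstring above references pypa/pip#7498, which covers why calling pip in-process is discouraged. A short sketch of the supported alternative, running pip as a subprocess of the current interpreter:

```python
import subprocess
import sys

# Drive pip through a subprocess rather than importing pip._internal.main();
# this avoids the in-process caveats discussed in pypa/pip#7498.
result = subprocess.run(
    [sys.executable, "-m", "pip", "--version"],
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout.strip())
```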
diff --git a/.venv/Lib/site-packages/pip/_internal/build_env.py b/.venv/Lib/site-packages/pip/_internal/build_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f704a3547da02f913d6cfdbd4e0ed77c81caabe
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,311 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+import logging
+import os
+import pathlib
+import site
+import sys
+import textwrap
+from collections import OrderedDict
+from types import TracebackType
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
+
+from pip._vendor.certifi import where
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.version import Version
+
+from pip import __file__ as pip_location
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.locations import get_platlib, get_purelib, get_scheme
+from pip._internal.metadata import get_default_environment, get_environment
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+
+if TYPE_CHECKING:
+ from pip._internal.index.package_finder import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
+ return (a, b) if a != b else (a,)
+
+
+class _Prefix:
+ def __init__(self, path: str) -> None:
+ self.path = path
+ self.setup = False
+ scheme = get_scheme("", prefix=path)
+ self.bin_dir = scheme.scripts
+ self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
+
+
+def get_runnable_pip() -> str:
+ """Get a file to pass to a Python executable, to run the currently-running pip.
+
+ This is used to run a pip subprocess, for installing requirements into the build
+ environment.
+ """
+ source = pathlib.Path(pip_location).resolve().parent
+
+ if not source.is_dir():
+ # This would happen if someone is using pip from inside a zip file. In that
+ # case, we can use that directly.
+ return str(source)
+
+ return os.fsdecode(source / "__pip-runner__.py")
+
+
+def _get_system_sitepackages() -> Set[str]:
+ """Get system site packages
+
+ Usually from site.getsitepackages,
+ but falls back on `get_purelib()/get_platlib()` if unavailable
+ (e.g. in a virtualenv created by virtualenv<20)
+
+ Returns normalized set of strings.
+ """
+ if hasattr(site, "getsitepackages"):
+ system_sites = site.getsitepackages()
+ else:
+ # virtualenv < 20 overwrites site.py without getsitepackages
+ # fallback on get_purelib/get_platlib.
+ # this is known to miss things, but shouldn't in the cases
+ # where getsitepackages() has been removed (inside a virtualenv)
+ system_sites = [get_purelib(), get_platlib()]
+ return {os.path.normcase(path) for path in system_sites}
+
+
+class BuildEnvironment:
+ """Creates and manages an isolated environment to install build deps"""
+
+ def __init__(self) -> None:
+ temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
+
+ self._prefixes = OrderedDict(
+ (name, _Prefix(os.path.join(temp_dir.path, name)))
+ for name in ("normal", "overlay")
+ )
+
+ self._bin_dirs: List[str] = []
+ self._lib_dirs: List[str] = []
+ for prefix in reversed(list(self._prefixes.values())):
+ self._bin_dirs.append(prefix.bin_dir)
+ self._lib_dirs.extend(prefix.lib_dirs)
+
+ # Customize site to:
+ # - ensure .pth files are honored
+ # - prevent access to system site packages
+ system_sites = _get_system_sitepackages()
+
+ self._site_dir = os.path.join(temp_dir.path, "site")
+ if not os.path.exists(self._site_dir):
+ os.mkdir(self._site_dir)
+ with open(
+ os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+ ) as fp:
+ fp.write(
+ textwrap.dedent(
+ """
+ import os, site, sys
+
+ # First, drop system-sites related paths.
+ original_sys_path = sys.path[:]
+ known_paths = set()
+ for path in {system_sites!r}:
+ site.addsitedir(path, known_paths=known_paths)
+ system_paths = set(
+ os.path.normcase(path)
+ for path in sys.path[len(original_sys_path):]
+ )
+ original_sys_path = [
+ path for path in original_sys_path
+ if os.path.normcase(path) not in system_paths
+ ]
+ sys.path = original_sys_path
+
+ # Second, add lib directories,
+ # ensuring .pth files are processed.
+ for path in {lib_dirs!r}:
+ assert path not in sys.path
+ site.addsitedir(path)
+ """
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
+ )
+
+ def __enter__(self) -> None:
+ self._save_env = {
+ name: os.environ.get(name, None)
+ for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
+ }
+
+ path = self._bin_dirs[:]
+ old_path = self._save_env["PATH"]
+ if old_path:
+ path.extend(old_path.split(os.pathsep))
+
+ pythonpath = [self._site_dir]
+
+ os.environ.update(
+ {
+ "PATH": os.pathsep.join(path),
+ "PYTHONNOUSERSITE": "1",
+ "PYTHONPATH": os.pathsep.join(pythonpath),
+ }
+ )
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ for varname, old_value in self._save_env.items():
+ if old_value is None:
+ os.environ.pop(varname, None)
+ else:
+ os.environ[varname] = old_value
+
+ def check_requirements(
+ self, reqs: Iterable[str]
+ ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
+ """Return 2 sets:
+ - conflicting requirements: set of (installed, wanted) reqs tuples
+ - missing requirements: set of reqs
+ """
+ missing = set()
+ conflicting = set()
+ if reqs:
+ env = (
+ get_environment(self._lib_dirs)
+ if hasattr(self, "_lib_dirs")
+ else get_default_environment()
+ )
+ for req_str in reqs:
+ req = Requirement(req_str)
+ # We're explicitly evaluating with an empty extra value, since build
+ # environments are not provided any mechanism to select specific extras.
+ if req.marker is not None and not req.marker.evaluate({"extra": ""}):
+ continue
+ dist = env.get_distribution(req.name)
+ if not dist:
+ missing.add(req_str)
+ continue
+ if isinstance(dist.version, Version):
+ installed_req_str = f"{req.name}=={dist.version}"
+ else:
+ installed_req_str = f"{req.name}==={dist.version}"
+ if not req.specifier.contains(dist.version, prereleases=True):
+ conflicting.add((installed_req_str, req_str))
+ # FIXME: Consider direct URL?
+ return conflicting, missing
+
+ def install_requirements(
+ self,
+ finder: "PackageFinder",
+ requirements: Iterable[str],
+ prefix_as_string: str,
+ *,
+ kind: str,
+ ) -> None:
+ prefix = self._prefixes[prefix_as_string]
+ assert not prefix.setup
+ prefix.setup = True
+ if not requirements:
+ return
+ self._install_requirements(
+ get_runnable_pip(),
+ finder,
+ requirements,
+ prefix,
+ kind=kind,
+ )
+
+ @staticmethod
+ def _install_requirements(
+ pip_runnable: str,
+ finder: "PackageFinder",
+ requirements: Iterable[str],
+ prefix: _Prefix,
+ *,
+ kind: str,
+ ) -> None:
+ args: List[str] = [
+ sys.executable,
+ pip_runnable,
+ "install",
+ "--ignore-installed",
+ "--no-user",
+ "--prefix",
+ prefix.path,
+ "--no-warn-script-location",
+ ]
+ if logger.getEffectiveLevel() <= logging.DEBUG:
+ args.append("-v")
+ for format_control in ("no_binary", "only_binary"):
+ formats = getattr(finder.format_control, format_control)
+ args.extend(
+ (
+ "--" + format_control.replace("_", "-"),
+ ",".join(sorted(formats or {":none:"})),
+ )
+ )
+
+ index_urls = finder.index_urls
+ if index_urls:
+ args.extend(["-i", index_urls[0]])
+ for extra_index in index_urls[1:]:
+ args.extend(["--extra-index-url", extra_index])
+ else:
+ args.append("--no-index")
+ for link in finder.find_links:
+ args.extend(["--find-links", link])
+
+ for host in finder.trusted_hosts:
+ args.extend(["--trusted-host", host])
+ if finder.allow_all_prereleases:
+ args.append("--pre")
+ if finder.prefer_binary:
+ args.append("--prefer-binary")
+ args.append("--")
+ args.extend(requirements)
+ extra_environ = {"_PIP_STANDALONE_CERT": where()}
+ with open_spinner(f"Installing {kind}") as spinner:
+ call_subprocess(
+ args,
+ command_desc=f"pip subprocess to install {kind}",
+ spinner=spinner,
+ extra_environ=extra_environ,
+ )
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+ """A no-op drop-in replacement for BuildEnvironment"""
+
+ def __init__(self) -> None:
+ pass
+
+ def __enter__(self) -> None:
+ pass
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ pass
+
+ def cleanup(self) -> None:
+ pass
+
+ def install_requirements(
+ self,
+ finder: "PackageFinder",
+ requirements: Iterable[str],
+ prefix_as_string: str,
+ *,
+ kind: str,
+ ) -> None:
+ raise NotImplementedError()
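`BuildEnvironment.__enter__`/`__exit__` amount to a save-patch-restore cycle over PATH, PYTHONNOUSERSITE, and PYTHONPATH. A minimal sketch of that pattern for a single variable (illustrative only, not pip API):

```python
import os
from contextlib import contextmanager
from typing import Iterator


@contextmanager
def patched_env(name: str, value: str) -> Iterator[None]:
    # Save the old value; None means "was unset" (mirrors _save_env above).
    saved = os.environ.get(name)
    os.environ[name] = value
    try:
        yield
    finally:
        # Restore on exit: pop if it was unset, otherwise put the value back.
        if saved is None:
            os.environ.pop(name, None)
        else:
            os.environ[name] = saved


with patched_env("PYTHONNOUSERSITE", "1"):
    pass  # subprocesses spawned here would ignore user site-packages
```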
diff --git a/.venv/Lib/site-packages/pip/_internal/cache.py b/.venv/Lib/site-packages/pip/_internal/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d3a664c7d1be57579608a7c1e1da4570b439a19
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cache.py
@@ -0,0 +1,292 @@
+"""Cache Management
+"""
+
+import hashlib
+import json
+import logging
+import os
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InvalidWheelFilename
+from pip._internal.models.direct_url import DirectUrl
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+ORIGIN_JSON_NAME = "origin.json"
+
+
+def _hash_dict(d: Dict[str, str]) -> str:
+ """Return a stable sha224 of a dictionary."""
+ s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
+ return hashlib.sha224(s.encode("ascii")).hexdigest()
+
+
+class Cache:
+ """An abstract class - provides cache directories for data from links
+
+ :param cache_dir: The root of the cache.
+ """
+
+ def __init__(self, cache_dir: str) -> None:
+ super().__init__()
+ assert not cache_dir or os.path.isabs(cache_dir)
+ self.cache_dir = cache_dir or None
+
+ def _get_cache_path_parts(self, link: Link) -> List[str]:
+ """Get parts of part that must be os.path.joined with cache_dir"""
+
+ # We want to generate a URL to use as our cache key; we don't want to
+ # just re-use the URL because it might have other items in the fragment
+ # and we don't care about those.
+ key_parts = {"url": link.url_without_fragment}
+ if link.hash_name is not None and link.hash is not None:
+ key_parts[link.hash_name] = link.hash
+ if link.subdirectory_fragment:
+ key_parts["subdirectory"] = link.subdirectory_fragment
+
+ # Include interpreter name, major and minor version in cache key
+ # to cope with ill-behaved sdists that build a different wheel
+ # depending on the python version their setup.py is being run on,
+ # and don't encode the difference in compatibility tags.
+ # https://github.com/pypa/pip/issues/7296
+ key_parts["interpreter_name"] = interpreter_name()
+ key_parts["interpreter_version"] = interpreter_version()
+
+ # Encode our key URL with sha224; it has security properties similar to
+ # sha256's, but a shorter total output (and is thus less secure). That
+ # difference doesn't matter for our use case here.
+ hashed = _hash_dict(key_parts)
+
+ # We nest the directories to avoid an enormous number of top-level
+ # directories, which could exceed per-directory entry limits on some
+ # filesystems.
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+ return parts
+
+ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
+ can_not_cache = not self.cache_dir or not canonical_package_name or not link
+ if can_not_cache:
+ return []
+
+ candidates = []
+ path = self.get_path_for_link(link)
+ if os.path.isdir(path):
+ for candidate in os.listdir(path):
+ candidates.append((candidate, path))
+ return candidates
+
+ def get_path_for_link(self, link: Link) -> str:
+ """Return a directory to store cached items in for link."""
+ raise NotImplementedError()
+
+ def get(
+ self,
+ link: Link,
+ package_name: Optional[str],
+ supported_tags: List[Tag],
+ ) -> Link:
+ """Returns a link to a cached item if it exists, otherwise returns the
+ passed link.
+ """
+ raise NotImplementedError()
+
+
+class SimpleWheelCache(Cache):
+ """A cache of wheels for future installs."""
+
+ def __init__(self, cache_dir: str) -> None:
+ super().__init__(cache_dir)
+
+ def get_path_for_link(self, link: Link) -> str:
+ """Return a directory to store cached wheels for link
+
+ Because there are M wheels for any one sdist, we provide a directory
+ to cache them in, and then consult that directory when looking up
+ cache hits.
+
+ We only insert things into the cache if they have plausible version
+ numbers, so that we don't contaminate the cache with things that were
+ not unique. E.g. ./package might have dozens of installs done for it
+ and build a version of 0.0...and if we built and cached a wheel, we'd
+ end up using the same wheel even if the source has been edited.
+
+ :param link: The link of the sdist for which this will cache wheels.
+ """
+ parts = self._get_cache_path_parts(link)
+ assert self.cache_dir
+ # Store wheels within the root cache_dir
+ return os.path.join(self.cache_dir, "wheels", *parts)
+
+ def get(
+ self,
+ link: Link,
+ package_name: Optional[str],
+ supported_tags: List[Tag],
+ ) -> Link:
+ candidates = []
+
+ if not package_name:
+ return link
+
+ canonical_package_name = canonicalize_name(package_name)
+ for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
+ try:
+ wheel = Wheel(wheel_name)
+ except InvalidWheelFilename:
+ continue
+ if canonicalize_name(wheel.name) != canonical_package_name:
+ logger.debug(
+ "Ignoring cached wheel %s for %s as it "
+ "does not match the expected distribution name %s.",
+ wheel_name,
+ link,
+ package_name,
+ )
+ continue
+ if not wheel.supported(supported_tags):
+ # Built for a different python/arch/etc
+ continue
+ candidates.append(
+ (
+ wheel.support_index_min(supported_tags),
+ wheel_name,
+ wheel_dir,
+ )
+ )
+
+ if not candidates:
+ return link
+
+ _, wheel_name, wheel_dir = min(candidates)
+ return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
+
+
+class EphemWheelCache(SimpleWheelCache):
+ """A SimpleWheelCache that creates it's own temporary cache directory"""
+
+ def __init__(self) -> None:
+ self._temp_dir = TempDirectory(
+ kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
+ globally_managed=True,
+ )
+
+ super().__init__(self._temp_dir.path)
+
+
+class CacheEntry:
+ def __init__(
+ self,
+ link: Link,
+ persistent: bool,
+ ):
+ self.link = link
+ self.persistent = persistent
+ self.origin: Optional[DirectUrl] = None
+ origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
+ if origin_direct_url_path.exists():
+ try:
+ self.origin = DirectUrl.from_json(
+ origin_direct_url_path.read_text(encoding="utf-8")
+ )
+ except Exception as e:
+ logger.warning(
+ "Ignoring invalid cache entry origin file %s for %s (%s)",
+ origin_direct_url_path,
+ link.filename,
+ e,
+ )
+
+
+class WheelCache(Cache):
+ """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+ This Cache allows for graceful degradation: it falls back to the ephem
+ wheel cache when a link is not found in the simple wheel cache first.
+ """
+
+ def __init__(self, cache_dir: str) -> None:
+ super().__init__(cache_dir)
+ self._wheel_cache = SimpleWheelCache(cache_dir)
+ self._ephem_cache = EphemWheelCache()
+
+ def get_path_for_link(self, link: Link) -> str:
+ return self._wheel_cache.get_path_for_link(link)
+
+ def get_ephem_path_for_link(self, link: Link) -> str:
+ return self._ephem_cache.get_path_for_link(link)
+
+ def get(
+ self,
+ link: Link,
+ package_name: Optional[str],
+ supported_tags: List[Tag],
+ ) -> Link:
+ cache_entry = self.get_cache_entry(link, package_name, supported_tags)
+ if cache_entry is None:
+ return link
+ return cache_entry.link
+
+ def get_cache_entry(
+ self,
+ link: Link,
+ package_name: Optional[str],
+ supported_tags: List[Tag],
+ ) -> Optional[CacheEntry]:
+ """Returns a CacheEntry with a link to a cached item if it exists or
+ None. The cache entry indicates if the item was found in the persistent
+ or ephemeral cache.
+ """
+ retval = self._wheel_cache.get(
+ link=link,
+ package_name=package_name,
+ supported_tags=supported_tags,
+ )
+ if retval is not link:
+ return CacheEntry(retval, persistent=True)
+
+ retval = self._ephem_cache.get(
+ link=link,
+ package_name=package_name,
+ supported_tags=supported_tags,
+ )
+ if retval is not link:
+ return CacheEntry(retval, persistent=False)
+
+ return None
+
+ @staticmethod
+ def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
+ origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
+ if origin_path.exists():
+ try:
+ origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
+ except Exception as e:
+ logger.warning(
+ "Could not read origin file %s in cache entry (%s). "
+ "Will attempt to overwrite it.",
+ origin_path,
+ e,
+ )
+ else:
+ # TODO: use DirectUrl.equivalent when
+ # https://github.com/pypa/pip/pull/10564 is merged.
+ if origin.url != download_info.url:
+ logger.warning(
+ "Origin URL %s in cache entry %s does not match download URL "
+ "%s. This is likely a pip bug or a cache corruption issue. "
+ "Will overwrite it with the new value.",
+ origin.url,
+ cache_dir,
+ download_info.url,
+ )
+ origin_path.write_text(download_info.to_json(), encoding="utf-8")
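The on-disk layout produced by `_get_cache_path_parts` is easy to reproduce: dump the key parts as canonical JSON, hash with sha224, and split the digest into 2/2/2/remainder path components. A small demonstration with hypothetical key values:

```python
import hashlib
import json

# Hypothetical key parts; the real ones come from the Link and interpreter.
key_parts = {
    "url": "https://example.com/pkg-1.0.tar.gz",
    "interpreter_name": "cp",
    "interpreter_version": "312",
}
s = json.dumps(key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
hashed = hashlib.sha224(s.encode("ascii")).hexdigest()
# Nest as 2/2/2/remainder, as in _get_cache_path_parts, so no single
# directory accumulates too many entries.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
print("/".join(parts))
```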
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/__init__.py b/.venv/Lib/site-packages/pip/_internal/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e589bb917e23823e25f9fff7e0849c4d6d4a62bc
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/__init__.py
@@ -0,0 +1,4 @@
+"""Subpackage containing all of pip's command line interface related code
+"""
+
+# This file intentionally does not import submodules
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/autocompletion.py b/.venv/Lib/site-packages/pip/_internal/cli/autocompletion.py
new file mode 100644
index 0000000000000000000000000000000000000000..226fe84dc0d0c4eb78f9b3c603df20cef0fdfda4
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/autocompletion.py
@@ -0,0 +1,171 @@
+"""Logic that powers autocompletion installed by ``pip completion``.
+"""
+
+import optparse
+import os
+import sys
+from itertools import chain
+from typing import Any, Iterable, List, Optional
+
+from pip._internal.cli.main_parser import create_main_parser
+from pip._internal.commands import commands_dict, create_command
+from pip._internal.metadata import get_default_environment
+
+
+def autocomplete() -> None:
+ """Entry Point for completion of main and subcommand options."""
+ # Don't complete if user hasn't sourced bash_completion file.
+ if "PIP_AUTO_COMPLETE" not in os.environ:
+ return
+ cwords = os.environ["COMP_WORDS"].split()[1:]
+ cword = int(os.environ["COMP_CWORD"])
+ try:
+ current = cwords[cword - 1]
+ except IndexError:
+ current = ""
+
+ parser = create_main_parser()
+ subcommands = list(commands_dict)
+ options = []
+
+ # subcommand
+ subcommand_name: Optional[str] = None
+ for word in cwords:
+ if word in subcommands:
+ subcommand_name = word
+ break
+ # subcommand options
+ if subcommand_name is not None:
+ # special case: 'help' subcommand has no options
+ if subcommand_name == "help":
+ sys.exit(1)
+ # special case: list locally installed dists for show and uninstall
+ should_list_installed = not current.startswith("-") and subcommand_name in [
+ "show",
+ "uninstall",
+ ]
+ if should_list_installed:
+ env = get_default_environment()
+ lc = current.lower()
+ installed = [
+ dist.canonical_name
+ for dist in env.iter_installed_distributions(local_only=True)
+ if dist.canonical_name.startswith(lc)
+ and dist.canonical_name not in cwords[1:]
+ ]
+ # if there are no dists installed, fall back to option completion
+ if installed:
+ for dist in installed:
+ print(dist)
+ sys.exit(1)
+
+ should_list_installables = (
+ not current.startswith("-") and subcommand_name == "install"
+ )
+ if should_list_installables:
+ for path in auto_complete_paths(current, "path"):
+ print(path)
+ sys.exit(1)
+
+ subcommand = create_command(subcommand_name)
+
+ for opt in subcommand.parser.option_list_all:
+ if opt.help != optparse.SUPPRESS_HELP:
+ for opt_str in opt._long_opts + opt._short_opts:
+ options.append((opt_str, opt.nargs))
+
+ # filter out previously specified options from available options
+ prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
+ # filter options by current input
+ options = [(k, v) for k, v in options if k.startswith(current)]
+ # get completion type given cwords and available subcommand options
+ completion_type = get_path_completion_type(
+ cwords,
+ cword,
+ subcommand.parser.option_list_all,
+ )
+ # get completion files and directories if ``completion_type`` is
+ # ``<file>``, ``<dir>`` or ``<path>``
+ if completion_type:
+ paths = auto_complete_paths(current, completion_type)
+ options = [(path, 0) for path in paths]
+ for option in options:
+ opt_label = option[0]
+ # append '=' to options which require args
+ if option[1] and option[0][:2] == "--":
+ opt_label += "="
+ print(opt_label)
+ else:
+ # show main parser options only when necessary
+
+ opts = [i.option_list for i in parser.option_groups]
+ opts.append(parser.option_list)
+ flattened_opts = chain.from_iterable(opts)
+ if current.startswith("-"):
+ for opt in flattened_opts:
+ if opt.help != optparse.SUPPRESS_HELP:
+ subcommands += opt._long_opts + opt._short_opts
+ else:
+ # get completion type given cwords and all available options
+ completion_type = get_path_completion_type(cwords, cword, flattened_opts)
+ if completion_type:
+ subcommands = list(auto_complete_paths(current, completion_type))
+
+ print(" ".join([x for x in subcommands if x.startswith(current)]))
+ sys.exit(1)
+
+
+def get_path_completion_type(
+ cwords: List[str], cword: int, opts: Iterable[Any]
+) -> Optional[str]:
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+ :param cwords: same as the environment variable ``COMP_WORDS``
+ :param cword: same as the environment variable ``COMP_CWORD``
+ :param opts: The available options to check
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
+ """
+ if cword < 2 or not cwords[cword - 2].startswith("-"):
+ return None
+ for opt in opts:
+ if opt.help == optparse.SUPPRESS_HELP:
+ continue
+ for o in str(opt).split("/"):
+ if cwords[cword - 2].split("=")[0] == o:
+ if not opt.metavar or any(
+ x in ("path", "file", "dir") for x in opt.metavar.split("/")
+ ):
+ return opt.metavar
+ return None
+
+
+def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
+ and directories starting with ``current``; otherwise only list directories
+ starting with ``current``.
+
+ :param current: The word to be completed
+ :param completion_type: path completion type (``file``, ``path`` or ``dir``)
+ :return: A generator of regular files and/or directories
+ """
+ directory, filename = os.path.split(current)
+ current_path = os.path.abspath(directory)
+ # Don't complete paths if they can't be accessed
+ if not os.access(current_path, os.R_OK):
+ return
+ filename = os.path.normcase(filename)
+ # list all files that start with ``filename``
+ file_list = (
+ x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
+ )
+ for f in file_list:
+ opt = os.path.join(current_path, f)
+ comp_file = os.path.normcase(os.path.join(directory, f))
+ # complete regular files when there is not ``<dir>`` after option
+ # complete directories when there is ``<file>``, ``<path>`` or
+ # ``<dir>`` after option
+ if completion_type != "dir" and os.path.isfile(opt):
+ yield comp_file
+ elif os.path.isdir(opt):
+ yield os.path.join(comp_file, "")
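`autocomplete()` only runs when the shell completion hook has exported PIP_AUTO_COMPLETE along with COMP_WORDS and COMP_CWORD. A sketch of triggering it by hand with hypothetical values:

```python
import os
import subprocess
import sys

# Simulate what the shell completion hook exports before re-invoking pip.
env = dict(
    os.environ,
    PIP_AUTO_COMPLETE="1",
    COMP_WORDS="pip ins",  # the command line typed so far
    COMP_CWORD="1",        # index of the word being completed
)
out = subprocess.run(
    [sys.executable, "-m", "pip"], env=env, capture_output=True, text=True
)
print(out.stdout)  # candidate completions, e.g. "inspect install"
```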
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/base_command.py b/.venv/Lib/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a3b8e6c213a9a8069b38729ab3a0c16a213ce62
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,236 @@
+"""Base Command class, and related routines"""
+
+import functools
+import logging
+import logging.config
+import optparse
+import os
+import sys
+import traceback
+from optparse import Values
+from typing import Any, Callable, List, Optional, Tuple
+
+from pip._vendor.rich import traceback as rich_traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.cli.status_codes import (
+ ERROR,
+ PREVIOUS_BUILD_DIR_ERROR,
+ UNKNOWN_ERROR,
+ VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.exceptions import (
+ BadCommand,
+ CommandError,
+ DiagnosticPipError,
+ InstallationError,
+ NetworkConnectionError,
+ PreviousBuildDirError,
+ UninstallationError,
+)
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
+from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = ["Command"]
+
+logger = logging.getLogger(__name__)
+
+
+class Command(CommandContextMixIn):
+ usage: str = ""
+ ignore_require_venv: bool = False
+
+ def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
+ super().__init__()
+
+ self.name = name
+ self.summary = summary
+ self.parser = ConfigOptionParser(
+ usage=self.usage,
+ prog=f"{get_prog()} {name}",
+ formatter=UpdatingDefaultsHelpFormatter(),
+ add_help_option=False,
+ name=name,
+ description=self.__doc__,
+ isolated=isolated,
+ )
+
+ self.tempdir_registry: Optional[TempDirRegistry] = None
+
+ # Commands should add options to this option group
+ optgroup_name = f"{self.name.capitalize()} Options"
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+ # Add the general options
+ gen_opts = cmdoptions.make_option_group(
+ cmdoptions.general_group,
+ self.parser,
+ )
+ self.parser.add_option_group(gen_opts)
+
+ self.add_options()
+
+ def add_options(self) -> None:
+ pass
+
+ def handle_pip_version_check(self, options: Values) -> None:
+ """
+ This is a no-op so that commands by default do not do the pip version
+ check.
+ """
+ # Make sure we do the pip version check if the index_group options
+ # are present.
+ assert not hasattr(options, "no_index")
+
+ def run(self, options: Values, args: List[str]) -> int:
+ raise NotImplementedError
+
+ def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
+ # factored out for testability
+ return self.parser.parse_args(args)
+
+ def main(self, args: List[str]) -> int:
+ try:
+ with self.main_context():
+ return self._main(args)
+ finally:
+ logging.shutdown()
+
+ def _main(self, args: List[str]) -> int:
+ # We must initialize this before the tempdir manager, otherwise the
+ # configuration would not be accessible by the time we clean up the
+ # tempdir manager.
+ self.tempdir_registry = self.enter_context(tempdir_registry())
+ # Intentionally set as early as possible so globally-managed temporary
+ # directories are available to the rest of the code.
+ self.enter_context(global_tempdir_manager())
+
+ options, args = self.parse_args(args)
+
+ # Set verbosity so that it can be used elsewhere.
+ self.verbosity = options.verbose - options.quiet
+
+ level_number = setup_logging(
+ verbosity=self.verbosity,
+ no_color=options.no_color,
+ user_log_file=options.log,
+ )
+
+ always_enabled_features = set(options.features_enabled) & set(
+ cmdoptions.ALWAYS_ENABLED_FEATURES
+ )
+ if always_enabled_features:
+ logger.warning(
+ "The following features are always enabled: %s. ",
+ ", ".join(sorted(always_enabled_features)),
+ )
+
+ # Make sure that the --python argument isn't specified after the
+ # subcommand. We can tell, because if --python was specified,
+ # we should only reach this point if we're running in the created
+ # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
+ # variable set.
+ if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+ logger.critical(
+ "The --python option must be placed before the pip subcommand name"
+ )
+ sys.exit(ERROR)
+
+ # TODO: Try to get these passing down from the command?
+ # without resorting to os.environ to hold these.
+ # This also affects isolated builds and it should.
+
+ if options.no_input:
+ os.environ["PIP_NO_INPUT"] = "1"
+
+ if options.exists_action:
+ os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
+
+ if options.require_venv and not self.ignore_require_venv:
+ # If a venv is required check if it can really be found
+ if not running_under_virtualenv():
+ logger.critical("Could not find an activated virtualenv (required).")
+ sys.exit(VIRTUALENV_NOT_FOUND)
+
+ if options.cache_dir:
+ options.cache_dir = normalize_path(options.cache_dir)
+ if not check_path_owner(options.cache_dir):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned "
+ "or is not writable by the current user. The cache "
+ "has been disabled. Check the permissions and owner of "
+ "that directory. If executing pip with sudo, you should "
+ "use sudo's -H flag.",
+ options.cache_dir,
+ )
+ options.cache_dir = None
+
+ def intercepts_unhandled_exc(
+ run_func: Callable[..., int]
+ ) -> Callable[..., int]:
+ @functools.wraps(run_func)
+ def exc_logging_wrapper(*args: Any) -> int:
+ try:
+ status = run_func(*args)
+ assert isinstance(status, int)
+ return status
+ except DiagnosticPipError as exc:
+ logger.error("[present-rich] %s", exc)
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except PreviousBuildDirError as exc:
+ logger.critical(str(exc))
+ logger.debug("Exception information:", exc_info=True)
+
+ return PREVIOUS_BUILD_DIR_ERROR
+ except (
+ InstallationError,
+ UninstallationError,
+ BadCommand,
+ NetworkConnectionError,
+ ) as exc:
+ logger.critical(str(exc))
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except CommandError as exc:
+ logger.critical("%s", exc)
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except BrokenStdoutLoggingError:
+ # Bypass our logger and write any remaining messages to
+ # stderr because stdout no longer works.
+ print("ERROR: Pipe to stdout was broken", file=sys.stderr)
+ if level_number <= logging.DEBUG:
+ traceback.print_exc(file=sys.stderr)
+
+ return ERROR
+ except KeyboardInterrupt:
+ logger.critical("Operation cancelled by user")
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except BaseException:
+ logger.critical("Exception:", exc_info=True)
+
+ return UNKNOWN_ERROR
+
+ return exc_logging_wrapper
+
+ try:
+ if not options.debug_mode:
+ run = intercepts_unhandled_exc(self.run)
+ else:
+ run = self.run
+ rich_traceback.install(show_locals=True)
+ return run(options, args)
+ finally:
+ self.handle_pip_version_check(options)
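`intercepts_unhandled_exc` is the usual map-exceptions-to-exit-codes decorator. A compact, self-contained sketch of the same idea (the status values are illustrative, not pip's real constants):

```python
import functools
import logging
from typing import Any, Callable

SUCCESS, ERROR, UNKNOWN_ERROR = 0, 1, 2  # illustrative status codes


def intercepts_unhandled_exc(run_func: Callable[..., int]) -> Callable[..., int]:
    @functools.wraps(run_func)
    def wrapper(*args: Any) -> int:
        try:
            return run_func(*args)
        except KeyboardInterrupt:
            logging.critical("Operation cancelled by user")
            return ERROR
        except BaseException:
            # Anything unexpected is logged together with its traceback.
            logging.critical("Exception:", exc_info=True)
            return UNKNOWN_ERROR

    return wrapper


@intercepts_unhandled_exc
def run() -> int:
    return SUCCESS
```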
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py b/.venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..02ba60827933d6623cdf6b1417762fee47c1ab6f
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py
@@ -0,0 +1,1074 @@
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parses general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import importlib.util
+import logging
+import os
+import textwrap
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
+from textwrap import dedent
+from typing import Any, Callable, Dict, Optional, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.parser import ConfigOptionParser
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.index import PyPI
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.misc import strtobool
+
+logger = logging.getLogger(__name__)
+
+
+def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
+ """
+ Raise an option parsing error using parser.error().
+
+ Args:
+ parser: an OptionParser instance.
+ option: an Option instance.
+ msg: the error text.
+ """
+ msg = f"{option} error: {msg}"
+ msg = textwrap.fill(" ".join(msg.split()))
+ parser.error(msg)
+
+
+def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
+ """
+ Return an OptionGroup object
+ group -- assumed to be dict with 'name' and 'options' keys
+ parser -- an optparse Parser
+ """
+ option_group = OptionGroup(parser, group["name"])
+ for option in group["options"]:
+ option_group.add_option(option())
+ return option_group
+
+
+def check_dist_restriction(options: Values, check_target: bool = False) -> None:
+ """Function for determining if custom platform options are allowed.
+
+ :param options: The OptionParser options.
+ :param check_target: Whether or not to check if --target is being used.
+ """
+ dist_restriction_set = any(
+ [
+ options.python_version,
+ options.platforms,
+ options.abis,
+ options.implementation,
+ ]
+ )
+
+ binary_only = FormatControl(set(), {":all:"})
+ sdist_dependencies_allowed = (
+ options.format_control != binary_only and not options.ignore_dependencies
+ )
+
+ # Installations or downloads using dist restrictions must not combine
+ # source distributions and dist-specific wheels, as they are not
+ # guaranteed to be locally compatible.
+ if dist_restriction_set and sdist_dependencies_allowed:
+ raise CommandError(
+ "When restricting platform and interpreter constraints using "
+ "--python-version, --platform, --abi, or --implementation, "
+ "either --no-deps must be set, or --only-binary=:all: must be "
+ "set and --no-binary must not be set (or must be set to "
+ ":none:)."
+ )
+
+ if check_target:
+ if dist_restriction_set and not options.target_dir:
+ raise CommandError(
+ "Can not use any platform or abi specific options unless "
+ "installing via '--target'"
+ )
+
+
+def _path_option_check(option: Option, opt: str, value: str) -> str:
+ return os.path.expanduser(value)
+
+
+def _package_name_option_check(option: Option, opt: str, value: str) -> str:
+ return canonicalize_name(value)
+
+
+class PipOption(Option):
+ TYPES = Option.TYPES + ("path", "package_name")
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
+ TYPE_CHECKER["package_name"] = _package_name_option_check
+ TYPE_CHECKER["path"] = _path_option_check
+
+
+###########
+# options #
+###########
+
+help_: Callable[..., Option] = partial(
+ Option,
+ "-h",
+ "--help",
+ dest="help",
+ action="help",
+ help="Show help.",
+)
+
+debug_mode: Callable[..., Option] = partial(
+ Option,
+ "--debug",
+ dest="debug_mode",
+ action="store_true",
+ default=False,
+ help=(
+ "Let unhandled exceptions propagate outside the main subroutine, "
+ "instead of logging them to stderr."
+ ),
+)
+
+isolated_mode: Callable[..., Option] = partial(
+ Option,
+ "--isolated",
+ dest="isolated_mode",
+ action="store_true",
+ default=False,
+ help=(
+ "Run pip in an isolated mode, ignoring environment variables and user "
+ "configuration."
+ ),
+)
+
+require_virtualenv: Callable[..., Option] = partial(
+ Option,
+ "--require-virtualenv",
+ "--require-venv",
+ dest="require_venv",
+ action="store_true",
+ default=False,
+ help=(
+ "Allow pip to only run in a virtual environment; "
+ "exit with an error otherwise."
+ ),
+)
+
+override_externally_managed: Callable[..., Option] = partial(
+ Option,
+ "--break-system-packages",
+ dest="override_externally_managed",
+ action="store_true",
+ help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
+)
+
+python: Callable[..., Option] = partial(
+ Option,
+ "--python",
+ dest="python",
+ help="Run pip with the specified Python interpreter.",
+)
+
+verbose: Callable[..., Option] = partial(
+ Option,
+ "-v",
+ "--verbose",
+ dest="verbose",
+ action="count",
+ default=0,
+ help="Give more output. Option is additive, and can be used up to 3 times.",
+)
+
+no_color: Callable[..., Option] = partial(
+ Option,
+ "--no-color",
+ dest="no_color",
+ action="store_true",
+ default=False,
+ help="Suppress colored output.",
+)
+
+version: Callable[..., Option] = partial(
+ Option,
+ "-V",
+ "--version",
+ dest="version",
+ action="store_true",
+ help="Show version and exit.",
+)
+
+quiet: Callable[..., Option] = partial(
+ Option,
+ "-q",
+ "--quiet",
+ dest="quiet",
+ action="count",
+ default=0,
+ help=(
+ "Give less output. Option is additive, and can be used up to 3"
+ " times (corresponding to WARNING, ERROR, and CRITICAL logging"
+ " levels)."
+ ),
+)
+
+progress_bar: Callable[..., Option] = partial(
+ Option,
+ "--progress-bar",
+ dest="progress_bar",
+ type="choice",
+ choices=["on", "off"],
+ default="on",
+ help="Specify whether the progress bar should be used [on, off] (default: on)",
+)
+
+log: Callable[..., Option] = partial(
+ PipOption,
+ "--log",
+ "--log-file",
+ "--local-log",
+ dest="log",
+ metavar="path",
+ type="path",
+ help="Path to a verbose appending log.",
+)
+
+no_input: Callable[..., Option] = partial(
+ Option,
+ # Don't ask for input
+ "--no-input",
+ dest="no_input",
+ action="store_true",
+ default=False,
+ help="Disable prompting for input.",
+)
+
+keyring_provider: Callable[..., Option] = partial(
+ Option,
+ "--keyring-provider",
+ dest="keyring_provider",
+ choices=["auto", "disabled", "import", "subprocess"],
+ default="auto",
+ help=(
+ "Enable the credential lookup via the keyring library if user input is allowed."
+ " Specify which mechanism to use [disabled, import, subprocess]."
+ " (default: disabled)"
+ ),
+)
+
+proxy: Callable[..., Option] = partial(
+ Option,
+ "--proxy",
+ dest="proxy",
+ type="str",
+ default="",
+ help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
+)
+
+retries: Callable[..., Option] = partial(
+ Option,
+ "--retries",
+ dest="retries",
+ type="int",
+ default=5,
+ help="Maximum number of retries each connection should attempt "
+ "(default %default times).",
+)
+
+timeout: Callable[..., Option] = partial(
+ Option,
+ "--timeout",
+ "--default-timeout",
+ metavar="sec",
+ dest="timeout",
+ type="float",
+ default=15,
+ help="Set the socket timeout (default %default seconds).",
+)
+
+
+def exists_action() -> Option:
+ return Option(
+ # Option when path already exist
+ "--exists-action",
+ dest="exists_action",
+ type="choice",
+ choices=["s", "i", "w", "b", "a"],
+ default=[],
+ action="append",
+ metavar="action",
+ help="Default action when a path already exists: "
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
+ )
+
+
+cert: Callable[..., Option] = partial(
+ PipOption,
+ "--cert",
+ dest="cert",
+ type="path",
+ metavar="path",
+ help=(
+ "Path to PEM-encoded CA certificate bundle. "
+ "If provided, overrides the default. "
+ "See 'SSL Certificate Verification' in pip documentation "
+ "for more information."
+ ),
+)
+
+client_cert: Callable[..., Option] = partial(
+ PipOption,
+ "--client-cert",
+ dest="client_cert",
+ type="path",
+ default=None,
+ metavar="path",
+ help="Path to SSL client certificate, a single file containing the "
+ "private key and the certificate in PEM format.",
+)
+
+index_url: Callable[..., Option] = partial(
+ Option,
+ "-i",
+ "--index-url",
+ "--pypi-url",
+ dest="index_url",
+ metavar="URL",
+ default=PyPI.simple_url,
+ help="Base URL of the Python Package Index (default %default). "
+ "This should point to a repository compliant with PEP 503 "
+ "(the simple repository API) or a local directory laid out "
+ "in the same format.",
+)
+
+
+def extra_index_url() -> Option:
+ return Option(
+ "--extra-index-url",
+ dest="extra_index_urls",
+ metavar="URL",
+ action="append",
+ default=[],
+ help="Extra URLs of package indexes to use in addition to "
+ "--index-url. Should follow the same rules as "
+ "--index-url.",
+ )
+
+
+no_index: Callable[..., Option] = partial(
+ Option,
+ "--no-index",
+ dest="no_index",
+ action="store_true",
+ default=False,
+ help="Ignore package index (only looking at --find-links URLs instead).",
+)
+
+
+def find_links() -> Option:
+ return Option(
+ "-f",
+ "--find-links",
+ dest="find_links",
+ action="append",
+ default=[],
+ metavar="url",
+ help="If a URL or path to an html file, then parse for links to "
+ "archives such as sdist (.tar.gz) or wheel (.whl) files. "
+ "If a local path or file:// URL that's a directory, "
+ "then look for archives in the directory listing. "
+ "Links to VCS project URLs are not supported.",
+ )
+
+
+def trusted_host() -> Option:
+ return Option(
+ "--trusted-host",
+ dest="trusted_hosts",
+ action="append",
+ metavar="HOSTNAME",
+ default=[],
+ help="Mark this host or host:port pair as trusted, even though it "
+ "does not have valid or any HTTPS.",
+ )
+
+
+def constraints() -> Option:
+ return Option(
+ "-c",
+ "--constraint",
+ dest="constraints",
+ action="append",
+ default=[],
+ metavar="file",
+ help="Constrain versions using the given constraints file. "
+ "This option can be used multiple times.",
+ )
+
+
+def requirements() -> Option:
+ return Option(
+ "-r",
+ "--requirement",
+ dest="requirements",
+ action="append",
+ default=[],
+ metavar="file",
+ help="Install from the given requirements file. "
+ "This option can be used multiple times.",
+ )
+
+
+def editable() -> Option:
+ return Option(
+ "-e",
+ "--editable",
+ dest="editables",
+ action="append",
+ default=[],
+ metavar="path/url",
+ help=(
+ "Install a project in editable mode (i.e. setuptools "
+ '"develop mode") from a local project path or a VCS url.'
+ ),
+ )
+
+
+def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
+ value = os.path.abspath(value)
+ setattr(parser.values, option.dest, value)
+
+
+src: Callable[..., Option] = partial(
+ PipOption,
+ "--src",
+ "--source",
+ "--source-dir",
+ "--source-directory",
+ dest="src_dir",
+ type="path",
+ metavar="dir",
+ default=get_src_prefix(),
+ action="callback",
+ callback=_handle_src,
+ help="Directory to check out editable projects into. "
+ 'The default in a virtualenv is "/src". '
+ 'The default for global installs is "/src".',
+)
+
+
+def _get_format_control(values: Values, option: Option) -> Any:
+ """Get a format_control object."""
+ return getattr(values, option.dest)
+
+
+def _handle_no_binary(
+ option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+ existing = _get_format_control(parser.values, option)
+ FormatControl.handle_mutual_excludes(
+ value,
+ existing.no_binary,
+ existing.only_binary,
+ )
+
+
+def _handle_only_binary(
+ option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+ existing = _get_format_control(parser.values, option)
+ FormatControl.handle_mutual_excludes(
+ value,
+ existing.only_binary,
+ existing.no_binary,
+ )
+
+
+def no_binary() -> Option:
+ format_control = FormatControl(set(), set())
+ return Option(
+ "--no-binary",
+ dest="format_control",
+ action="callback",
+ callback=_handle_no_binary,
+ type="str",
+ default=format_control,
+ help="Do not use binary packages. Can be supplied multiple times, and "
+ 'each time adds to the existing value. Accepts either ":all:" to '
+ 'disable all binary packages, ":none:" to empty the set (notice '
+ "the colons), or one or more package names with commas between "
+ "them (no colons). Note that some packages are tricky to compile "
+ "and may fail to install when this option is used on them.",
+ )
+
+
+def only_binary() -> Option:
+ format_control = FormatControl(set(), set())
+ return Option(
+ "--only-binary",
+ dest="format_control",
+ action="callback",
+ callback=_handle_only_binary,
+ type="str",
+ default=format_control,
+ help="Do not use source packages. Can be supplied multiple times, and "
+ 'each time adds to the existing value. Accepts either ":all:" to '
+ 'disable all source packages, ":none:" to empty the set, or one '
+ "or more package names with commas between them. Packages "
+ "without binary distributions will fail to install when this "
+ "option is used on them.",
+ )
+
+
+platforms: Callable[..., Option] = partial(
+ Option,
+ "--platform",
+ dest="platforms",
+ metavar="platform",
+ action="append",
+ default=None,
+ help=(
+ "Only use wheels compatible with . Defaults to the "
+ "platform of the running system. Use this option multiple times to "
+ "specify multiple platforms supported by the target interpreter."
+ ),
+)
+
+
+# This was made a separate function for unit-testing purposes.
+def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
+ """
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
+
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
+ non-None if and only if there was a parsing error.
+ """
+ if not value:
+ # The empty string is the same as not providing a value.
+ return (None, None)
+
+ parts = value.split(".")
+ if len(parts) > 3:
+ return ((), "at most three version parts are allowed")
+
+ if len(parts) == 1:
+ # Then we are in the case of "3" or "37".
+ value = parts[0]
+ if len(value) > 1:
+ parts = [value[0], value[1:]]
+
+ try:
+ version_info = tuple(int(part) for part in parts)
+ except ValueError:
+ return ((), "each version part must be an integer")
+
+ return (version_info, None)
+
+
+def _handle_python_version(
+ option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+ """
+ Handle a provided --python-version value.
+ """
+ version_info, error_msg = _convert_python_version(value)
+ if error_msg is not None:
+ msg = "invalid --python-version value: {!r}: {}".format(
+ value,
+ error_msg,
+ )
+ raise_option_error(parser, option=option, msg=msg)
+
+ parser.values.python_version = version_info
+
+
+python_version: Callable[..., Option] = partial(
+ Option,
+ "--python-version",
+ dest="python_version",
+ metavar="python_version",
+ action="callback",
+ callback=_handle_python_version,
+ type="str",
+ default=None,
+ help=dedent(
+ """\
+ The Python interpreter version to use for wheel and "Requires-Python"
+ compatibility checks. Defaults to a version derived from the running
+ interpreter. The version can be specified using up to three dot-separated
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
+ """
+ ),
+)
+
+
+implementation: Callable[..., Option] = partial(
+ Option,
+ "--implementation",
+ dest="implementation",
+ metavar="implementation",
+ default=None,
+ help=(
+ "Only use wheels compatible with Python "
+ "implementation , e.g. 'pp', 'jy', 'cp', "
+ " or 'ip'. If not specified, then the current "
+ "interpreter implementation is used. Use 'py' to force "
+ "implementation-agnostic wheels."
+ ),
+)
+
+
+abis: Callable[..., Option] = partial(
+ Option,
+ "--abi",
+ dest="abis",
+ metavar="abi",
+ action="append",
+ default=None,
+ help=(
+ "Only use wheels compatible with Python abi , e.g. 'pypy_41'. "
+ "If not specified, then the current interpreter abi tag is used. "
+ "Use this option multiple times to specify multiple abis supported "
+ "by the target interpreter. Generally you will need to specify "
+ "--implementation, --platform, and --python-version when using this "
+ "option."
+ ),
+)
+
+
+def add_target_python_options(cmd_opts: OptionGroup) -> None:
+ cmd_opts.add_option(platforms())
+ cmd_opts.add_option(python_version())
+ cmd_opts.add_option(implementation())
+ cmd_opts.add_option(abis())
+
+
+def make_target_python(options: Values) -> TargetPython:
+ target_python = TargetPython(
+ platforms=options.platforms,
+ py_version_info=options.python_version,
+ abis=options.abis,
+ implementation=options.implementation,
+ )
+
+ return target_python
+
+
+def prefer_binary() -> Option:
+ return Option(
+ "--prefer-binary",
+ dest="prefer_binary",
+ action="store_true",
+ default=False,
+ help="Prefer older binary packages over newer source packages.",
+ )
+
+
+cache_dir: Callable[..., Option] = partial(
+ PipOption,
+ "--cache-dir",
+ dest="cache_dir",
+ default=USER_CACHE_DIR,
+ metavar="dir",
+ type="path",
+ help="Store the cache data in .",
+)
+
+
+def _handle_no_cache_dir(
+ option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+ """
+ Process a value provided for the --no-cache-dir option.
+
+ This is an optparse.Option callback for the --no-cache-dir option.
+ """
+ # The value argument will be None if --no-cache-dir is passed via the
+ # command-line, since the option doesn't accept arguments. However,
+ # the value can be non-None if the option is triggered e.g. by an
+ # environment variable, like PIP_NO_CACHE_DIR=true.
+ if value is not None:
+ # Then parse the string value to get argument error-checking.
+ try:
+ strtobool(value)
+ except ValueError as exc:
+ raise_option_error(parser, option=option, msg=str(exc))
+
+ # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
+ # converted to 0 (like "false" or "no") caused cache_dir to be disabled
+ # rather than enabled (logic would say the latter). Thus, we disable
+ # the cache directory not just on values that parse to True, but (for
+ # backwards compatibility reasons) also on values that parse to False.
+ # In other words, always set it to False if the option is provided in
+ # some (valid) form.
+ parser.values.cache_dir = False
+
+
+no_cache: Callable[..., Option] = partial(
+ Option,
+ "--no-cache-dir",
+ dest="cache_dir",
+ action="callback",
+ callback=_handle_no_cache_dir,
+ help="Disable the cache.",
+)
+
+no_deps: Callable[..., Option] = partial(
+ Option,
+ "--no-deps",
+ "--no-dependencies",
+ dest="ignore_dependencies",
+ action="store_true",
+ default=False,
+ help="Don't install package dependencies.",
+)
+
+ignore_requires_python: Callable[..., Option] = partial(
+ Option,
+ "--ignore-requires-python",
+ dest="ignore_requires_python",
+ action="store_true",
+ help="Ignore the Requires-Python information.",
+)
+
+no_build_isolation: Callable[..., Option] = partial(
+ Option,
+ "--no-build-isolation",
+ dest="build_isolation",
+ action="store_false",
+ default=True,
+ help="Disable isolation when building a modern source distribution. "
+ "Build dependencies specified by PEP 518 must be already installed "
+ "if this option is used.",
+)
+
+check_build_deps: Callable[..., Option] = partial(
+ Option,
+ "--check-build-dependencies",
+ dest="check_build_deps",
+ action="store_true",
+ default=False,
+ help="Check the build dependencies when PEP517 is used.",
+)
+
+
+def _handle_no_use_pep517(
+ option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+ """
+ Process a value provided for the --no-use-pep517 option.
+
+ This is an optparse.Option callback for the no_use_pep517 option.
+ """
+ # Since --no-use-pep517 doesn't accept arguments, the value argument
+ # will be None if --no-use-pep517 is passed via the command-line.
+ # However, the value can be non-None if the option is triggered e.g.
+ # by an environment variable, for example "PIP_NO_USE_PEP517=true".
+ if value is not None:
+ msg = """A value was passed for --no-use-pep517,
+ probably using either the PIP_NO_USE_PEP517 environment variable
+ or the "no-use-pep517" config file option. Use an appropriate value
+ of the PIP_USE_PEP517 environment variable or the "use-pep517"
+ config file option instead.
+ """
+ raise_option_error(parser, option=option, msg=msg)
+
+    # If the user doesn't wish to use PEP 517, check that setuptools and wheel
+    # are installed and raise an error if they are not.
+ packages = ("setuptools", "wheel")
+ if not all(importlib.util.find_spec(package) for package in packages):
+ msg = (
+ f"It is not possible to use --no-use-pep517 "
+ f"without {' and '.join(packages)} installed."
+ )
+ raise_option_error(parser, option=option, msg=msg)
+
+ # Otherwise, --no-use-pep517 was passed via the command-line.
+ parser.values.use_pep517 = False
+
+
+use_pep517: Any = partial(
+ Option,
+ "--use-pep517",
+ dest="use_pep517",
+ action="store_true",
+ default=None,
+ help="Use PEP 517 for building source distributions "
+ "(use --no-use-pep517 to force legacy behaviour).",
+)
+
+no_use_pep517: Any = partial(
+ Option,
+ "--no-use-pep517",
+ dest="use_pep517",
+ action="callback",
+ callback=_handle_no_use_pep517,
+ default=None,
+ help=SUPPRESS_HELP,
+)
+
+
+def _handle_config_settings(
+ option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+ key, sep, val = value.partition("=")
+ if sep != "=":
+ parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
+ dest = getattr(parser.values, option.dest)
+ if dest is None:
+ dest = {}
+ setattr(parser.values, option.dest, dest)
+ if key in dest:
+ if isinstance(dest[key], list):
+ dest[key].append(val)
+ else:
+ dest[key] = [dest[key], val]
+ else:
+ dest[key] = val
+
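+# Sketch of the merging behaviour implemented above: repeated keys accumulate
+# into a list, e.g.
+#   pip install . -C key=a -C key=b
+# yields config_settings == {"key": ["a", "b"]}, while a single occurrence
+# stays a plain string.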
+
+config_settings: Callable[..., Option] = partial(
+ Option,
+ "-C",
+ "--config-settings",
+ dest="config_settings",
+ type=str,
+ action="callback",
+ callback=_handle_config_settings,
+ metavar="settings",
+ help="Configuration settings to be passed to the PEP 517 build backend. "
+ "Settings take the form KEY=VALUE. Use multiple --config-settings options "
+ "to pass multiple keys to the backend.",
+)
+
+build_options: Callable[..., Option] = partial(
+ Option,
+ "--build-option",
+ dest="build_options",
+ metavar="options",
+ action="append",
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
+)
+
+global_options: Callable[..., Option] = partial(
+ Option,
+ "--global-option",
+ dest="global_options",
+ action="append",
+ metavar="options",
+ help="Extra global options to be supplied to the setup.py "
+ "call before the install or bdist_wheel command.",
+)
+
+no_clean: Callable[..., Option] = partial(
+ Option,
+ "--no-clean",
+ action="store_true",
+ default=False,
+ help="Don't clean up build directories.",
+)
+
+pre: Callable[..., Option] = partial(
+ Option,
+ "--pre",
+ action="store_true",
+ default=False,
+ help="Include pre-release and development versions. By default, "
+ "pip only finds stable versions.",
+)
+
+disable_pip_version_check: Callable[..., Option] = partial(
+ Option,
+ "--disable-pip-version-check",
+ dest="disable_pip_version_check",
+ action="store_true",
+ default=False,
+ help="Don't periodically check PyPI to determine whether a new version "
+ "of pip is available for download. Implied with --no-index.",
+)
+
+root_user_action: Callable[..., Option] = partial(
+ Option,
+ "--root-user-action",
+ dest="root_user_action",
+ default="warn",
+ choices=["warn", "ignore"],
+ help="Action if pip is run as a root user. By default, a warning message is shown.",
+)
+
+
+def _handle_merge_hash(
+ option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+ """Given a value spelled "algo:digest", append the digest to a list
+ pointed to in a dict by the algo name."""
+ if not parser.values.hashes:
+ parser.values.hashes = {}
+ try:
+ algo, digest = value.split(":", 1)
+ except ValueError:
+ parser.error(
+ "Arguments to {} must be a hash name " # noqa
+ "followed by a value, like --hash=sha256:"
+ "abcde...".format(opt_str)
+ )
+ if algo not in STRONG_HASHES:
+ parser.error(
+ "Allowed hash algorithms for {} are {}.".format( # noqa
+ opt_str, ", ".join(STRONG_HASHES)
+ )
+ )
+ parser.values.hashes.setdefault(algo, []).append(digest)
+
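+# Example of the merging above: two --hash options for the same algorithm
+# collect into one list, e.g.
+#   --hash=sha256:aaa... --hash=sha256:bbb...
+# produces parser.values.hashes == {"sha256": ["aaa...", "bbb..."]}.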
+
+hash: Callable[..., Option] = partial(
+ Option,
+ "--hash",
+ # Hash values eventually end up in InstallRequirement.hashes due to
+ # __dict__ copying in process_line().
+ dest="hashes",
+ action="callback",
+ callback=_handle_merge_hash,
+ type="string",
+ help="Verify that the package's archive matches this "
+ "hash before installing. Example: --hash=sha256:abcdef...",
+)
+
+
+require_hashes: Callable[..., Option] = partial(
+ Option,
+ "--require-hashes",
+ dest="require_hashes",
+ action="store_true",
+ default=False,
+ help="Require a hash to check each requirement against, for "
+ "repeatable installs. This option is implied when any package in a "
+ "requirements file has a --hash option.",
+)
+
+
+list_path: Callable[..., Option] = partial(
+ PipOption,
+ "--path",
+ dest="path",
+ type="path",
+ action="append",
+ help="Restrict to the specified installation path for listing "
+ "packages (can be used multiple times).",
+)
+
+
+def check_list_path_option(options: Values) -> None:
+ if options.path and (options.user or options.local):
+ raise CommandError("Cannot combine '--path' with '--user' or '--local'")
+
+
+list_exclude: Callable[..., Option] = partial(
+ PipOption,
+ "--exclude",
+ dest="excludes",
+ action="append",
+ metavar="package",
+ type="package_name",
+ help="Exclude specified package from the output",
+)
+
+
+no_python_version_warning: Callable[..., Option] = partial(
+ Option,
+ "--no-python-version-warning",
+ dest="no_python_version_warning",
+ action="store_true",
+ default=False,
+ help="Silence deprecation warnings for upcoming unsupported Pythons.",
+)
+
+
+# Features that are now always on. A warning is printed if they are used.
+ALWAYS_ENABLED_FEATURES = [
+ "no-binary-enable-wheel-cache", # always on since 23.1
+]
+
+use_new_feature: Callable[..., Option] = partial(
+ Option,
+ "--use-feature",
+ dest="features_enabled",
+ metavar="feature",
+ action="append",
+ default=[],
+ choices=[
+ "fast-deps",
+ "truststore",
+ ]
+ + ALWAYS_ENABLED_FEATURES,
+ help="Enable new functionality, that may be backward incompatible.",
+)
+
+use_deprecated_feature: Callable[..., Option] = partial(
+ Option,
+ "--use-deprecated",
+ dest="deprecated_features_enabled",
+ metavar="feature",
+ action="append",
+ default=[],
+ choices=[
+ "legacy-resolver",
+ ],
+ help=("Enable deprecated functionality, that will be removed in the future."),
+)
+
+
+##########
+# groups #
+##########
+
+general_group: Dict[str, Any] = {
+ "name": "General Options",
+ "options": [
+ help_,
+ debug_mode,
+ isolated_mode,
+ require_virtualenv,
+ python,
+ verbose,
+ version,
+ quiet,
+ log,
+ no_input,
+ keyring_provider,
+ proxy,
+ retries,
+ timeout,
+ exists_action,
+ trusted_host,
+ cert,
+ client_cert,
+ cache_dir,
+ no_cache,
+ disable_pip_version_check,
+ no_color,
+ no_python_version_warning,
+ use_new_feature,
+ use_deprecated_feature,
+ ],
+}
+
+index_group: Dict[str, Any] = {
+ "name": "Package Index Options",
+ "options": [
+ index_url,
+ extra_index_url,
+ no_index,
+ find_links,
+ ],
+}
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/command_context.py b/.venv/Lib/site-packages/pip/_internal/cli/command_context.py
new file mode 100644
index 0000000000000000000000000000000000000000..139995ac3f109a82664e4913f7ebc32ecf7617e1
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/command_context.py
@@ -0,0 +1,27 @@
+from contextlib import ExitStack, contextmanager
+from typing import ContextManager, Generator, TypeVar
+
+_T = TypeVar("_T", covariant=True)
+
+
+class CommandContextMixIn:
+ def __init__(self) -> None:
+ super().__init__()
+ self._in_main_context = False
+ self._main_context = ExitStack()
+
+ @contextmanager
+ def main_context(self) -> Generator[None, None, None]:
+ assert not self._in_main_context
+
+ self._in_main_context = True
+ try:
+ with self._main_context:
+ yield
+ finally:
+ self._in_main_context = False
+
+ def enter_context(self, context_provider: ContextManager[_T]) -> _T:
+ assert self._in_main_context
+
+ return self._main_context.enter_context(context_provider)
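+
+
+# Usage sketch (make_session is a hypothetical factory): a command enters its
+# main context once, and everything registered via enter_context() is closed
+# when the main context exits:
+#   with cmd.main_context():
+#       session = cmd.enter_context(make_session())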
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/main.py b/.venv/Lib/site-packages/pip/_internal/cli/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e061f5b39081f39e9f4fa2a0e88aec0e0a3da79
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/main.py
@@ -0,0 +1,79 @@
+"""Primary application entrypoint.
+"""
+import locale
+import logging
+import os
+import sys
+import warnings
+from typing import List, Optional
+
+from pip._internal.cli.autocompletion import autocomplete
+from pip._internal.cli.main_parser import parse_command
+from pip._internal.commands import create_command
+from pip._internal.exceptions import PipError
+from pip._internal.utils import deprecation
+
+logger = logging.getLogger(__name__)
+
+
+# Do not import and use main() directly! Using it directly is actively
+# discouraged by pip's maintainers. The name, location and behavior of
+# this function are subject to change, so calling it directly is not
+# portable across different pip versions.
+
+# In addition, running pip in-process is unsupported and unsafe. This is
+# elaborated in detail at
+# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
+# That document also provides suggestions that should work for nearly
+# all users that are considering importing and using main() directly.
+
+# However, we know that certain users will still want to invoke pip
+# in-process. If you understand and accept the implications of using pip
+# in an unsupported manner, the best approach is to use runpy to avoid
+# depending on the exact location of this entry point.
+
+# The following example shows how to use runpy to invoke pip in that
+# case:
+#
+# sys.argv = ["pip", your, args, here]
+# runpy.run_module("pip", run_name="__main__")
+#
+# Note that this will exit the process after running, unlike a direct
+# call to main. As it is not safe to do any processing after calling
+# main, this should not be an issue in practice.
+
+
+def main(args: Optional[List[str]] = None) -> int:
+ if args is None:
+ args = sys.argv[1:]
+
+ # Suppress the pkg_resources deprecation warning
+ # Note - we use a module of .*pkg_resources to cover
+ # the normal case (pip._vendor.pkg_resources) and the
+ # devendored case (a bare pkg_resources)
+ warnings.filterwarnings(
+ action="ignore", category=DeprecationWarning, module=".*pkg_resources"
+ )
+
+ # Configure our deprecation warnings to be sent through loggers
+ deprecation.install_warning_logger()
+
+ autocomplete()
+
+ try:
+ cmd_name, cmd_args = parse_command(args)
+ except PipError as exc:
+ sys.stderr.write(f"ERROR: {exc}")
+ sys.stderr.write(os.linesep)
+ sys.exit(1)
+
+ # Needed for locale.getpreferredencoding(False) to work
+ # in pip._internal.utils.encoding.auto_decode
+ try:
+ locale.setlocale(locale.LC_ALL, "")
+ except locale.Error as e:
+        # setlocale can apparently crash if the locale is uninitialized
+ logger.debug("Ignoring error %s when setting locale", e)
+ command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
+
+ return command.main(cmd_args)
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/main_parser.py b/.venv/Lib/site-packages/pip/_internal/cli/main_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ade356b9c2f3e375bf598635627870f248c0cc3
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/main_parser.py
@@ -0,0 +1,134 @@
+"""A single place for constructing and exposing the main parser
+"""
+
+import os
+import subprocess
+import sys
+from typing import List, Optional, Tuple
+
+from pip._internal.build_env import get_runnable_pip
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.commands import commands_dict, get_similar_commands
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.misc import get_pip_version, get_prog
+
+__all__ = ["create_main_parser", "parse_command"]
+
+
+def create_main_parser() -> ConfigOptionParser:
+ """Creates and returns the main parser for pip's CLI"""
+
+ parser = ConfigOptionParser(
+ usage="\n%prog [options]",
+ add_help_option=False,
+ formatter=UpdatingDefaultsHelpFormatter(),
+ name="global",
+ prog=get_prog(),
+ )
+ parser.disable_interspersed_args()
+
+ parser.version = get_pip_version()
+
+ # add the general options
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
+ parser.add_option_group(gen_opts)
+
+ # so the help formatter knows
+ parser.main = True # type: ignore
+
+ # create command listing for description
+ description = [""] + [
+ f"{name:27} {command_info.summary}"
+ for name, command_info in commands_dict.items()
+ ]
+ parser.description = "\n".join(description)
+
+ return parser
+
+
+def identify_python_interpreter(python: str) -> Optional[str]:
+ # If the named file exists, use it.
+ # If it's a directory, assume it's a virtual environment and
+ # look for the environment's Python executable.
+ if os.path.exists(python):
+ if os.path.isdir(python):
+ # bin/python for Unix, Scripts/python.exe for Windows
+ # Try both in case of odd cases like cygwin.
+ for exe in ("bin/python", "Scripts/python.exe"):
+ py = os.path.join(python, exe)
+ if os.path.exists(py):
+ return py
+ else:
+ return python
+
+ # Could not find the interpreter specified
+ return None
+
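+# Illustrative behaviour (assuming a standard venv layout):
+#   identify_python_interpreter(".venv")            -> path to the venv's python
+#   identify_python_interpreter("/usr/bin/python3") -> "/usr/bin/python3" (if it exists)
+#   identify_python_interpreter("missing")          -> None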
+
+def parse_command(args: List[str]) -> Tuple[str, List[str]]:
+ parser = create_main_parser()
+
+ # Note: parser calls disable_interspersed_args(), so the result of this
+ # call is to split the initial args into the general options before the
+ # subcommand and everything else.
+ # For example:
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
+    # general_options: ['--timeout=5']
+ # args_else: ['install', '--user', 'INITools']
+ general_options, args_else = parser.parse_args(args)
+
+ # --python
+ if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+ # Re-invoke pip using the specified Python interpreter
+ interpreter = identify_python_interpreter(general_options.python)
+ if interpreter is None:
+ raise CommandError(
+ f"Could not locate Python interpreter {general_options.python}"
+ )
+
+ pip_cmd = [
+ interpreter,
+ get_runnable_pip(),
+ ]
+ pip_cmd.extend(args)
+
+ # Set a flag so the child doesn't re-invoke itself, causing
+ # an infinite loop.
+ os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
+ returncode = 0
+ try:
+ proc = subprocess.run(pip_cmd)
+ returncode = proc.returncode
+ except (subprocess.SubprocessError, OSError) as exc:
+ raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
+ sys.exit(returncode)
+
+ # --version
+ if general_options.version:
+ sys.stdout.write(parser.version)
+ sys.stdout.write(os.linesep)
+ sys.exit()
+
+ # pip || pip help -> print_help()
+ if not args_else or (args_else[0] == "help" and len(args_else) == 1):
+ parser.print_help()
+ sys.exit()
+
+ # the subcommand name
+ cmd_name = args_else[0]
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = [f'unknown command "{cmd_name}"']
+ if guess:
+ msg.append(f'maybe you meant "{guess}"')
+
+ raise CommandError(" - ".join(msg))
+
+ # all the args without the subcommand
+ cmd_args = args[:]
+ cmd_args.remove(cmd_name)
+
+ return cmd_name, cmd_args
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/parser.py b/.venv/Lib/site-packages/pip/_internal/cli/parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..c762cf2781d460288674314959c727e860aad067
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/parser.py
@@ -0,0 +1,294 @@
+"""Base option parser setup"""
+
+import logging
+import optparse
+import shutil
+import sys
+import textwrap
+from contextlib import suppress
+from typing import Any, Dict, Generator, List, Tuple
+
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
+from pip._internal.configuration import Configuration, ConfigurationError
+from pip._internal.utils.misc import redact_auth_from_url, strtobool
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+ """A prettier/less verbose help formatter for optparse."""
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ # help position must be aligned with __init__.parseopts.description
+ kwargs["max_help_position"] = 30
+ kwargs["indent_increment"] = 1
+ kwargs["width"] = shutil.get_terminal_size()[0] - 2
+ super().__init__(*args, **kwargs)
+
+ def format_option_strings(self, option: optparse.Option) -> str:
+ return self._format_option_strings(option)
+
+ def _format_option_strings(
+ self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
+ ) -> str:
+ """
+ Return a comma-separated list of option strings and metavars.
+
+        :param option: the optparse.Option whose short/long opts
+            (e.g. '-f', '--format') are joined
+ :param mvarfmt: metavar format string
+ :param optsep: separator
+ """
+ opts = []
+
+ if option._short_opts:
+ opts.append(option._short_opts[0])
+ if option._long_opts:
+ opts.append(option._long_opts[0])
+ if len(opts) > 1:
+ opts.insert(1, optsep)
+
+ if option.takes_value():
+ assert option.dest is not None
+ metavar = option.metavar or option.dest.lower()
+ opts.append(mvarfmt.format(metavar.lower()))
+
+ return "".join(opts)
+
+ def format_heading(self, heading: str) -> str:
+ if heading == "Options":
+ return ""
+ return heading + ":\n"
+
+ def format_usage(self, usage: str) -> str:
+ """
+ Ensure there is only one newline between usage and the first heading
+ if there is no description.
+ """
+ msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
+ return msg
+
+ def format_description(self, description: str) -> str:
+ # leave full control over description to us
+ if description:
+ if hasattr(self.parser, "main"):
+ label = "Commands"
+ else:
+ label = "Description"
+ # some doc strings have initial newlines, some don't
+ description = description.lstrip("\n")
+ # some doc strings have final newlines and spaces, some don't
+ description = description.rstrip()
+ # dedent, then reindent
+ description = self.indent_lines(textwrap.dedent(description), " ")
+ description = f"{label}:\n{description}\n"
+ return description
+ else:
+ return ""
+
+ def format_epilog(self, epilog: str) -> str:
+ # leave full control over epilog to us
+ if epilog:
+ return epilog
+ else:
+ return ""
+
+ def indent_lines(self, text: str, indent: str) -> str:
+ new_lines = [indent + line for line in text.split("\n")]
+ return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+ """Custom help formatter for use in ConfigOptionParser.
+
+    This updates the defaults before expanding them, allowing
+    them to show up correctly in the help listing.
+
+    It also redacts auth credentials from URL-type options.
+ """
+
+ def expand_default(self, option: optparse.Option) -> str:
+ default_values = None
+ if self.parser is not None:
+ assert isinstance(self.parser, ConfigOptionParser)
+ self.parser._update_defaults(self.parser.defaults)
+ assert option.dest is not None
+ default_values = self.parser.defaults.get(option.dest)
+ help_text = super().expand_default(option)
+
+ if default_values and option.metavar == "URL":
+ if isinstance(default_values, str):
+ default_values = [default_values]
+
+            # If it's not a list, we should abort and just return the help text
+ if not isinstance(default_values, list):
+ default_values = []
+
+ for val in default_values:
+ help_text = help_text.replace(val, redact_auth_from_url(val))
+
+ return help_text
+
+
+class CustomOptionParser(optparse.OptionParser):
+ def insert_option_group(
+ self, idx: int, *args: Any, **kwargs: Any
+ ) -> optparse.OptionGroup:
+ """Insert an OptionGroup at a given position."""
+ group = self.add_option_group(*args, **kwargs)
+
+ self.option_groups.pop()
+ self.option_groups.insert(idx, group)
+
+ return group
+
+ @property
+ def option_list_all(self) -> List[optparse.Option]:
+ """Get a list of all options, including those in option groups."""
+ res = self.option_list[:]
+ for i in self.option_groups:
+ res.extend(i.option_list)
+
+ return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+ """Custom option parser which updates its defaults by checking the
+    configuration files and environment variables"""
+
+ def __init__(
+ self,
+ *args: Any,
+ name: str,
+ isolated: bool = False,
+ **kwargs: Any,
+ ) -> None:
+ self.name = name
+ self.config = Configuration(isolated)
+
+ assert self.name
+ super().__init__(*args, **kwargs)
+
+ def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
+ try:
+ return option.check_value(key, val)
+ except optparse.OptionValueError as exc:
+ print(f"An error occurred during configuration: {exc}")
+ sys.exit(3)
+
+ def _get_ordered_configuration_items(
+ self,
+ ) -> Generator[Tuple[str, Any], None, None]:
+ # Configuration gives keys in an unordered manner. Order them.
+ override_order = ["global", self.name, ":env:"]
+
+ # Pool the options into different groups
+ section_items: Dict[str, List[Tuple[str, Any]]] = {
+ name: [] for name in override_order
+ }
+ for section_key, val in self.config.items():
+ # ignore empty values
+ if not val:
+ logger.debug(
+ "Ignoring configuration key '%s' as it's value is empty.",
+ section_key,
+ )
+ continue
+
+ section, key = section_key.split(".", 1)
+ if section in override_order:
+ section_items[section].append((key, val))
+
+ # Yield each group in their override order
+ for section in override_order:
+ for key, val in section_items[section]:
+ yield key, val
+
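+    # Illustrative precedence (hypothetical values): given
+    #   [global] timeout = 10, [install] timeout = 30, and PIP_TIMEOUT=60,
+    # this yields ("timeout", "10"), then ("timeout", "30"), then
+    # ("timeout", "60"); _update_defaults() applies them in order, so the
+    # environment value wins for `pip install`.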
+ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
+ """Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists)."""
+
+ # Accumulate complex default state.
+ self.values = optparse.Values(self.defaults)
+ late_eval = set()
+ # Then set the options with those values
+ for key, val in self._get_ordered_configuration_items():
+ # '--' because configuration supports only long names
+ option = self.get_option("--" + key)
+
+ # Ignore options not present in this parser. E.g. non-globals put
+ # in [global] by users that want them to apply to all applicable
+ # commands.
+ if option is None:
+ continue
+
+ assert option.dest is not None
+
+ if option.action in ("store_true", "store_false"):
+ try:
+ val = strtobool(val)
+ except ValueError:
+ self.error(
+ "{} is not a valid value for {} option, " # noqa
+ "please specify a boolean value like yes/no, "
+ "true/false or 1/0 instead.".format(val, key)
+ )
+ elif option.action == "count":
+ with suppress(ValueError):
+ val = strtobool(val)
+ with suppress(ValueError):
+ val = int(val)
+ if not isinstance(val, int) or val < 0:
+ self.error(
+ "{} is not a valid value for {} option, " # noqa
+ "please instead specify either a non-negative integer "
+ "or a boolean value like yes/no or false/true "
+ "which is equivalent to 1/0.".format(val, key)
+ )
+ elif option.action == "append":
+ val = val.split()
+ val = [self.check_default(option, key, v) for v in val]
+ elif option.action == "callback":
+ assert option.callback is not None
+ late_eval.add(option.dest)
+ opt_str = option.get_opt_string()
+ val = option.convert_value(opt_str, val)
+ # From take_action
+ args = option.callback_args or ()
+ kwargs = option.callback_kwargs or {}
+ option.callback(option, opt_str, val, self, *args, **kwargs)
+ else:
+ val = self.check_default(option, key, val)
+
+ defaults[option.dest] = val
+
+ for key in late_eval:
+ defaults[key] = getattr(self.values, key)
+ self.values = None
+ return defaults
+
+ def get_default_values(self) -> optparse.Values:
+ """Overriding to make updating the defaults after instantiation of
+ the option parser possible, _update_defaults() does the dirty work."""
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+        # Load the configuration, erroring out if it is invalid
+ try:
+ self.config.load()
+ except ConfigurationError as err:
+ self.exit(UNKNOWN_ERROR, str(err))
+
+ defaults = self._update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ assert option.dest is not None
+ default = defaults.get(option.dest)
+ if isinstance(default, str):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+ def error(self, msg: str) -> None:
+ self.print_usage(sys.stderr)
+ self.exit(UNKNOWN_ERROR, f"{msg}\n")
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/progress_bars.py b/.venv/Lib/site-packages/pip/_internal/cli/progress_bars.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ad14031ca50c2c348dc0daa8fe7b38af532c0f5
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/progress_bars.py
@@ -0,0 +1,68 @@
+import functools
+from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
+
+from pip._vendor.rich.progress import (
+ BarColumn,
+ DownloadColumn,
+ FileSizeColumn,
+ Progress,
+ ProgressColumn,
+ SpinnerColumn,
+ TextColumn,
+ TimeElapsedColumn,
+ TimeRemainingColumn,
+ TransferSpeedColumn,
+)
+
+from pip._internal.utils.logging import get_indentation
+
+DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
+
+
+def _rich_progress_bar(
+ iterable: Iterable[bytes],
+ *,
+ bar_type: str,
+ size: int,
+) -> Generator[bytes, None, None]:
+ assert bar_type == "on", "This should only be used in the default mode."
+
+ if not size:
+ total = float("inf")
+ columns: Tuple[ProgressColumn, ...] = (
+ TextColumn("[progress.description]{task.description}"),
+ SpinnerColumn("line", speed=1.5),
+ FileSizeColumn(),
+ TransferSpeedColumn(),
+ TimeElapsedColumn(),
+ )
+ else:
+ total = size
+ columns = (
+ TextColumn("[progress.description]{task.description}"),
+ BarColumn(),
+ DownloadColumn(),
+ TransferSpeedColumn(),
+ TextColumn("eta"),
+ TimeRemainingColumn(),
+ )
+
+ progress = Progress(*columns, refresh_per_second=30)
+ task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
+ with progress:
+ for chunk in iterable:
+ yield chunk
+ progress.update(task_id, advance=len(chunk))
+
+
+def get_download_progress_renderer(
+ *, bar_type: str, size: Optional[int] = None
+) -> DownloadProgressRenderer:
+ """Get an object that can be used to render the download progress.
+
+    Returns a callable that takes an iterable to "wrap".
+ """
+ if bar_type == "on":
+ return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
+ else:
+ return iter # no-op, when passed an iterator
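+
+
+# Usage sketch: wrap a stream of chunks so progress renders as bytes flow
+# through ("resp" stands in for, e.g., a requests Response; an assumption,
+# not part of this module):
+#   renderer = get_download_progress_renderer(bar_type="on", size=length)
+#   for chunk in renderer(resp.iter_content(chunk_size=8192)):
+#       f.write(chunk)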
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/req_command.py b/.venv/Lib/site-packages/pip/_internal/cli/req_command.py
new file mode 100644
index 0000000000000000000000000000000000000000..86070f10c14b14dbfac004d11ba3234d36b70276
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/req_command.py
@@ -0,0 +1,508 @@
+"""Contains the Command base classes that depend on PipSession.
+
+The classes in this module are in a separate module so the commands not
+needing download / PackageFinder capability don't unnecessarily import the
+PackageFinder machinery and all its vendored dependencies, etc.
+"""
+
+import logging
+import os
+import sys
+from functools import partial
+from optparse import Values
+from typing import TYPE_CHECKING, Any, List, Optional, Tuple
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import (
+ install_req_from_editable,
+ install_req_from_line,
+ install_req_from_parsed_requirement,
+ install_req_from_req_string,
+)
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.resolution.base import BaseResolver
+from pip._internal.self_outdated_check import pip_self_version_check
+from pip._internal.utils.temp_dir import (
+ TempDirectory,
+ TempDirectoryTypeRegistry,
+ tempdir_kinds,
+)
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if TYPE_CHECKING:
+ from ssl import SSLContext
+
+logger = logging.getLogger(__name__)
+
+
+def _create_truststore_ssl_context() -> Optional["SSLContext"]:
+ if sys.version_info < (3, 10):
+ raise CommandError("The truststore feature is only available for Python 3.10+")
+
+ try:
+ import ssl
+ except ImportError:
+ logger.warning("Disabling truststore since ssl support is missing")
+ return None
+
+ try:
+ import truststore
+ except ImportError:
+ raise CommandError(
+ "To use the truststore feature, 'truststore' must be installed into "
+ "pip's current environment."
+ )
+
+ return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+
+
+class SessionCommandMixin(CommandContextMixIn):
+
+ """
+ A class mixin for command classes needing _build_session().
+ """
+
+ def __init__(self) -> None:
+ super().__init__()
+ self._session: Optional[PipSession] = None
+
+ @classmethod
+ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
+ """Return a list of index urls from user-provided options."""
+ index_urls = []
+ if not getattr(options, "no_index", False):
+ url = getattr(options, "index_url", None)
+ if url:
+ index_urls.append(url)
+ urls = getattr(options, "extra_index_urls", None)
+ if urls:
+ index_urls.extend(urls)
+ # Return None rather than an empty list
+ return index_urls or None
+
+ def get_default_session(self, options: Values) -> PipSession:
+ """Get a default-managed session."""
+ if self._session is None:
+ self._session = self.enter_context(self._build_session(options))
+ # there's no type annotation on requests.Session, so it's
+ # automatically ContextManager[Any] and self._session becomes Any,
+ # then https://github.com/python/mypy/issues/7696 kicks in
+ assert self._session is not None
+ return self._session
+
+ def _build_session(
+ self,
+ options: Values,
+ retries: Optional[int] = None,
+ timeout: Optional[int] = None,
+ fallback_to_certifi: bool = False,
+ ) -> PipSession:
+ cache_dir = options.cache_dir
+ assert not cache_dir or os.path.isabs(cache_dir)
+
+ if "truststore" in options.features_enabled:
+ try:
+ ssl_context = _create_truststore_ssl_context()
+ except Exception:
+ if not fallback_to_certifi:
+ raise
+ ssl_context = None
+ else:
+ ssl_context = None
+
+ session = PipSession(
+ cache=os.path.join(cache_dir, "http") if cache_dir else None,
+ retries=retries if retries is not None else options.retries,
+ trusted_hosts=options.trusted_hosts,
+ index_urls=self._get_index_urls(options),
+ ssl_context=ssl_context,
+ )
+
+ # Handle custom ca-bundles from the user
+ if options.cert:
+ session.verify = options.cert
+
+ # Handle SSL client certificate
+ if options.client_cert:
+ session.cert = options.client_cert
+
+ # Handle timeouts
+ if options.timeout or timeout:
+ session.timeout = timeout if timeout is not None else options.timeout
+
+ # Handle configured proxies
+ if options.proxy:
+ session.proxies = {
+ "http": options.proxy,
+ "https": options.proxy,
+ }
+
+ # Determine if we can prompt the user for authentication or not
+ session.auth.prompting = not options.no_input
+ session.auth.keyring_provider = options.keyring_provider
+
+ return session
+
+
+class IndexGroupCommand(Command, SessionCommandMixin):
+
+ """
+ Abstract base class for commands with the index_group options.
+
+ This also corresponds to the commands that permit the pip version check.
+ """
+
+ def handle_pip_version_check(self, options: Values) -> None:
+ """
+ Do the pip version check if not disabled.
+
+ This overrides the default behavior of not doing the check.
+ """
+ # Make sure the index_group options are present.
+ assert hasattr(options, "no_index")
+
+ if options.disable_pip_version_check or options.no_index:
+ return
+
+ # Otherwise, check if we're using the latest version of pip available.
+ session = self._build_session(
+ options,
+ retries=0,
+ timeout=min(5, options.timeout),
+ # This is set to ensure the function does not fail when truststore is
+ # specified in use-feature but cannot be loaded. This usually raises a
+ # CommandError and shows a nice user-facing error, but this function is not
+ # called in that try-except block.
+ fallback_to_certifi=True,
+ )
+ with session:
+ pip_self_version_check(session, options)
+
+
+KEEPABLE_TEMPDIR_TYPES = [
+ tempdir_kinds.BUILD_ENV,
+ tempdir_kinds.EPHEM_WHEEL_CACHE,
+ tempdir_kinds.REQ_BUILD,
+]
+
+
+def warn_if_run_as_root() -> None:
+ """Output a warning for sudo users on Unix.
+
+ In a virtual environment, sudo pip still writes to virtualenv.
+ On Windows, users may run pip as Administrator without issues.
+ This warning only applies to Unix root users outside of virtualenv.
+ """
+ if running_under_virtualenv():
+ return
+ if not hasattr(os, "getuid"):
+ return
+ # On Windows, there are no "system managed" Python packages. Installing as
+ # Administrator via pip is the correct way of updating system environments.
+ #
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
+ if sys.platform == "win32" or sys.platform == "cygwin":
+ return
+
+ if os.getuid() != 0:
+ return
+
+ logger.warning(
+ "Running pip as the 'root' user can result in broken permissions and "
+ "conflicting behaviour with the system package manager. "
+ "It is recommended to use a virtual environment instead: "
+ "https://pip.pypa.io/warnings/venv"
+ )
+
+
+def with_cleanup(func: Any) -> Any:
+ """Decorator for common logic related to managing temporary
+ directories.
+ """
+
+ def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
+ for t in KEEPABLE_TEMPDIR_TYPES:
+ registry.set_delete(t, False)
+
+ def wrapper(
+ self: RequirementCommand, options: Values, args: List[Any]
+ ) -> Optional[int]:
+ assert self.tempdir_registry is not None
+ if options.no_clean:
+ configure_tempdir_registry(self.tempdir_registry)
+
+ try:
+ return func(self, options, args)
+ except PreviousBuildDirError:
+ # This kind of conflict can occur when the user passes an explicit
+ # build directory with a pre-existing folder. In that case we do
+ # not want to accidentally remove it.
+ configure_tempdir_registry(self.tempdir_registry)
+ raise
+
+ return wrapper
+
+
+class RequirementCommand(IndexGroupCommand):
+ def __init__(self, *args: Any, **kw: Any) -> None:
+ super().__init__(*args, **kw)
+
+ self.cmd_opts.add_option(cmdoptions.no_clean())
+
+ @staticmethod
+ def determine_resolver_variant(options: Values) -> str:
+ """Determines which resolver should be used, based on the given options."""
+ if "legacy-resolver" in options.deprecated_features_enabled:
+ return "legacy"
+
+ return "2020-resolver"
+
+ @classmethod
+ def make_requirement_preparer(
+ cls,
+ temp_build_dir: TempDirectory,
+ options: Values,
+ build_tracker: BuildTracker,
+ session: PipSession,
+ finder: PackageFinder,
+ use_user_site: bool,
+ download_dir: Optional[str] = None,
+ verbosity: int = 0,
+ ) -> RequirementPreparer:
+ """
+ Create a RequirementPreparer instance for the given parameters.
+ """
+ temp_build_dir_path = temp_build_dir.path
+ assert temp_build_dir_path is not None
+ legacy_resolver = False
+
+ resolver_variant = cls.determine_resolver_variant(options)
+ if resolver_variant == "2020-resolver":
+ lazy_wheel = "fast-deps" in options.features_enabled
+ if lazy_wheel:
+ logger.warning(
+ "pip is using lazily downloaded wheels using HTTP "
+ "range requests to obtain dependency information. "
+ "This experimental feature is enabled through "
+ "--use-feature=fast-deps and it is not ready for "
+ "production."
+ )
+ else:
+ legacy_resolver = True
+ lazy_wheel = False
+ if "fast-deps" in options.features_enabled:
+ logger.warning(
+ "fast-deps has no effect when used with the legacy resolver."
+ )
+
+ return RequirementPreparer(
+ build_dir=temp_build_dir_path,
+ src_dir=options.src_dir,
+ download_dir=download_dir,
+ build_isolation=options.build_isolation,
+ check_build_deps=options.check_build_deps,
+ build_tracker=build_tracker,
+ session=session,
+ progress_bar=options.progress_bar,
+ finder=finder,
+ require_hashes=options.require_hashes,
+ use_user_site=use_user_site,
+ lazy_wheel=lazy_wheel,
+ verbosity=verbosity,
+ legacy_resolver=legacy_resolver,
+ )
+
+ @classmethod
+ def make_resolver(
+ cls,
+ preparer: RequirementPreparer,
+ finder: PackageFinder,
+ options: Values,
+ wheel_cache: Optional[WheelCache] = None,
+ use_user_site: bool = False,
+ ignore_installed: bool = True,
+ ignore_requires_python: bool = False,
+ force_reinstall: bool = False,
+ upgrade_strategy: str = "to-satisfy-only",
+ use_pep517: Optional[bool] = None,
+ py_version_info: Optional[Tuple[int, ...]] = None,
+ ) -> BaseResolver:
+ """
+ Create a Resolver instance for the given parameters.
+ """
+ make_install_req = partial(
+ install_req_from_req_string,
+ isolated=options.isolated_mode,
+ use_pep517=use_pep517,
+ )
+ resolver_variant = cls.determine_resolver_variant(options)
+        # The long import name and duplicated invocation are needed to convince
+        # Mypy into correctly typechecking. Otherwise it would complain about
+        # the "Resolver" class being redefined.
+ if resolver_variant == "2020-resolver":
+ import pip._internal.resolution.resolvelib.resolver
+
+ return pip._internal.resolution.resolvelib.resolver.Resolver(
+ preparer=preparer,
+ finder=finder,
+ wheel_cache=wheel_cache,
+ make_install_req=make_install_req,
+ use_user_site=use_user_site,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_installed=ignore_installed,
+ ignore_requires_python=ignore_requires_python,
+ force_reinstall=force_reinstall,
+ upgrade_strategy=upgrade_strategy,
+ py_version_info=py_version_info,
+ )
+ import pip._internal.resolution.legacy.resolver
+
+ return pip._internal.resolution.legacy.resolver.Resolver(
+ preparer=preparer,
+ finder=finder,
+ wheel_cache=wheel_cache,
+ make_install_req=make_install_req,
+ use_user_site=use_user_site,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_installed=ignore_installed,
+ ignore_requires_python=ignore_requires_python,
+ force_reinstall=force_reinstall,
+ upgrade_strategy=upgrade_strategy,
+ py_version_info=py_version_info,
+ )
+
+ def get_requirements(
+ self,
+ args: List[str],
+ options: Values,
+ finder: PackageFinder,
+ session: PipSession,
+ ) -> List[InstallRequirement]:
+ """
+ Parse command-line arguments into the corresponding requirements.
+ """
+ requirements: List[InstallRequirement] = []
+ for filename in options.constraints:
+ for parsed_req in parse_requirements(
+ filename,
+ constraint=True,
+ finder=finder,
+ options=options,
+ session=session,
+ ):
+ req_to_add = install_req_from_parsed_requirement(
+ parsed_req,
+ isolated=options.isolated_mode,
+ user_supplied=False,
+ )
+ requirements.append(req_to_add)
+
+ for req in args:
+ req_to_add = install_req_from_line(
+ req,
+ comes_from=None,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ user_supplied=True,
+ config_settings=getattr(options, "config_settings", None),
+ )
+ requirements.append(req_to_add)
+
+ for req in options.editables:
+ req_to_add = install_req_from_editable(
+ req,
+ user_supplied=True,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ config_settings=getattr(options, "config_settings", None),
+ )
+ requirements.append(req_to_add)
+
+ # NOTE: options.require_hashes may be set if --require-hashes is True
+ for filename in options.requirements:
+ for parsed_req in parse_requirements(
+ filename, finder=finder, options=options, session=session
+ ):
+ req_to_add = install_req_from_parsed_requirement(
+ parsed_req,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ user_supplied=True,
+ config_settings=parsed_req.options.get("config_settings")
+ if parsed_req.options
+ else None,
+ )
+ requirements.append(req_to_add)
+
+ # If any requirement has hash options, enable hash checking.
+ if any(req.has_hash_options for req in requirements):
+ options.require_hashes = True
+
+ if not (args or options.editables or options.requirements):
+ opts = {"name": self.name}
+ if options.find_links:
+ raise CommandError(
+ "You must give at least one requirement to {name} "
+ '(maybe you meant "pip {name} {links}"?)'.format(
+ **dict(opts, links=" ".join(options.find_links))
+ )
+ )
+ else:
+ raise CommandError(
+ "You must give at least one requirement to {name} "
+ '(see "pip help {name}")'.format(**opts)
+ )
+
+ return requirements
+
+ @staticmethod
+ def trace_basic_info(finder: PackageFinder) -> None:
+ """
+ Trace basic information about the provided objects.
+ """
+ # Display where finder is looking for packages
+ search_scope = finder.search_scope
+ locations = search_scope.get_formatted_locations()
+ if locations:
+ logger.info(locations)
+
+ def _build_package_finder(
+ self,
+ options: Values,
+ session: PipSession,
+ target_python: Optional[TargetPython] = None,
+ ignore_requires_python: Optional[bool] = None,
+ ) -> PackageFinder:
+ """
+ Create a package finder appropriate to this requirement command.
+
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ link_collector = LinkCollector.create(session, options=options)
+ selection_prefs = SelectionPreferences(
+ allow_yanked=True,
+ format_control=options.format_control,
+ allow_all_prereleases=options.pre,
+ prefer_binary=options.prefer_binary,
+ ignore_requires_python=ignore_requires_python,
+ )
+
+ return PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ target_python=target_python,
+ )
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/spinners.py b/.venv/Lib/site-packages/pip/_internal/cli/spinners.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf2b976f377c2656afb3d84add8d30b0fc280c03
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/spinners.py
@@ -0,0 +1,159 @@
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from typing import IO, Generator, Optional
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+
+logger = logging.getLogger(__name__)
+
+
+class SpinnerInterface:
+ def spin(self) -> None:
+ raise NotImplementedError()
+
+ def finish(self, final_status: str) -> None:
+ raise NotImplementedError()
+
+
+class InteractiveSpinner(SpinnerInterface):
+ def __init__(
+ self,
+ message: str,
+ file: Optional[IO[str]] = None,
+ spin_chars: str = "-\\|/",
+ # Empirically, 8 updates/second looks nice
+ min_update_interval_seconds: float = 0.125,
+ ):
+ self._message = message
+ if file is None:
+ file = sys.stdout
+ self._file = file
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._finished = False
+
+ self._spin_cycle = itertools.cycle(spin_chars)
+
+ self._file.write(" " * get_indentation() + self._message + " ... ")
+ self._width = 0
+
+ def _write(self, status: str) -> None:
+ assert not self._finished
+ # Erase what we wrote before by backspacing to the beginning, writing
+ # spaces to overwrite the old text, and then backspacing again
+ backup = "\b" * self._width
+ self._file.write(backup + " " * self._width + backup)
+ # Now we have a blank slate to add our status
+ self._file.write(status)
+ self._width = len(status)
+ self._file.flush()
+ self._rate_limiter.reset()
+
+ def spin(self) -> None:
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._write(next(self._spin_cycle))
+
+ def finish(self, final_status: str) -> None:
+ if self._finished:
+ return
+ self._write(final_status)
+ self._file.write("\n")
+ self._file.flush()
+ self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(SpinnerInterface):
+ def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
+ self._message = message
+ self._finished = False
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._update("started")
+
+ def _update(self, status: str) -> None:
+ assert not self._finished
+ self._rate_limiter.reset()
+ logger.info("%s: %s", self._message, status)
+
+ def spin(self) -> None:
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._update("still running...")
+
+ def finish(self, final_status: str) -> None:
+ if self._finished:
+ return
+ self._update(f"finished with status '{final_status}'")
+ self._finished = True
+
+
+class RateLimiter:
+ def __init__(self, min_update_interval_seconds: float) -> None:
+ self._min_update_interval_seconds = min_update_interval_seconds
+ self._last_update: float = 0
+
+ def ready(self) -> bool:
+ now = time.time()
+ delta = now - self._last_update
+ return delta >= self._min_update_interval_seconds
+
+ def reset(self) -> None:
+ self._last_update = time.time()
+
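+# Usage sketch, mirroring how the spinners above use it:
+#   limiter = RateLimiter(0.125)
+#   if limiter.ready():
+#       update_display()   # hypothetical work
+#       limiter.reset()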
+
+@contextlib.contextmanager
+def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
+ # Interactive spinner goes directly to sys.stdout rather than being routed
+ # through the logging system, but it acts like it has level INFO,
+ # i.e. it's only displayed if we're at level INFO or better.
+ # Non-interactive spinner goes through the logging system, so it is always
+ # in sync with logging configuration.
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+ spinner: SpinnerInterface = InteractiveSpinner(message)
+ else:
+ spinner = NonInteractiveSpinner(message)
+ try:
+ with hidden_cursor(sys.stdout):
+ yield spinner
+ except KeyboardInterrupt:
+ spinner.finish("canceled")
+ raise
+ except Exception:
+ spinner.finish("error")
+ raise
+ else:
+ spinner.finish("done")
+
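+# Usage sketch: spin() while work progresses; the final status is printed
+# automatically ("done", "canceled" on KeyboardInterrupt, "error" otherwise):
+#   with open_spinner("Building wheel") as spinner:
+#       for step in work_items:   # work_items / do_step are hypothetical
+#           do_step(step)
+#           spinner.spin()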
+
+HIDE_CURSOR = "\x1b[?25l"
+SHOW_CURSOR = "\x1b[?25h"
+
+
+@contextlib.contextmanager
+def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
+ # even via colorama. So don't even try.
+ if WINDOWS:
+ yield
+ # We don't want to clutter the output with control characters if we're
+ # writing to a file, or if the user is running with --quiet.
+ # See https://github.com/pypa/pip/issues/3418
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+ yield
+ else:
+ file.write(HIDE_CURSOR)
+ try:
+ yield
+ finally:
+ file.write(SHOW_CURSOR)
diff --git a/.venv/Lib/site-packages/pip/_internal/cli/status_codes.py b/.venv/Lib/site-packages/pip/_internal/cli/status_codes.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e29502cddfa9a9887a93399ab4193fb75dfe605
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/cli/status_codes.py
@@ -0,0 +1,6 @@
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/__init__.py b/.venv/Lib/site-packages/pip/_internal/commands/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..858a41014169b8f0eb1b905fa3bb69c753a1bda5
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/__init__.py
@@ -0,0 +1,132 @@
+"""
+Package containing all pip commands
+"""
+
+import importlib
+from collections import namedtuple
+from typing import Any, Dict, Optional
+
+from pip._internal.cli.base_command import Command
+
+CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
+
+# This dictionary does a bunch of heavy lifting for help output:
+# - Enables avoiding additional (costly) imports for presenting `--help`.
+# - The ordering matters for help display.
+#
+# Even though the module path starts with the same "pip._internal.commands"
+# prefix, the full path makes testing easier (specifically when modifying
+# `commands_dict` in test setup / teardown).
+commands_dict: Dict[str, CommandInfo] = {
+ "install": CommandInfo(
+ "pip._internal.commands.install",
+ "InstallCommand",
+ "Install packages.",
+ ),
+ "download": CommandInfo(
+ "pip._internal.commands.download",
+ "DownloadCommand",
+ "Download packages.",
+ ),
+ "uninstall": CommandInfo(
+ "pip._internal.commands.uninstall",
+ "UninstallCommand",
+ "Uninstall packages.",
+ ),
+ "freeze": CommandInfo(
+ "pip._internal.commands.freeze",
+ "FreezeCommand",
+ "Output installed packages in requirements format.",
+ ),
+ "inspect": CommandInfo(
+ "pip._internal.commands.inspect",
+ "InspectCommand",
+ "Inspect the python environment.",
+ ),
+ "list": CommandInfo(
+ "pip._internal.commands.list",
+ "ListCommand",
+ "List installed packages.",
+ ),
+ "show": CommandInfo(
+ "pip._internal.commands.show",
+ "ShowCommand",
+ "Show information about installed packages.",
+ ),
+ "check": CommandInfo(
+ "pip._internal.commands.check",
+ "CheckCommand",
+ "Verify installed packages have compatible dependencies.",
+ ),
+ "config": CommandInfo(
+ "pip._internal.commands.configuration",
+ "ConfigurationCommand",
+ "Manage local and global configuration.",
+ ),
+ "search": CommandInfo(
+ "pip._internal.commands.search",
+ "SearchCommand",
+ "Search PyPI for packages.",
+ ),
+ "cache": CommandInfo(
+ "pip._internal.commands.cache",
+ "CacheCommand",
+ "Inspect and manage pip's wheel cache.",
+ ),
+ "index": CommandInfo(
+ "pip._internal.commands.index",
+ "IndexCommand",
+ "Inspect information available from package indexes.",
+ ),
+ "wheel": CommandInfo(
+ "pip._internal.commands.wheel",
+ "WheelCommand",
+ "Build wheels from your requirements.",
+ ),
+ "hash": CommandInfo(
+ "pip._internal.commands.hash",
+ "HashCommand",
+ "Compute hashes of package archives.",
+ ),
+ "completion": CommandInfo(
+ "pip._internal.commands.completion",
+ "CompletionCommand",
+ "A helper command used for command completion.",
+ ),
+ "debug": CommandInfo(
+ "pip._internal.commands.debug",
+ "DebugCommand",
+ "Show information useful for debugging.",
+ ),
+ "help": CommandInfo(
+ "pip._internal.commands.help",
+ "HelpCommand",
+ "Show help for commands.",
+ ),
+}
+
+
+def create_command(name: str, **kwargs: Any) -> Command:
+ """
+ Create an instance of the Command class with the given name.
+ """
+ module_path, class_name, summary = commands_dict[name]
+ module = importlib.import_module(module_path)
+ command_class = getattr(module, class_name)
+ command = command_class(name=name, summary=summary, **kwargs)
+
+ return command
+
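+# e.g. create_command("install") imports pip._internal.commands.install lazily
+# and returns an InstallCommand instance; an unknown name raises KeyError from
+# the commands_dict lookup.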
+
+def get_similar_commands(name: str) -> Optional[str]:
+ """Command name auto-correct."""
+ from difflib import get_close_matches
+
+ name = name.lower()
+
+ close_commands = get_close_matches(name, commands_dict.keys())
+
+ if close_commands:
+ return close_commands[0]
+ else:
+ return None
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/cache.py b/.venv/Lib/site-packages/pip/_internal/commands/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..e96d2b4924c468c666f3ad6dab902f217ee43c39
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/cache.py
@@ -0,0 +1,222 @@
+import os
+import textwrap
+from optparse import Values
+from typing import Any, List
+
+import pip._internal.utils.filesystem as filesystem
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils.logging import getLogger
+
+logger = getLogger(__name__)
+
+
+class CacheCommand(Command):
+ """
+ Inspect and manage pip's wheel cache.
+
+ Subcommands:
+
+ - dir: Show the cache directory.
+ - info: Show information about the cache.
+ - list: List filenames of packages stored in the cache.
+    - remove: Remove one or more packages from the cache.
+ - purge: Remove all items from the cache.
+
+    ``<pattern>`` can be a glob expression or a package name.
+ """
+
+ ignore_require_venv = True
+ usage = """
+ %prog dir
+ %prog info
+        %prog list [<pattern>] [--format=[human, abspath]]
+        %prog remove <pattern>
+ %prog purge
+ """
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "--format",
+ action="store",
+ dest="list_format",
+ default="human",
+ choices=("human", "abspath"),
+ help="Select the output format among: human (default) or abspath",
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ handlers = {
+ "dir": self.get_cache_dir,
+ "info": self.get_cache_info,
+ "list": self.list_cache_items,
+ "remove": self.remove_cache_items,
+ "purge": self.purge_cache,
+ }
+
+ if not options.cache_dir:
+ logger.error("pip cache commands can not function since cache is disabled.")
+ return ERROR
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error(
+ "Need an action (%s) to perform.",
+ ", ".join(sorted(handlers)),
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def get_cache_dir(self, options: Values, args: List[Any]) -> None:
+ if args:
+ raise CommandError("Too many arguments")
+
+ logger.info(options.cache_dir)
+
+ def get_cache_info(self, options: Values, args: List[Any]) -> None:
+ if args:
+ raise CommandError("Too many arguments")
+
+ num_http_files = len(self._find_http_files(options))
+ num_packages = len(self._find_wheels(options, "*"))
+
+ http_cache_location = self._cache_dir(options, "http")
+ wheels_cache_location = self._cache_dir(options, "wheels")
+ http_cache_size = filesystem.format_directory_size(http_cache_location)
+ wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
+
+ message = (
+ textwrap.dedent(
+ """
+ Package index page cache location: {http_cache_location}
+ Package index page cache size: {http_cache_size}
+ Number of HTTP files: {num_http_files}
+ Locally built wheels location: {wheels_cache_location}
+ Locally built wheels size: {wheels_cache_size}
+ Number of locally built wheels: {package_count}
+ """
+ )
+ .format(
+ http_cache_location=http_cache_location,
+ http_cache_size=http_cache_size,
+ num_http_files=num_http_files,
+ wheels_cache_location=wheels_cache_location,
+ package_count=num_packages,
+ wheels_cache_size=wheels_cache_size,
+ )
+ .strip()
+ )
+
+ logger.info(message)
+
+ def list_cache_items(self, options: Values, args: List[Any]) -> None:
+ if len(args) > 1:
+ raise CommandError("Too many arguments")
+
+ if args:
+ pattern = args[0]
+ else:
+ pattern = "*"
+
+ files = self._find_wheels(options, pattern)
+ if options.list_format == "human":
+ self.format_for_human(files)
+ else:
+ self.format_for_abspath(files)
+
+ def format_for_human(self, files: List[str]) -> None:
+ if not files:
+ logger.info("No locally built wheels cached.")
+ return
+
+ results = []
+ for filename in files:
+ wheel = os.path.basename(filename)
+ size = filesystem.format_file_size(filename)
+ results.append(f" - {wheel} ({size})")
+ logger.info("Cache contents:\n")
+ logger.info("\n".join(sorted(results)))
+
+ def format_for_abspath(self, files: List[str]) -> None:
+ if not files:
+ return
+
+ results = []
+ for filename in files:
+ results.append(filename)
+
+ logger.info("\n".join(sorted(results)))
+
+ def remove_cache_items(self, options: Values, args: List[Any]) -> None:
+ if len(args) > 1:
+ raise CommandError("Too many arguments")
+
+ if not args:
+ raise CommandError("Please provide a pattern")
+
+ files = self._find_wheels(options, args[0])
+
+ no_matching_msg = "No matching packages"
+ if args[0] == "*":
+ # Only fetch http files if no specific pattern given
+ files += self._find_http_files(options)
+ else:
+ # Add the pattern to the log message
+ no_matching_msg += ' for pattern "{}"'.format(args[0])
+
+ if not files:
+ logger.warning(no_matching_msg)
+
+ for filename in files:
+ os.unlink(filename)
+ logger.verbose("Removed %s", filename)
+ logger.info("Files removed: %s", len(files))
+
+ def purge_cache(self, options: Values, args: List[Any]) -> None:
+ if args:
+ raise CommandError("Too many arguments")
+
+ return self.remove_cache_items(options, ["*"])
+
+ def _cache_dir(self, options: Values, subdir: str) -> str:
+ return os.path.join(options.cache_dir, subdir)
+
+ def _find_http_files(self, options: Values) -> List[str]:
+ http_dir = self._cache_dir(options, "http")
+ return filesystem.find_files(http_dir, "*")
+
+ def _find_wheels(self, options: Values, pattern: str) -> List[str]:
+ wheel_dir = self._cache_dir(options, "wheels")
+
+ # The wheel filename format, as specified in PEP 427, is:
+ # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
+ #
+ # Additionally, non-alphanumeric values in the distribution are
+ # normalized to underscores (_), meaning hyphens can never occur
+ # before `-{version}`.
+ #
+ # Given that information:
+ # - If the pattern we're given contains a hyphen (-), the user is
+ # providing at least the version. Thus, we can just append `*.whl`
+ # to match the rest of it.
+ # - If the pattern we're given doesn't contain a hyphen (-), the
+ # user is only providing the name. Thus, we append `-*.whl` to
+ # match the hyphen before the version, followed by anything else.
+ #
+ # PEP 427: https://www.python.org/dev/peps/pep-0427/
+ pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
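+ # Illustrative expansions of the rule above (hypothetical names):
+ # "requests" -> "requests-*.whl"; "requests-2.31" -> "requests-2.31*.whl"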
+
+ return filesystem.find_files(wheel_dir, pattern)
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/check.py b/.venv/Lib/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 0000000000000000000000000000000000000000..5efd0a3416041e3afdf32a2d346db01d99e8f7d9
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,54 @@
+import logging
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.operations.check import (
+ check_package_set,
+ create_package_set_from_installed,
+ warn_legacy_versions_and_specifiers,
+)
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+ """Verify installed packages have compatible dependencies."""
+
+ usage = """
+ %prog [options]"""
+
+ def run(self, options: Values, args: List[str]) -> int:
+ package_set, parsing_probs = create_package_set_from_installed()
+ warn_legacy_versions_and_specifiers(package_set)
+ missing, conflicting = check_package_set(package_set)
+
+ for project_name in missing:
+ version = package_set[project_name].version
+ for dependency in missing[project_name]:
+ write_output(
+ "%s %s requires %s, which is not installed.",
+ project_name,
+ version,
+ dependency[0],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name].version
+ for dep_name, dep_version, req in conflicting[project_name]:
+ write_output(
+ "%s %s has requirement %s, but you have %s %s.",
+ project_name,
+ version,
+ req,
+ dep_name,
+ dep_version,
+ )
+
+ if missing or conflicting or parsing_probs:
+ return ERROR
+ else:
+ write_output("No broken requirements found.")
+ return SUCCESS
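+
+# Example output (illustrative package names), following the write_output
+# formats above:
+#   somepkg 1.0 requires missingdep, which is not installed.
+#   somepkg 1.0 has requirement otherdep>=2, but you have otherdep 1.5.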
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/completion.py b/.venv/Lib/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 0000000000000000000000000000000000000000..30233fc7ad2c07c42e7c2d384312f1f4373155f6
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,121 @@
+import sys
+import textwrap
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip {shell} completion start{script}# pip {shell} completion end
+"""
+
+COMPLETION_SCRIPTS = {
+ "bash": """
+ _pip_completion()
+ {{
+ COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
+ }}
+ complete -o default -F _pip_completion {prog}
+ """,
+ "zsh": """
+ #compdef -P pip[0-9.]#
+ compadd $( COMP_WORDS="$words[*]" \\
+ COMP_CWORD=$((CURRENT-1)) \\
+ PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
+ """,
+ "fish": """
+ function __fish_complete_pip
+ set -lx COMP_WORDS (commandline -o) ""
+ set -lx COMP_CWORD ( \\
+ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+ )
+ set -lx PIP_AUTO_COMPLETE 1
+ string split \\ -- (eval $COMP_WORDS[1])
+ end
+ complete -fa "(__fish_complete_pip)" -c {prog}
+ """,
+ "powershell": """
+ if ((Test-Path Function:\\TabExpansion) -and -not `
+ (Test-Path Function:\\_pip_completeBackup)) {{
+ Rename-Item Function:\\TabExpansion _pip_completeBackup
+ }}
+ function TabExpansion($line, $lastWord) {{
+ $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
+ if ($lastBlock.StartsWith("{prog} ")) {{
+ $Env:COMP_WORDS=$lastBlock
+ $Env:COMP_CWORD=$lastBlock.Split().Length - 1
+ $Env:PIP_AUTO_COMPLETE=1
+ (& {prog}).Split()
+ Remove-Item Env:COMP_WORDS
+ Remove-Item Env:COMP_CWORD
+ Remove-Item Env:PIP_AUTO_COMPLETE
+ }}
+ elseif (Test-Path Function:\\_pip_completeBackup) {{
+ # Fall back on existing tab expansion
+ _pip_completeBackup $line $lastWord
+ }}
+ }}
+ """,
+}
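+
+# Typical usage is to append the emitted script to the shell's startup
+# file, e.g. `pip completion --bash >> ~/.profile` (illustrative target).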
+
+
+class CompletionCommand(Command):
+ """A helper command to be used for command completion."""
+
+ ignore_require_venv = True
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "--bash",
+ "-b",
+ action="store_const",
+ const="bash",
+ dest="shell",
+ help="Emit completion code for bash",
+ )
+ self.cmd_opts.add_option(
+ "--zsh",
+ "-z",
+ action="store_const",
+ const="zsh",
+ dest="shell",
+ help="Emit completion code for zsh",
+ )
+ self.cmd_opts.add_option(
+ "--fish",
+ "-f",
+ action="store_const",
+ const="fish",
+ dest="shell",
+ help="Emit completion code for fish",
+ )
+ self.cmd_opts.add_option(
+ "--powershell",
+ "-p",
+ action="store_const",
+ const="powershell",
+ dest="shell",
+ help="Emit completion code for powershell",
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ """Prints the completion code of the given shell"""
+ shells = COMPLETION_SCRIPTS.keys()
+ shell_options = ["--" + shell for shell in sorted(shells)]
+ if options.shell in shells:
+ script = textwrap.dedent(
+ COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
+ )
+ print(BASE_COMPLETION.format(script=script, shell=options.shell))
+ return SUCCESS
+ else:
+ sys.stderr.write(
+ "ERROR: You must pass {}\n".format(" or ".join(shell_options))
+ )
+ return SUCCESS
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/configuration.py b/.venv/Lib/site-packages/pip/_internal/commands/configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..84b134e490b081d661daf69f98e0b9b1fdddd36f
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/configuration.py
@@ -0,0 +1,282 @@
+import logging
+import os
+import subprocess
+from optparse import Values
+from typing import Any, List, Optional
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+ Configuration,
+ Kind,
+ get_configuration_files,
+ kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_prog, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+ """
+ Manage local and global configuration.
+
+ Subcommands:
+
+ - list: List the active configuration (or from the file specified)
+ - edit: Edit the configuration file in an editor
+ - get: Get the value associated with command.option
+ - set: Set the command.option=value
+ - unset: Unset the value associated with command.option
+ - debug: List the configuration files and values defined under them
+
+ Configuration keys should be dot separated command and option name,
+ with the special prefix "global" affecting any command. For example,
+ "pip config set global.index-url https://example.org/" would configure
+ the index url for all commands, but "pip config set download.timeout 10"
+ would configure a 10 second timeout only for "pip download" commands.
+
+ If none of --user, --global and --site are passed, a virtual
+ environment configuration file is used if one is active and the file
+ exists. Otherwise, all modifications happen to the user file by
+ default.
+ """
+
+ ignore_require_venv = True
+ usage = """
+ %prog [<file-option>] list
+ %prog [<file-option>] [--editor <editor-path>] edit
+
+ %prog [<file-option>] get command.option
+ %prog [<file-option>] set command.option value
+ %prog [<file-option>] unset command.option
+ %prog [<file-option>] debug
+ """
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "--editor",
+ dest="editor",
+ action="store",
+ default=None,
+ help=(
+ "Editor to use to edit the file. Uses VISUAL or EDITOR "
+ "environment variables if not provided."
+ ),
+ )
+
+ self.cmd_opts.add_option(
+ "--global",
+ dest="global_file",
+ action="store_true",
+ default=False,
+ help="Use the system-wide configuration file only",
+ )
+
+ self.cmd_opts.add_option(
+ "--user",
+ dest="user_file",
+ action="store_true",
+ default=False,
+ help="Use the user configuration file only",
+ )
+
+ self.cmd_opts.add_option(
+ "--site",
+ dest="site_file",
+ action="store_true",
+ default=False,
+ help="Use the current environment configuration file only",
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ handlers = {
+ "list": self.list_values,
+ "edit": self.open_in_editor,
+ "get": self.get_name,
+ "set": self.set_name_value,
+ "unset": self.unset_name,
+ "debug": self.list_config_values,
+ }
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error(
+ "Need an action (%s) to perform.",
+ ", ".join(sorted(handlers)),
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Determine which configuration files are to be loaded
+ # Depends on whether the command is modifying.
+ try:
+ load_only = self._determine_file(
+ options, need_value=(action in ["get", "set", "unset", "edit"])
+ )
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ # Load a new configuration
+ self.configuration = Configuration(
+ isolated=options.isolated_mode, load_only=load_only
+ )
+ self.configuration.load()
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
+ file_options = [
+ key
+ for key, value in (
+ (kinds.USER, options.user_file),
+ (kinds.GLOBAL, options.global_file),
+ (kinds.SITE, options.site_file),
+ )
+ if value
+ ]
+
+ if not file_options:
+ if not need_value:
+ return None
+ # Default to user, unless there's a site file.
+ elif any(
+ os.path.exists(site_config_file)
+ for site_config_file in get_configuration_files()[kinds.SITE]
+ ):
+ return kinds.SITE
+ else:
+ return kinds.USER
+ elif len(file_options) == 1:
+ return file_options[0]
+
+ raise PipError(
+ "Need exactly one file to operate upon "
+ "(--user, --site, --global) to perform."
+ )
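+ # Scope resolution examples (illustrative):
+ # `pip config list` -> load_only=None (all scopes merged)
+ # `pip config set k v` -> user file, or site file if one exists
+ # `pip config --global set k v` -> system-wide file only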
+
+ def list_values(self, options: Values, args: List[str]) -> None:
+ self._get_n_args(args, "list", n=0)
+
+ for key, value in sorted(self.configuration.items()):
+ write_output("%s=%r", key, value)
+
+ def get_name(self, options: Values, args: List[str]) -> None:
+ key = self._get_n_args(args, "get [name]", n=1)
+ value = self.configuration.get_value(key)
+
+ write_output("%s", value)
+
+ def set_name_value(self, options: Values, args: List[str]) -> None:
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
+ self.configuration.set_value(key, value)
+
+ self._save_configuration()
+
+ def unset_name(self, options: Values, args: List[str]) -> None:
+ key = self._get_n_args(args, "unset [name]", n=1)
+ self.configuration.unset_value(key)
+
+ self._save_configuration()
+
+ def list_config_values(self, options: Values, args: List[str]) -> None:
+ """List config key-value pairs across different config files"""
+ self._get_n_args(args, "debug", n=0)
+
+ self.print_env_var_values()
+ # Iterate over config files and print if they exist, and the
+ # key-value pairs present in them if they do
+ for variant, files in sorted(self.configuration.iter_config_files()):
+ write_output("%s:", variant)
+ for fname in files:
+ with indent_log():
+ file_exists = os.path.exists(fname)
+ write_output("%s, exists: %r", fname, file_exists)
+ if file_exists:
+ self.print_config_file_values(variant)
+
+ def print_config_file_values(self, variant: Kind) -> None:
+ """Get key-value pairs from the file of a variant"""
+ for name, value in self.configuration.get_values_in_config(variant).items():
+ with indent_log():
+ write_output("%s: %s", name, value)
+
+ def print_env_var_values(self) -> None:
+ """Get key-values pairs present as environment variables"""
+ write_output("%s:", "env_var")
+ with indent_log():
+ for key, value in sorted(self.configuration.get_environ_vars()):
+ env_var = f"PIP_{key.upper()}"
+ write_output("%s=%r", env_var, value)
+
+ def open_in_editor(self, options: Values, args: List[str]) -> None:
+ editor = self._determine_editor(options)
+
+ fname = self.configuration.get_file_to_edit()
+ if fname is None:
+ raise PipError("Could not determine appropriate file.")
+ elif '"' in fname:
+ # This shouldn't happen, unless we see a username like that.
+ # If that happens, we'd appreciate a pull request fixing this.
+ raise PipError(
+ f'Can not open an editor for a file name containing "\n{fname}'
+ )
+
+ try:
+ subprocess.check_call(f'{editor} "{fname}"', shell=True)
+ except FileNotFoundError as e:
+ if not e.filename:
+ e.filename = editor
+ raise
+ except subprocess.CalledProcessError as e:
+ raise PipError(
+ "Editor Subprocess exited with exit code {}".format(e.returncode)
+ )
+
+ def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
+ """Helper to make sure the command got the right number of arguments"""
+ if len(args) != n:
+ msg = (
+ "Got unexpected number of arguments, expected {}. "
+ '(example: "{} config {}")'
+ ).format(n, get_prog(), example)
+ raise PipError(msg)
+
+ if n == 1:
+ return args[0]
+ else:
+ return args
+
+ def _save_configuration(self) -> None:
+ # We successfully ran a modifying command. Need to save the
+ # configuration.
+ try:
+ self.configuration.save()
+ except Exception:
+ logger.exception(
+ "Unable to save configuration. Please report this as a bug."
+ )
+ raise PipError("Internal Error.")
+
+ def _determine_editor(self, options: Values) -> str:
+ if options.editor is not None:
+ return options.editor
+ elif "VISUAL" in os.environ:
+ return os.environ["VISUAL"]
+ elif "EDITOR" in os.environ:
+ return os.environ["EDITOR"]
+ else:
+ raise PipError("Could not determine editor to use.")
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/debug.py b/.venv/Lib/site-packages/pip/_internal/commands/debug.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a3e7d298f393ed8532e4f11913635efc94cb329
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/debug.py
@@ -0,0 +1,199 @@
+import importlib.resources
+import locale
+import logging
+import os
+import sys
+from optparse import Values
+from types import ModuleType
+from typing import Any, Dict, List, Optional
+
+import pip._vendor
+from pip._vendor.certifi import where
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.configuration import Configuration
+from pip._internal.metadata import get_environment
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name: str, value: Any) -> None:
+ logger.info("%s: %s", name, value)
+
+
+def show_sys_implementation() -> None:
+ logger.info("sys.implementation:")
+ implementation_name = sys.implementation.name
+ with indent_log():
+ show_value("name", implementation_name)
+
+
+def create_vendor_txt_map() -> Dict[str, str]:
+ with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
+ # Purge non version specifying lines.
+ # Also, remove any space prefix or suffixes (including comments).
+ lines = [
+ line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
+ ]
+
+ # Transform into "module" -> version dict.
+ return dict(line.split("==", 1) for line in lines)
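+ # e.g. a vendor.txt line "urllib3==1.26.17  # comment" becomes the
+ # mapping entry {"urllib3": "1.26.17"} after the strip/split above.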
+
+
+def get_module_from_module_name(module_name: str) -> ModuleType:
+ # Module name can be uppercase in vendor.txt for some reason...
+ module_name = module_name.lower().replace("-", "_")
+ # PATCH: setuptools is actually only pkg_resources.
+ if module_name == "setuptools":
+ module_name = "pkg_resources"
+
+ __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
+ return getattr(pip._vendor, module_name)
+
+
+def get_vendor_version_from_module(module_name: str) -> Optional[str]:
+ module = get_module_from_module_name(module_name)
+ version = getattr(module, "__version__", None)
+
+ if not version:
+ # Try to find version in debundled module info.
+ assert module.__file__ is not None
+ env = get_environment([os.path.dirname(module.__file__)])
+ dist = env.get_distribution(module_name)
+ if dist:
+ version = str(dist.version)
+
+ return version
+
+
+def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
+ """Log the actual version and print extra info if there is
+ a conflict or if the actual version could not be imported.
+ """
+ for module_name, expected_version in vendor_txt_versions.items():
+ extra_message = ""
+ actual_version = get_vendor_version_from_module(module_name)
+ if not actual_version:
+ extra_message = (
+ " (Unable to locate actual module version, using"
+ " vendor.txt specified version)"
+ )
+ actual_version = expected_version
+ elif parse_version(actual_version) != parse_version(expected_version):
+ extra_message = (
+ " (CONFLICT: vendor.txt suggests version should"
+ " be {})".format(expected_version)
+ )
+ logger.info("%s==%s%s", module_name, actual_version, extra_message)
+
+
+def show_vendor_versions() -> None:
+ logger.info("vendored library versions:")
+
+ vendor_txt_versions = create_vendor_txt_map()
+ with indent_log():
+ show_actual_vendor_versions(vendor_txt_versions)
+
+
+def show_tags(options: Values) -> None:
+ tag_limit = 10
+
+ target_python = make_target_python(options)
+ tags = target_python.get_tags()
+
+ # Display the target options that were explicitly provided.
+ formatted_target = target_python.format_given()
+ suffix = ""
+ if formatted_target:
+ suffix = f" (target: {formatted_target})"
+
+ msg = "Compatible tags: {}{}".format(len(tags), suffix)
+ logger.info(msg)
+
+ if options.verbose < 1 and len(tags) > tag_limit:
+ tags_limited = True
+ tags = tags[:tag_limit]
+ else:
+ tags_limited = False
+
+ with indent_log():
+ for tag in tags:
+ logger.info(str(tag))
+
+ if tags_limited:
+ msg = (
+ "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
+ ).format(tag_limit=tag_limit)
+ logger.info(msg)
+
+
+def ca_bundle_info(config: Configuration) -> str:
+ levels = set()
+ for key, _ in config.items():
+ levels.add(key.split(".")[0])
+
+ if not levels:
+ return "Not specified"
+
+ levels_that_override_global = ["install", "wheel", "download"]
+ global_overriding_level = [
+ level for level in levels if level in levels_that_override_global
+ ]
+ if not global_overriding_level:
+ return "global"
+
+ if "global" in levels:
+ levels.remove("global")
+ return ", ".join(levels)
+
+
+class DebugCommand(Command):
+ """
+ Display debug information.
+ """
+
+ usage = """
+ %prog """
+ ignore_require_venv = True
+
+ def add_options(self) -> None:
+ cmdoptions.add_target_python_options(self.cmd_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+ self.parser.config.load()
+
+ def run(self, options: Values, args: List[str]) -> int:
+ logger.warning(
+ "This command is only meant for debugging. "
+ "Do not use this with automation for parsing and getting these "
+ "details, since the output and options of this command may "
+ "change without notice."
+ )
+ show_value("pip version", get_pip_version())
+ show_value("sys.version", sys.version)
+ show_value("sys.executable", sys.executable)
+ show_value("sys.getdefaultencoding", sys.getdefaultencoding())
+ show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
+ show_value(
+ "locale.getpreferredencoding",
+ locale.getpreferredencoding(),
+ )
+ show_value("sys.platform", sys.platform)
+ show_sys_implementation()
+
+ show_value("'cert' config value", ca_bundle_info(self.parser.config))
+ show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
+ show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
+ show_value("pip._vendor.certifi.where()", where())
+ show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
+
+ show_vendor_versions()
+
+ show_tags(options)
+
+ return SUCCESS
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/download.py b/.venv/Lib/site-packages/pip/_internal/commands/download.py
new file mode 100644
index 0000000000000000000000000000000000000000..54247a78a654187206cd17a403913c6257ffcc7d
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/download.py
@@ -0,0 +1,147 @@
+import logging
+import os
+from optparse import Values
+from typing import List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import check_legacy_setup_py_options
+from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+ """
+ Download packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports downloading from "requirements files", which provide
+ an easy way to specify a whole environment to be downloaded.
+ """
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] <vcs project url> ...
+ %prog [options] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(cmdoptions.constraints())
+ self.cmd_opts.add_option(cmdoptions.requirements())
+ self.cmd_opts.add_option(cmdoptions.no_deps())
+ self.cmd_opts.add_option(cmdoptions.global_options())
+ self.cmd_opts.add_option(cmdoptions.no_binary())
+ self.cmd_opts.add_option(cmdoptions.only_binary())
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
+ self.cmd_opts.add_option(cmdoptions.src())
+ self.cmd_opts.add_option(cmdoptions.pre())
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+
+ self.cmd_opts.add_option(
+ "-d",
+ "--dest",
+ "--destination-dir",
+ "--destination-directory",
+ dest="download_dir",
+ metavar="dir",
+ default=os.curdir,
+ help="Download packages into .",
+ )
+
+ cmdoptions.add_target_python_options(self.cmd_opts)
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ @with_cleanup
+ def run(self, options: Values, args: List[str]) -> int:
+ options.ignore_installed = True
+ # editable doesn't really make sense for `pip download`, but the bowels
+ # of the RequirementSet code require that property.
+ options.editables = []
+
+ cmdoptions.check_dist_restriction(options)
+
+ options.download_dir = normalize_path(options.download_dir)
+ ensure_dir(options.download_dir)
+
+ session = self.get_default_session(options)
+
+ target_python = make_target_python(options)
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ ignore_requires_python=options.ignore_requires_python,
+ )
+
+ build_tracker = self.enter_context(get_build_tracker())
+
+ directory = TempDirectory(
+ delete=not options.no_clean,
+ kind="download",
+ globally_managed=True,
+ )
+
+ reqs = self.get_requirements(args, options, finder, session)
+ check_legacy_setup_py_options(options, reqs)
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ build_tracker=build_tracker,
+ session=session,
+ finder=finder,
+ download_dir=options.download_dir,
+ use_user_site=False,
+ verbosity=self.verbosity,
+ )
+
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ ignore_requires_python=options.ignore_requires_python,
+ use_pep517=options.use_pep517,
+ py_version_info=options.python_version,
+ )
+
+ self.trace_basic_info(finder)
+
+ requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+ downloaded: List[str] = []
+ for req in requirement_set.requirements.values():
+ if req.satisfied_by is None:
+ assert req.name is not None
+ preparer.save_linked_requirement(req)
+ downloaded.append(req.name)
+
+ preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
+ requirement_set.warn_legacy_versions_and_specifiers()
+
+ if downloaded:
+ write_output("Successfully downloaded %s", " ".join(downloaded))
+
+ return SUCCESS
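+
+# Example session (illustrative names; actual output depends on the
+# resolved dependency set):
+#   $ pip download requests -d ./wheels
+#   Successfully downloaded requests urllib3 certifi ...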
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/freeze.py b/.venv/Lib/site-packages/pip/_internal/commands/freeze.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd9d88a8b017d6c1f2600b71812977e80d36d9bd
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,108 @@
+import sys
+from optparse import Values
+from typing import AbstractSet, List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+
+def _should_suppress_build_backends() -> bool:
+ return sys.version_info < (3, 12)
+
+
+def _dev_pkgs() -> AbstractSet[str]:
+ pkgs = {"pip"}
+
+ if _should_suppress_build_backends():
+ pkgs |= {"setuptools", "distribute", "wheel"}
+
+ return pkgs
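+
+# Net effect: on Python < 3.12, `pip freeze` hides pip, setuptools,
+# distribute and wheel by default; on 3.12+ only pip is hidden.
+# Pass --all to include them in the output.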
+
+
+class FreezeCommand(Command):
+ """
+ Output installed packages in requirements format.
+
+ Packages are listed in a case-insensitive sorted order.
+ """
+
+ usage = """
+ %prog [options]"""
+ log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-r",
+ "--requirement",
+ dest="requirements",
+ action="append",
+ default=[],
+ metavar="file",
+ help=(
+ "Use the order in the given requirements file and its "
+ "comments when generating output. This option can be "
+ "used multiple times."
+ ),
+ )
+ self.cmd_opts.add_option(
+ "-l",
+ "--local",
+ dest="local",
+ action="store_true",
+ default=False,
+ help=(
+ "If in a virtualenv that has global access, do not output "
+ "globally-installed packages."
+ ),
+ )
+ self.cmd_opts.add_option(
+ "--user",
+ dest="user",
+ action="store_true",
+ default=False,
+ help="Only output packages installed in user-site.",
+ )
+ self.cmd_opts.add_option(cmdoptions.list_path())
+ self.cmd_opts.add_option(
+ "--all",
+ dest="freeze_all",
+ action="store_true",
+ help=(
+ "Do not skip these packages in the output:"
+ " {}".format(", ".join(_dev_pkgs()))
+ ),
+ )
+ self.cmd_opts.add_option(
+ "--exclude-editable",
+ dest="exclude_editable",
+ action="store_true",
+ help="Exclude editable package from output.",
+ )
+ self.cmd_opts.add_option(cmdoptions.list_exclude())
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ skip = set(stdlib_pkgs)
+ if not options.freeze_all:
+ skip.update(_dev_pkgs())
+
+ if options.excludes:
+ skip.update(options.excludes)
+
+ cmdoptions.check_list_path_option(options)
+
+ for line in freeze(
+ requirement=options.requirements,
+ local_only=options.local,
+ user_only=options.user,
+ paths=options.path,
+ isolated=options.isolated_mode,
+ skip=skip,
+ exclude_editable=options.exclude_editable,
+ ):
+ sys.stdout.write(line + "\n")
+ return SUCCESS
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/hash.py b/.venv/Lib/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 0000000000000000000000000000000000000000..042dac813e74b8187c3754cb9a937c7f7183e331
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,59 @@
+import hashlib
+import logging
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+ """
+ Compute a hash of a local package archive.
+
+ These can be used with --hash in a requirements file to do repeatable
+ installs.
+ """
+
+ usage = "%prog [options] ..."
+ ignore_require_venv = True
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-a",
+ "--algorithm",
+ dest="algorithm",
+ choices=STRONG_HASHES,
+ action="store",
+ default=FAVORITE_HASH,
+ help="The hash algorithm to use: one of {}".format(
+ ", ".join(STRONG_HASHES)
+ ),
+ )
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ if not args:
+ self.parser.print_usage(sys.stderr)
+ return ERROR
+
+ algorithm = options.algorithm
+ for path in args:
+ write_output(
+ "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
+ )
+ return SUCCESS
+
+
+def _hash_of_file(path: str, algorithm: str) -> str:
+ """Return the hash digest of a file."""
+ with open(path, "rb") as archive:
+ hash = hashlib.new(algorithm)
+ for chunk in read_chunks(archive):
+ hash.update(chunk)
+ return hash.hexdigest()
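+
+# Example session (illustrative path and digest), matching the
+# "%s:\n--hash=%s:%s" format used in run() above:
+#   $ pip hash --algorithm sha256 ./example-1.0-py3-none-any.whl
+#   ./example-1.0-py3-none-any.whl:
+#   --hash=sha256:<hex digest>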
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/help.py b/.venv/Lib/site-packages/pip/_internal/commands/help.py
new file mode 100644
index 0000000000000000000000000000000000000000..62066318b74dcc5c32bcd24b9493fb34d1ce52d7
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/help.py
@@ -0,0 +1,41 @@
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+ """Show help for commands"""
+
+ usage = """
+ %prog """
+ ignore_require_venv = True
+
+ def run(self, options: Values, args: List[str]) -> int:
+ from pip._internal.commands import (
+ commands_dict,
+ create_command,
+ get_similar_commands,
+ )
+
+ try:
+ # 'pip help' with no args is handled by pip.__init__.parseopt()
+ cmd_name = args[0] # the command we need help for
+ except IndexError:
+ return SUCCESS
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = [f'unknown command "{cmd_name}"']
+ if guess:
+ msg.append(f'maybe you meant "{guess}"')
+
+ raise CommandError(" - ".join(msg))
+
+ command = create_command(cmd_name)
+ command.parser.print_help()
+
+ return SUCCESS
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/index.py b/.venv/Lib/site-packages/pip/_internal/commands/index.py
new file mode 100644
index 0000000000000000000000000000000000000000..7267effed2413ba315d0a1af8490ec677c227662
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/index.py
@@ -0,0 +1,139 @@
+import logging
+from optparse import Values
+from typing import Any, Iterable, List, Optional, Union
+
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.commands.search import print_dist_installation_info
+from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.network.session import PipSession
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class IndexCommand(IndexGroupCommand):
+ """
+ Inspect information available from package indexes.
+ """
+
+ ignore_require_venv = True
+ usage = """
+ %prog versions <package>
+ """
+
+ def add_options(self) -> None:
+ cmdoptions.add_target_python_options(self.cmd_opts)
+
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ self.cmd_opts.add_option(cmdoptions.pre())
+ self.cmd_opts.add_option(cmdoptions.no_binary())
+ self.cmd_opts.add_option(cmdoptions.only_binary())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ handlers = {
+ "versions": self.get_available_package_versions,
+ }
+
+ logger.warning(
+ "pip index is currently an experimental command. "
+ "It may be removed/changed in a future release "
+ "without prior warning."
+ )
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error(
+ "Need an action (%s) to perform.",
+ ", ".join(sorted(handlers)),
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def _build_package_finder(
+ self,
+ options: Values,
+ session: PipSession,
+ target_python: Optional[TargetPython] = None,
+ ignore_requires_python: Optional[bool] = None,
+ ) -> PackageFinder:
+ """
+ Create a package finder appropriate to the index command.
+ """
+ link_collector = LinkCollector.create(session, options=options)
+
+ # Pass allow_yanked=False to ignore yanked versions.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=options.pre,
+ ignore_requires_python=ignore_requires_python,
+ )
+
+ return PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ target_python=target_python,
+ )
+
+ def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
+ if len(args) != 1:
+ raise CommandError("You need to specify exactly one argument")
+
+ target_python = cmdoptions.make_target_python(options)
+ query = args[0]
+
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ ignore_requires_python=options.ignore_requires_python,
+ )
+
+ versions: Iterable[Union[LegacyVersion, Version]] = (
+ candidate.version for candidate in finder.find_all_candidates(query)
+ )
+
+ if not options.pre:
+ # Remove prereleases
+ versions = (
+ version for version in versions if not version.is_prerelease
+ )
+ versions = set(versions)
+
+ if not versions:
+ raise DistributionNotFound(
+ "No matching distribution found for {}".format(query)
+ )
+
+ formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
+ latest = formatted_versions[0]
+
+ write_output("{} ({})".format(query, latest))
+ write_output("Available versions: {}".format(", ".join(formatted_versions)))
+ print_dist_installation_info(query, latest)
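+
+# Example session (illustrative project and versions):
+#   $ pip index versions somepkg
+#   somepkg (2.0.0)
+#   Available versions: 2.0.0, 1.1.0, 1.0.0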
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/inspect.py b/.venv/Lib/site-packages/pip/_internal/commands/inspect.py
new file mode 100644
index 0000000000000000000000000000000000000000..27c8fa3d5b6999c77dad7aece312a5d6cf12ab48
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/inspect.py
@@ -0,0 +1,92 @@
+import logging
+from optparse import Values
+from typing import Any, Dict, List
+
+from pip._vendor.packaging.markers import default_environment
+from pip._vendor.rich import print_json
+
+from pip import __version__
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+
+class InspectCommand(Command):
+ """
+ Inspect the content of a Python environment and produce a report in JSON format.
+ """
+
+ ignore_require_venv = True
+ usage = """
+ %prog [options]"""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "--local",
+ action="store_true",
+ default=False,
+ help=(
+ "If in a virtualenv that has global access, do not list "
+ "globally-installed packages."
+ ),
+ )
+ self.cmd_opts.add_option(
+ "--user",
+ dest="user",
+ action="store_true",
+ default=False,
+ help="Only output packages installed in user-site.",
+ )
+ self.cmd_opts.add_option(cmdoptions.list_path())
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ cmdoptions.check_list_path_option(options)
+ dists = get_environment(options.path).iter_installed_distributions(
+ local_only=options.local,
+ user_only=options.user,
+ skip=set(stdlib_pkgs),
+ )
+ output = {
+ "version": "1",
+ "pip_version": __version__,
+ "installed": [self._dist_to_dict(dist) for dist in dists],
+ "environment": default_environment(),
+ # TODO tags? scheme?
+ }
+ print_json(data=output)
+ return SUCCESS
+
+ def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
+ res: Dict[str, Any] = {
+ "metadata": dist.metadata_dict,
+ "metadata_location": dist.info_location,
+ }
+ # direct_url. Note that we don't have download_info (as in the installation
+ # report) since it is not recorded in installed metadata.
+ direct_url = dist.direct_url
+ if direct_url is not None:
+ res["direct_url"] = direct_url.to_dict()
+ else:
+ # Emulate direct_url for legacy editable installs.
+ editable_project_location = dist.editable_project_location
+ if editable_project_location is not None:
+ res["direct_url"] = {
+ "url": path_to_url(editable_project_location),
+ "dir_info": {
+ "editable": True,
+ },
+ }
+ # installer
+ installer = dist.installer
+ if dist.installer:
+ res["installer"] = installer
+ # requested
+ if dist.installed_with_dist_info:
+ res["requested"] = dist.requested
+ return res
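+
+# Shape of the emitted JSON report (illustrative values):
+# {
+#   "version": "1",
+#   "pip_version": "<pip version>",
+#   "installed": [{"metadata": {...}, "metadata_location": "...", ...}],
+#   "environment": {...}   # PEP 508 marker environment
+# }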
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/install.py b/.venv/Lib/site-packages/pip/_internal/commands/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6a300804f4a99eb79b4f3a1ee676251c30e629f
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/install.py
@@ -0,0 +1,778 @@
+import errno
+import json
+import operator
+import os
+import shutil
+import site
+from optparse import SUPPRESS_HELP, Values
+from typing import List, Optional
+
+from pip._vendor.rich import print_json
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import (
+ RequirementCommand,
+ warn_if_run_as_root,
+ with_cleanup,
+)
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, InstallationError
+from pip._internal.locations import get_scheme
+from pip._internal.metadata import get_environment
+from pip._internal.models.installation_report import InstallationReport
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.operations.check import ConflictDetails, check_install_conflicts
+from pip._internal.req import install_given_reqs
+from pip._internal.req.req_install import (
+ InstallRequirement,
+ check_legacy_setup_py_options,
+)
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.filesystem import test_writable_dir
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import (
+ check_externally_managed,
+ ensure_dir,
+ get_pip_version,
+ protect_pip_from_modification_on_windows,
+ write_output,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.virtualenv import (
+ running_under_virtualenv,
+ virtualenv_no_global,
+)
+from pip._internal.wheel_builder import build, should_build_for_install_command
+
+logger = getLogger(__name__)
+
+
+class InstallCommand(RequirementCommand):
+ """
+ Install packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports installing from "requirements files", which provide
+ an easy way to specify a whole environment to be installed.
+ """
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] [-e] <vcs project url> ...
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(cmdoptions.requirements())
+ self.cmd_opts.add_option(cmdoptions.constraints())
+ self.cmd_opts.add_option(cmdoptions.no_deps())
+ self.cmd_opts.add_option(cmdoptions.pre())
+
+ self.cmd_opts.add_option(cmdoptions.editable())
+ self.cmd_opts.add_option(
+ "--dry-run",
+ action="store_true",
+ dest="dry_run",
+ default=False,
+ help=(
+ "Don't actually install anything, just print what would be. "
+ "Can be used in combination with --ignore-installed "
+ "to 'resolve' the requirements."
+ ),
+ )
+ self.cmd_opts.add_option(
+ "-t",
+ "--target",
+ dest="target_dir",
+ metavar="dir",
+ default=None,
+ help=(
+ "Install packages into . "
+ "By default this will not replace existing files/folders in "
+ ". Use --upgrade to replace existing packages in "
+ "with new versions."
+ ),
+ )
+ cmdoptions.add_target_python_options(self.cmd_opts)
+
+ self.cmd_opts.add_option(
+ "--user",
+ dest="use_user_site",
+ action="store_true",
+ help=(
+ "Install to the Python user install directory for your "
+ "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+ "Windows. (See the Python documentation for site.USER_BASE "
+ "for full details.)"
+ ),
+ )
+ self.cmd_opts.add_option(
+ "--no-user",
+ dest="use_user_site",
+ action="store_false",
+ help=SUPPRESS_HELP,
+ )
+ self.cmd_opts.add_option(
+ "--root",
+ dest="root_path",
+ metavar="dir",
+ default=None,
+ help="Install everything relative to this alternate root directory.",
+ )
+ self.cmd_opts.add_option(
+ "--prefix",
+ dest="prefix_path",
+ metavar="dir",
+ default=None,
+ help=(
+ "Installation prefix where lib, bin and other top-level "
+ "folders are placed. Note that the resulting installation may "
+ "contain scripts and other resources which reference the "
+ "Python interpreter of pip, and not that of ``--prefix``. "
+ "See also the ``--python`` option if the intention is to "
+ "install packages into another (possibly pip-free) "
+ "environment."
+ ),
+ )
+
+ self.cmd_opts.add_option(cmdoptions.src())
+
+ self.cmd_opts.add_option(
+ "-U",
+ "--upgrade",
+ dest="upgrade",
+ action="store_true",
+ help=(
+ "Upgrade all specified packages to the newest available "
+ "version. The handling of dependencies depends on the "
+ "upgrade-strategy used."
+ ),
+ )
+
+ self.cmd_opts.add_option(
+ "--upgrade-strategy",
+ dest="upgrade_strategy",
+ default="only-if-needed",
+ choices=["only-if-needed", "eager"],
+ help=(
+ "Determines how dependency upgrading should be handled "
+ "[default: %default]. "
+ '"eager" - dependencies are upgraded regardless of '
+ "whether the currently installed version satisfies the "
+ "requirements of the upgraded package(s). "
+ '"only-if-needed" - are upgraded only when they do not '
+ "satisfy the requirements of the upgraded package(s)."
+ ),
+ )
+
+ self.cmd_opts.add_option(
+ "--force-reinstall",
+ dest="force_reinstall",
+ action="store_true",
+ help="Reinstall all packages even if they are already up-to-date.",
+ )
+
+ self.cmd_opts.add_option(
+ "-I",
+ "--ignore-installed",
+ dest="ignore_installed",
+ action="store_true",
+ help=(
+ "Ignore the installed packages, overwriting them. "
+ "This can break your system if the existing package "
+ "is of a different version or was installed "
+ "with a different package manager!"
+ ),
+ )
+
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
+ self.cmd_opts.add_option(cmdoptions.override_externally_managed())
+
+ self.cmd_opts.add_option(cmdoptions.config_settings())
+ self.cmd_opts.add_option(cmdoptions.global_options())
+
+ self.cmd_opts.add_option(
+ "--compile",
+ action="store_true",
+ dest="compile",
+ default=True,
+ help="Compile Python source files to bytecode",
+ )
+
+ self.cmd_opts.add_option(
+ "--no-compile",
+ action="store_false",
+ dest="compile",
+ help="Do not compile Python source files to bytecode",
+ )
+
+ self.cmd_opts.add_option(
+ "--no-warn-script-location",
+ action="store_false",
+ dest="warn_script_location",
+ default=True,
+ help="Do not warn when installing scripts outside PATH",
+ )
+ self.cmd_opts.add_option(
+ "--no-warn-conflicts",
+ action="store_false",
+ dest="warn_about_conflicts",
+ default=True,
+ help="Do not warn about broken dependencies",
+ )
+ self.cmd_opts.add_option(cmdoptions.no_binary())
+ self.cmd_opts.add_option(cmdoptions.only_binary())
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
+ self.cmd_opts.add_option(cmdoptions.root_user_action())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ self.cmd_opts.add_option(
+ "--report",
+ dest="json_report_file",
+ metavar="file",
+ default=None,
+ help=(
+ "Generate a JSON file describing what pip did to install "
+ "the provided requirements. "
+ "Can be used in combination with --dry-run and --ignore-installed "
+ "to 'resolve' the requirements. "
+ "When - is used as file name it writes to stdout. "
+ "When writing to stdout, please combine with the --quiet option "
+ "to avoid mixing pip logging output with JSON output."
+ ),
+ )
+
+ @with_cleanup
+ def run(self, options: Values, args: List[str]) -> int:
+ if options.use_user_site and options.target_dir is not None:
+ raise CommandError("Can not combine '--user' and '--target'")
+
+ # Check whether the environment we're installing into is externally
+ # managed, as specified in PEP 668. Specifying --root, --target, or
+ # --prefix disables the check, since there's no reliable way to locate
+ # the EXTERNALLY-MANAGED file for those cases. An exception is also
+ # made specifically for "--dry-run --report" for convenience.
+ installing_into_current_environment = (
+ not (options.dry_run and options.json_report_file)
+ and options.root_path is None
+ and options.target_dir is None
+ and options.prefix_path is None
+ )
+ if (
+ installing_into_current_environment
+ and not options.override_externally_managed
+ ):
+ check_externally_managed()
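+ # i.e. a plain `pip install X` into a PEP 668 marked environment
+ # raises, while e.g. `pip install --dry-run --report - X` or installs
+ # using --root/--target/--prefix are allowed through.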
+
+ upgrade_strategy = "to-satisfy-only"
+ if options.upgrade:
+ upgrade_strategy = options.upgrade_strategy
+
+ cmdoptions.check_dist_restriction(options, check_target=True)
+
+ logger.verbose("Using %s", get_pip_version())
+ options.use_user_site = decide_user_install(
+ options.use_user_site,
+ prefix_path=options.prefix_path,
+ target_dir=options.target_dir,
+ root_path=options.root_path,
+ isolated_mode=options.isolated_mode,
+ )
+
+ target_temp_dir: Optional[TempDirectory] = None
+ target_temp_dir_path: Optional[str] = None
+ if options.target_dir:
+ options.ignore_installed = True
+ options.target_dir = os.path.abspath(options.target_dir)
+ if (
+ # fmt: off
+ os.path.exists(options.target_dir) and
+ not os.path.isdir(options.target_dir)
+ # fmt: on
+ ):
+ raise CommandError(
+ "Target path exists but is not a directory, will not continue."
+ )
+
+ # Create a target directory for using with the target option
+ target_temp_dir = TempDirectory(kind="target")
+ target_temp_dir_path = target_temp_dir.path
+ self.enter_context(target_temp_dir)
+
+ global_options = options.global_options or []
+
+ session = self.get_default_session(options)
+
+ target_python = make_target_python(options)
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ ignore_requires_python=options.ignore_requires_python,
+ )
+ build_tracker = self.enter_context(get_build_tracker())
+
+ directory = TempDirectory(
+ delete=not options.no_clean,
+ kind="install",
+ globally_managed=True,
+ )
+
+ try:
+ reqs = self.get_requirements(args, options, finder, session)
+ check_legacy_setup_py_options(options, reqs)
+
+ wheel_cache = WheelCache(options.cache_dir)
+
+ # Only when installing is it permitted to use PEP 660.
+ # In other circumstances (pip wheel, pip download) we generate
+ # regular (i.e. non editable) metadata and wheels.
+ for req in reqs:
+ req.permit_editable_wheels = True
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ build_tracker=build_tracker,
+ session=session,
+ finder=finder,
+ use_user_site=options.use_user_site,
+ verbosity=self.verbosity,
+ )
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ wheel_cache=wheel_cache,
+ use_user_site=options.use_user_site,
+ ignore_installed=options.ignore_installed,
+ ignore_requires_python=options.ignore_requires_python,
+ force_reinstall=options.force_reinstall,
+ upgrade_strategy=upgrade_strategy,
+ use_pep517=options.use_pep517,
+ )
+
+ self.trace_basic_info(finder)
+
+ requirement_set = resolver.resolve(
+ reqs, check_supported_wheels=not options.target_dir
+ )
+
+ if options.json_report_file:
+ report = InstallationReport(requirement_set.requirements_to_install)
+ if options.json_report_file == "-":
+ print_json(data=report.to_dict())
+ else:
+ with open(options.json_report_file, "w", encoding="utf-8") as f:
+ json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
+
+ if options.dry_run:
+ # In non dry-run mode, the legacy versions and specifiers check
+ # will be done as part of conflict detection.
+ requirement_set.warn_legacy_versions_and_specifiers()
+ would_install_items = sorted(
+ (r.metadata["name"], r.metadata["version"])
+ for r in requirement_set.requirements_to_install
+ )
+ if would_install_items:
+ write_output(
+ "Would install %s",
+ " ".join("-".join(item) for item in would_install_items),
+ )
+ return SUCCESS
+
+ try:
+ pip_req = requirement_set.get_requirement("pip")
+ except KeyError:
+ modifying_pip = False
+ else:
+ # If we're not replacing an already installed pip,
+ # we're not modifying it.
+ modifying_pip = pip_req.satisfied_by is None
+ protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
+
+ reqs_to_build = [
+ r
+ for r in requirement_set.requirements.values()
+ if should_build_for_install_command(r)
+ ]
+
+ _, build_failures = build(
+ reqs_to_build,
+ wheel_cache=wheel_cache,
+ verify=True,
+ build_options=[],
+ global_options=global_options,
+ )
+
+ if build_failures:
+ raise InstallationError(
+ "Could not build wheels for {}, which is required to "
+ "install pyproject.toml-based projects".format(
+ ", ".join(r.name for r in build_failures) # type: ignore
+ )
+ )
+
+ to_install = resolver.get_installation_order(requirement_set)
+
+ # Check for conflicts in the package set we're installing.
+ conflicts: Optional[ConflictDetails] = None
+ should_warn_about_conflicts = (
+ not options.ignore_dependencies and options.warn_about_conflicts
+ )
+ if should_warn_about_conflicts:
+ conflicts = self._determine_conflicts(to_install)
+
+ # Don't warn about script install locations if
+ # --target or --prefix has been specified
+ warn_script_location = options.warn_script_location
+ if options.target_dir or options.prefix_path:
+ warn_script_location = False
+
+ installed = install_given_reqs(
+ to_install,
+ global_options,
+ root=options.root_path,
+ home=target_temp_dir_path,
+ prefix=options.prefix_path,
+ warn_script_location=warn_script_location,
+ use_user_site=options.use_user_site,
+ pycompile=options.compile,
+ )
+
+ lib_locations = get_lib_location_guesses(
+ user=options.use_user_site,
+ home=target_temp_dir_path,
+ root=options.root_path,
+ prefix=options.prefix_path,
+ isolated=options.isolated_mode,
+ )
+ env = get_environment(lib_locations)
+
+ installed.sort(key=operator.attrgetter("name"))
+ items = []
+ for result in installed:
+ item = result.name
+ try:
+ installed_dist = env.get_distribution(item)
+ if installed_dist is not None:
+ item = f"{item}-{installed_dist.version}"
+ except Exception:
+ pass
+ items.append(item)
+
+ if conflicts is not None:
+ self._warn_about_conflicts(
+ conflicts,
+ resolver_variant=self.determine_resolver_variant(options),
+ )
+
+ installed_desc = " ".join(items)
+ if installed_desc:
+ write_output(
+ "Successfully installed %s",
+ installed_desc,
+ )
+ except OSError as error:
+ show_traceback = self.verbosity >= 1
+
+ message = create_os_error_message(
+ error,
+ show_traceback,
+ options.use_user_site,
+ )
+ logger.error(message, exc_info=show_traceback) # noqa
+
+ return ERROR
+
+ if options.target_dir:
+ assert target_temp_dir
+ self._handle_target_dir(
+ options.target_dir, target_temp_dir, options.upgrade
+ )
+ if options.root_user_action == "warn":
+ warn_if_run_as_root()
+ return SUCCESS
+
+ def _handle_target_dir(
+ self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
+ ) -> None:
+ ensure_dir(target_dir)
+
+ # Checking both purelib and platlib directories for installed
+ # packages to be moved to target directory
+ lib_dir_list = []
+
+ # Resolve the interim install scheme rooted at the temporary directory.
+ scheme = get_scheme("", home=target_temp_dir.path)
+ purelib_dir = scheme.purelib
+ platlib_dir = scheme.platlib
+ data_dir = scheme.data
+
+ if os.path.exists(purelib_dir):
+ lib_dir_list.append(purelib_dir)
+ if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
+ lib_dir_list.append(platlib_dir)
+ if os.path.exists(data_dir):
+ lib_dir_list.append(data_dir)
+
+ for lib_dir in lib_dir_list:
+ for item in os.listdir(lib_dir):
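+ # Skip a data-dir entry that is an ancestor of the purelib/platlib
+ # dirs; those trees are moved on their own loop iterations
+ # (lib_dir_list[:-1] excludes data_dir itself).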
+ if lib_dir == data_dir:
+ ddir = os.path.join(data_dir, item)
+ if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
+ continue
+ target_item_dir = os.path.join(target_dir, item)
+ if os.path.exists(target_item_dir):
+ if not upgrade:
+ logger.warning(
+ "Target directory %s already exists. Specify "
+ "--upgrade to force replacement.",
+ target_item_dir,
+ )
+ continue
+ if os.path.islink(target_item_dir):
+ logger.warning(
+ "Target directory %s already exists and is "
+ "a link. pip will not automatically replace "
+ "links, please remove if replacement is "
+ "desired.",
+ target_item_dir,
+ )
+ continue
+ if os.path.isdir(target_item_dir):
+ shutil.rmtree(target_item_dir)
+ else:
+ os.remove(target_item_dir)
+
+ shutil.move(os.path.join(lib_dir, item), target_item_dir)
+
+ def _determine_conflicts(
+ self, to_install: List[InstallRequirement]
+ ) -> Optional[ConflictDetails]:
+ try:
+ return check_install_conflicts(to_install)
+ except Exception:
+ logger.exception(
+ "Error while checking for conflicts. Please file an issue on "
+ "pip's issue tracker: https://github.com/pypa/pip/issues/new"
+ )
+ return None
+
+ def _warn_about_conflicts(
+ self, conflict_details: ConflictDetails, resolver_variant: str
+ ) -> None:
+ package_set, (missing, conflicting) = conflict_details
+ if not missing and not conflicting:
+ return
+
+ parts: List[str] = []
+ if resolver_variant == "legacy":
+ parts.append(
+ "pip's legacy dependency resolver does not consider dependency "
+ "conflicts when selecting packages. This behaviour is the "
+ "source of the following dependency conflicts."
+ )
+ else:
+ assert resolver_variant == "2020-resolver"
+ parts.append(
+ "pip's dependency resolver does not currently take into account "
+ "all the packages that are installed. This behaviour is the "
+ "source of the following dependency conflicts."
+ )
+
+ # NOTE: There is some duplication here, with commands/check.py
+ for project_name in missing:
+ version = package_set[project_name][0]
+ for dependency in missing[project_name]:
+ message = (
+ "{name} {version} requires {requirement}, "
+ "which is not installed."
+ ).format(
+ name=project_name,
+ version=version,
+ requirement=dependency[1],
+ )
+ parts.append(message)
+
+ for project_name in conflicting:
+ version = package_set[project_name][0]
+ for dep_name, dep_version, req in conflicting[project_name]:
+ message = (
+ "{name} {version} requires {requirement}, but {you} have "
+ "{dep_name} {dep_version} which is incompatible."
+ ).format(
+ name=project_name,
+ version=version,
+ requirement=req,
+ dep_name=dep_name,
+ dep_version=dep_version,
+ you=("you" if resolver_variant == "2020-resolver" else "you'll"),
+ )
+ parts.append(message)
+
+ logger.critical("\n".join(parts))
+
+
+def get_lib_location_guesses(
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ isolated: bool = False,
+ prefix: Optional[str] = None,
+) -> List[str]:
+ scheme = get_scheme(
+ "",
+ user=user,
+ home=home,
+ root=root,
+ isolated=isolated,
+ prefix=prefix,
+ )
+ return [scheme.purelib, scheme.platlib]
+
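+# Illustrative note (assumed typical layout): on a POSIX interpreter both
+# guesses usually point at ".../lib/python3.X/site-packages", so purelib and
+# platlib are often identical; site_packages_writable() below therefore
+# de-duplicates them with set() before testing writability.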
+
+def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
+ return all(
+ test_writable_dir(d)
+ for d in set(get_lib_location_guesses(root=root, isolated=isolated))
+ )
+
+
+def decide_user_install(
+ use_user_site: Optional[bool],
+ prefix_path: Optional[str] = None,
+ target_dir: Optional[str] = None,
+ root_path: Optional[str] = None,
+ isolated_mode: bool = False,
+) -> bool:
+ """Determine whether to do a user install based on the input options.
+
+ If use_user_site is False, no additional checks are done.
+ If use_user_site is True, it is checked for compatibility with other
+ options.
+ If use_user_site is None, the default behaviour depends on the environment,
+ which is provided by the other arguments.
+ """
+ # In some cases (config from tox), use_user_site can be set to an integer
+ # rather than a bool, which 'use_user_site is False' wouldn't catch.
+ if (use_user_site is not None) and (not use_user_site):
+ logger.debug("Non-user install by explicit request")
+ return False
+
+ if use_user_site:
+ if prefix_path:
+ raise CommandError(
+ "Can not combine '--user' and '--prefix' as they imply "
+ "different installation locations"
+ )
+ if virtualenv_no_global():
+ raise InstallationError(
+ "Can not perform a '--user' install. User site-packages "
+ "are not visible in this virtualenv."
+ )
+ logger.debug("User install by explicit request")
+ return True
+
+ # If we are here, user installs have not been explicitly requested/avoided
+ assert use_user_site is None
+
+ # user install incompatible with --prefix/--target
+ if prefix_path or target_dir:
+ logger.debug("Non-user install due to --prefix or --target option")
+ return False
+
+ # If user installs are not enabled, choose a non-user install
+ if not site.ENABLE_USER_SITE:
+ logger.debug("Non-user install because user site-packages disabled")
+ return False
+
+ # If we have permission for a non-user install, do that,
+ # otherwise do a user install.
+ if site_packages_writable(root=root_path, isolated=isolated_mode):
+ logger.debug("Non-user install because site-packages writeable")
+ return False
+
+ logger.info(
+ "Defaulting to user installation because normal site-packages "
+ "is not writeable"
+ )
+ return True
+
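+
+# A minimal sketch (not part of pip) of the decision matrix implemented by
+# decide_user_install(); the option values below are assumptions chosen so
+# that only environment-independent branches are exercised.
+def _example_decide_user_install() -> None:
+    # An explicit opt-out always wins.
+    assert decide_user_install(False) is False
+    # Combining --user with --prefix is rejected up front.
+    try:
+        decide_user_install(True, prefix_path="/opt/example")
+    except CommandError:
+        pass
+    # With no explicit request, --prefix/--target force a non-user install.
+    assert decide_user_install(None, target_dir="pkgs") is False
+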
+
+def create_os_error_message(
+ error: OSError, show_traceback: bool, using_user_site: bool
+) -> str:
+ """Format an error message for an OSError
+
+ It may occur anytime during the execution of the install command.
+ """
+ parts = []
+
+ # Mention the error if we are not going to show a traceback
+ parts.append("Could not install packages due to an OSError")
+ if not show_traceback:
+ parts.append(": ")
+ parts.append(str(error))
+ else:
+ parts.append(".")
+
+    # Split the error indication from a helper message (if any)
+ parts[-1] += "\n"
+
+ # Suggest useful actions to the user:
+ # (1) using user site-packages or (2) verifying the permissions
+ if error.errno == errno.EACCES:
+ user_option_part = "Consider using the `--user` option"
+ permissions_part = "Check the permissions"
+
+ if not running_under_virtualenv() and not using_user_site:
+ parts.extend(
+ [
+ user_option_part,
+ " or ",
+ permissions_part.lower(),
+ ]
+ )
+ else:
+ parts.append(permissions_part)
+ parts.append(".\n")
+
+ # Suggest the user to enable Long Paths if path length is
+ # more than 260
+ if (
+ WINDOWS
+ and error.errno == errno.ENOENT
+ and error.filename
+ and len(error.filename) > 260
+ ):
+ parts.append(
+ "HINT: This error might have occurred since "
+ "this system does not have Windows Long Path "
+ "support enabled. You can find information on "
+ "how to enable this at "
+ "https://pip.pypa.io/warnings/enable-long-paths\n"
+ )
+
+ return "".join(parts).strip() + "\n"
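+
+
+# Illustrative sketch (not part of pip): rendering a permission failure. The
+# errno and message below are assumed example values.
+def _example_os_error_message() -> str:
+    err = OSError(errno.EACCES, "Permission denied")
+    # With show_traceback=False the error text is inlined after the summary;
+    # outside a virtualenv a `--user` hint may also be appended.
+    return create_os_error_message(err, show_traceback=False, using_user_site=False)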
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/list.py b/.venv/Lib/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac10353194f5f17b042c2076b7397b0c12bfe588
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,368 @@
+import json
+import logging
+from optparse import Values
+from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.misc import tabulate, write_output
+
+if TYPE_CHECKING:
+ from pip._internal.metadata.base import DistributionVersion
+
+ class _DistWithLatestInfo(BaseDistribution):
+ """Give the distribution object a couple of extra fields.
+
+ These will be populated during ``get_outdated()``. This is dirty but
+ makes the rest of the code much cleaner.
+ """
+
+ latest_version: DistributionVersion
+ latest_filetype: str
+
+ _ProcessedDists = Sequence[_DistWithLatestInfo]
+
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(IndexGroupCommand):
+ """
+ List installed packages, including editables.
+
+ Packages are listed in a case-insensitive sorted order.
+ """
+
+ ignore_require_venv = True
+ usage = """
+ %prog [options]"""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-o",
+ "--outdated",
+ action="store_true",
+ default=False,
+ help="List outdated packages",
+ )
+ self.cmd_opts.add_option(
+ "-u",
+ "--uptodate",
+ action="store_true",
+ default=False,
+ help="List uptodate packages",
+ )
+ self.cmd_opts.add_option(
+ "-e",
+ "--editable",
+ action="store_true",
+ default=False,
+ help="List editable projects.",
+ )
+ self.cmd_opts.add_option(
+ "-l",
+ "--local",
+ action="store_true",
+ default=False,
+ help=(
+ "If in a virtualenv that has global access, do not list "
+ "globally-installed packages."
+ ),
+ )
+ self.cmd_opts.add_option(
+ "--user",
+ dest="user",
+ action="store_true",
+ default=False,
+ help="Only output packages installed in user-site.",
+ )
+ self.cmd_opts.add_option(cmdoptions.list_path())
+ self.cmd_opts.add_option(
+ "--pre",
+ action="store_true",
+ default=False,
+ help=(
+ "Include pre-release and development versions. By default, "
+ "pip only finds stable versions."
+ ),
+ )
+
+ self.cmd_opts.add_option(
+ "--format",
+ action="store",
+ dest="list_format",
+ default="columns",
+ choices=("columns", "freeze", "json"),
+ help=(
+ "Select the output format among: columns (default), freeze, or json. "
+ "The 'freeze' format cannot be used with the --outdated option."
+ ),
+ )
+
+ self.cmd_opts.add_option(
+ "--not-required",
+ action="store_true",
+ dest="not_required",
+ help="List packages that are not dependencies of installed packages.",
+ )
+
+ self.cmd_opts.add_option(
+ "--exclude-editable",
+ action="store_false",
+ dest="include_editable",
+            help="Exclude editable packages from output.",
+ )
+ self.cmd_opts.add_option(
+ "--include-editable",
+ action="store_true",
+ dest="include_editable",
+            help="Include editable packages in output.",
+ default=True,
+ )
+ self.cmd_opts.add_option(cmdoptions.list_exclude())
+ index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def _build_package_finder(
+ self, options: Values, session: PipSession
+ ) -> PackageFinder:
+ """
+ Create a package finder appropriate to this list command.
+ """
+ link_collector = LinkCollector.create(session, options=options)
+
+ # Pass allow_yanked=False to ignore yanked versions.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=options.pre,
+ )
+
+ return PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ )
+
+ def run(self, options: Values, args: List[str]) -> int:
+ if options.outdated and options.uptodate:
+ raise CommandError("Options --outdated and --uptodate cannot be combined.")
+
+ if options.outdated and options.list_format == "freeze":
+ raise CommandError(
+ "List format 'freeze' cannot be used with the --outdated option."
+ )
+
+ cmdoptions.check_list_path_option(options)
+
+ skip = set(stdlib_pkgs)
+ if options.excludes:
+ skip.update(canonicalize_name(n) for n in options.excludes)
+
+ packages: "_ProcessedDists" = [
+ cast("_DistWithLatestInfo", d)
+ for d in get_environment(options.path).iter_installed_distributions(
+ local_only=options.local,
+ user_only=options.user,
+ editables_only=options.editable,
+ include_editables=options.include_editable,
+ skip=skip,
+ )
+ ]
+
+        # get_not_required must be called first in order to find and filter
+        # out all dependencies correctly. Otherwise a package can't be
+        # identified as a requirement, because some parent packages could
+        # have been filtered out beforehand.
+ if options.not_required:
+ packages = self.get_not_required(packages, options)
+
+ if options.outdated:
+ packages = self.get_outdated(packages, options)
+ elif options.uptodate:
+ packages = self.get_uptodate(packages, options)
+
+ self.output_package_listing(packages, options)
+ return SUCCESS
+
+ def get_outdated(
+ self, packages: "_ProcessedDists", options: Values
+ ) -> "_ProcessedDists":
+ return [
+ dist
+ for dist in self.iter_packages_latest_infos(packages, options)
+ if dist.latest_version > dist.version
+ ]
+
+ def get_uptodate(
+ self, packages: "_ProcessedDists", options: Values
+ ) -> "_ProcessedDists":
+ return [
+ dist
+ for dist in self.iter_packages_latest_infos(packages, options)
+ if dist.latest_version == dist.version
+ ]
+
+ def get_not_required(
+ self, packages: "_ProcessedDists", options: Values
+ ) -> "_ProcessedDists":
+ dep_keys = {
+ canonicalize_name(dep.name)
+ for dist in packages
+ for dep in (dist.iter_dependencies() or ())
+ }
+
+ # Create a set to remove duplicate packages, and cast it to a list
+ # to keep the return type consistent with get_outdated and
+ # get_uptodate
+ return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})
+
+ def iter_packages_latest_infos(
+ self, packages: "_ProcessedDists", options: Values
+ ) -> Generator["_DistWithLatestInfo", None, None]:
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, session)
+
+ def latest_info(
+ dist: "_DistWithLatestInfo",
+ ) -> Optional["_DistWithLatestInfo"]:
+ all_candidates = finder.find_all_candidates(dist.canonical_name)
+ if not options.pre:
+ # Remove prereleases
+ all_candidates = [
+ candidate
+ for candidate in all_candidates
+ if not candidate.version.is_prerelease
+ ]
+
+ evaluator = finder.make_candidate_evaluator(
+ project_name=dist.canonical_name,
+ )
+ best_candidate = evaluator.sort_best_candidate(all_candidates)
+ if best_candidate is None:
+ return None
+
+ remote_version = best_candidate.version
+ if best_candidate.link.is_wheel:
+ typ = "wheel"
+ else:
+ typ = "sdist"
+ dist.latest_version = remote_version
+ dist.latest_filetype = typ
+ return dist
+
+ for dist in map(latest_info, packages):
+ if dist is not None:
+ yield dist
+
+ def output_package_listing(
+ self, packages: "_ProcessedDists", options: Values
+ ) -> None:
+ packages = sorted(
+ packages,
+ key=lambda dist: dist.canonical_name,
+ )
+ if options.list_format == "columns" and packages:
+ data, header = format_for_columns(packages, options)
+ self.output_package_listing_columns(data, header)
+ elif options.list_format == "freeze":
+ for dist in packages:
+ if options.verbose >= 1:
+ write_output(
+ "%s==%s (%s)", dist.raw_name, dist.version, dist.location
+ )
+ else:
+ write_output("%s==%s", dist.raw_name, dist.version)
+ elif options.list_format == "json":
+ write_output(format_for_json(packages, options))
+
+ def output_package_listing_columns(
+ self, data: List[List[str]], header: List[str]
+ ) -> None:
+ # insert the header first: we need to know the size of column names
+ if len(data) > 0:
+ data.insert(0, header)
+
+ pkg_strings, sizes = tabulate(data)
+
+ # Create and add a separator.
+ if len(data) > 0:
+            pkg_strings.insert(1, " ".join("-" * x for x in sizes))
+
+ for val in pkg_strings:
+ write_output(val)
+
+
+def format_for_columns(
+ pkgs: "_ProcessedDists", options: Values
+) -> Tuple[List[List[str]], List[str]]:
+ """
+ Convert the package data into something usable
+ by output_package_listing_columns.
+ """
+ header = ["Package", "Version"]
+
+ running_outdated = options.outdated
+ if running_outdated:
+ header.extend(["Latest", "Type"])
+
+ has_editables = any(x.editable for x in pkgs)
+ if has_editables:
+ header.append("Editable project location")
+
+ if options.verbose >= 1:
+ header.append("Location")
+ if options.verbose >= 1:
+ header.append("Installer")
+
+ data = []
+ for proj in pkgs:
+ # if we're working on the 'outdated' list, separate out the
+ # latest_version and type
+ row = [proj.raw_name, str(proj.version)]
+
+ if running_outdated:
+ row.append(str(proj.latest_version))
+ row.append(proj.latest_filetype)
+
+ if has_editables:
+ row.append(proj.editable_project_location or "")
+
+ if options.verbose >= 1:
+ row.append(proj.location or "")
+ if options.verbose >= 1:
+ row.append(proj.installer)
+
+ data.append(row)
+
+ return data, header
+
+
+def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
+ data = []
+ for dist in packages:
+ info = {
+ "name": dist.raw_name,
+ "version": str(dist.version),
+ }
+ if options.verbose >= 1:
+ info["location"] = dist.location or ""
+ info["installer"] = dist.installer
+ if options.outdated:
+ info["latest_version"] = str(dist.latest_version)
+ info["latest_filetype"] = dist.latest_filetype
+ editable_project_location = dist.editable_project_location
+ if editable_project_location:
+ info["editable_project_location"] = editable_project_location
+ data.append(info)
+ return json.dumps(data)
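+
+
+# Example output shape at default verbosity (values are made up):
+#   [{"name": "pip", "version": "23.2.1"}, {"name": "wheel", "version": "0.41.2"}]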
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/search.py b/.venv/Lib/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 0000000000000000000000000000000000000000..03ed925b246dd551ec2ef45095ed6cad00fd2745
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,174 @@
+import logging
+import shutil
+import sys
+import textwrap
+import xmlrpc.client
+from collections import OrderedDict
+from optparse import Values
+from typing import TYPE_CHECKING, Dict, List, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.index import PyPI
+from pip._internal.network.xmlrpc import PipXmlrpcTransport
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import write_output
+
+if TYPE_CHECKING:
+ from typing import TypedDict
+
+ class TransformedHit(TypedDict):
+ name: str
+ summary: str
+ versions: List[str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command, SessionCommandMixin):
+    """Search for PyPI packages whose name or summary contains <query>."""
+
+ usage = """
+      %prog [options] <query>"""
+ ignore_require_venv = True
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-i",
+ "--index",
+ dest="index",
+ metavar="URL",
+ default=PyPI.pypi_url,
+ help="Base URL of Python Package Index (default %default)",
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ if not args:
+ raise CommandError("Missing required argument (search query).")
+ query = args
+ pypi_hits = self.search(query, options)
+ hits = transform_hits(pypi_hits)
+
+ terminal_width = None
+ if sys.stdout.isatty():
+ terminal_width = shutil.get_terminal_size()[0]
+
+ print_results(hits, terminal_width=terminal_width)
+ if pypi_hits:
+ return SUCCESS
+ return NO_MATCHES_FOUND
+
+ def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
+ index_url = options.index
+
+ session = self.get_default_session(options)
+
+ transport = PipXmlrpcTransport(index_url, session)
+ pypi = xmlrpc.client.ServerProxy(index_url, transport)
+ try:
+ hits = pypi.search({"name": query, "summary": query}, "or")
+ except xmlrpc.client.Fault as fault:
+ message = "XMLRPC request failed [code: {code}]\n{string}".format(
+ code=fault.faultCode,
+ string=fault.faultString,
+ )
+ raise CommandError(message)
+ assert isinstance(hits, list)
+ return hits
+
+
+def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
+ """
+ The list from pypi is really a list of versions. We want a list of
+ packages with the list of versions stored inline. This converts the
+ list from pypi into one we can use.
+ """
+ packages: Dict[str, "TransformedHit"] = OrderedDict()
+ for hit in hits:
+ name = hit["name"]
+ summary = hit["summary"]
+ version = hit["version"]
+
+        if name not in packages:
+ packages[name] = {
+ "name": name,
+ "summary": summary,
+ "versions": [version],
+ }
+ else:
+ packages[name]["versions"].append(version)
+
+ # if this is the highest version, replace summary and score
+ if version == highest_version(packages[name]["versions"]):
+ packages[name]["summary"] = summary
+
+ return list(packages.values())
+
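+
+# Hedged example (not part of pip): transform_hits() folds PyPI's per-version
+# hit list into one record per package; the sample hits below are made up.
+def _example_transform_hits() -> None:
+    hits = [
+        {"name": "demo", "summary": "old summary", "version": "1.0"},
+        {"name": "demo", "summary": "new summary", "version": "2.0"},
+    ]
+    # The stored summary tracks the highest version seen.
+    assert transform_hits(hits) == [
+        {"name": "demo", "summary": "new summary", "versions": ["1.0", "2.0"]}
+    ]
+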
+
+def print_dist_installation_info(name: str, latest: str) -> None:
+ env = get_default_environment()
+ dist = env.get_distribution(name)
+ if dist is not None:
+ with indent_log():
+ if dist.version == latest:
+ write_output("INSTALLED: %s (latest)", dist.version)
+ else:
+ write_output("INSTALLED: %s", dist.version)
+ if parse_version(latest).pre:
+ write_output(
+ "LATEST: %s (pre-release; install"
+ " with `pip install --pre`)",
+ latest,
+ )
+ else:
+ write_output("LATEST: %s", latest)
+
+
+def print_results(
+ hits: List["TransformedHit"],
+ name_column_width: Optional[int] = None,
+ terminal_width: Optional[int] = None,
+) -> None:
+ if not hits:
+ return
+ if name_column_width is None:
+ name_column_width = (
+ max(
+ [
+ len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
+ for hit in hits
+ ]
+ )
+ + 4
+ )
+
+ for hit in hits:
+ name = hit["name"]
+ summary = hit["summary"] or ""
+ latest = highest_version(hit.get("versions", ["-"]))
+ if terminal_width is not None:
+ target_width = terminal_width - name_column_width - 5
+ if target_width > 10:
+ # wrap and indent summary to fit terminal
+ summary_lines = textwrap.wrap(summary, target_width)
+ summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
+
+ name_latest = f"{name} ({latest})"
+ line = f"{name_latest:{name_column_width}} - {summary}"
+ try:
+ write_output(line)
+ print_dist_installation_info(name, latest)
+ except UnicodeEncodeError:
+ pass
+
+
+def highest_version(versions: List[str]) -> str:
+ return max(versions, key=parse_version)
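+
+
+# Note (illustrative): parse_version imposes PEP 440 ordering, so
+# highest_version(["1.9", "1.10"]) == "1.10" rather than the lexicographic "1.9".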
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/show.py b/.venv/Lib/site-packages/pip/_internal/commands/show.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f10701f6b28c72b62c9904fec37b96bdd199dcc
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,189 @@
+import logging
+from optparse import Values
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+ """
+ Show information about one or more installed packages.
+
+ The output is in RFC-compliant mail header format.
+ """
+
+ usage = """
+      %prog [options] <package> ..."""
+ ignore_require_venv = True
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-f",
+ "--files",
+ dest="files",
+ action="store_true",
+ default=False,
+ help="Show the full list of installed files for each package.",
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ if not args:
+ logger.warning("ERROR: Please provide a package name or names.")
+ return ERROR
+ query = args
+
+ results = search_packages_info(query)
+ if not print_results(
+ results, list_files=options.files, verbose=options.verbose
+ ):
+ return ERROR
+ return SUCCESS
+
+
+class _PackageInfo(NamedTuple):
+ name: str
+ version: str
+ location: str
+ editable_project_location: Optional[str]
+ requires: List[str]
+ required_by: List[str]
+ installer: str
+ metadata_version: str
+ classifiers: List[str]
+ summary: str
+ homepage: str
+ project_urls: List[str]
+ author: str
+ author_email: str
+ license: str
+ entry_points: List[str]
+ files: Optional[List[str]]
+
+
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
+ """
+ Gather details from installed distributions. Print distribution name,
+    version, location, and installed files. Listing installed files requires
+    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
+    directory.
+ """
+ env = get_default_environment()
+
+ installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
+ query_names = [canonicalize_name(name) for name in query]
+ missing = sorted(
+ [name for name, pkg in zip(query, query_names) if pkg not in installed]
+ )
+ if missing:
+ logger.warning("Package(s) not found: %s", ", ".join(missing))
+
+ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
+ return (
+ dist.metadata["Name"] or "UNKNOWN"
+ for dist in installed.values()
+ if current_dist.canonical_name
+ in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
+ )
+
+ for query_name in query_names:
+ try:
+ dist = installed[query_name]
+ except KeyError:
+ continue
+
+ requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
+ required_by = sorted(_get_requiring_packages(dist), key=str.lower)
+
+ try:
+ entry_points_text = dist.read_text("entry_points.txt")
+ entry_points = entry_points_text.splitlines(keepends=False)
+ except FileNotFoundError:
+ entry_points = []
+
+ files_iter = dist.iter_declared_entries()
+ if files_iter is None:
+ files: Optional[List[str]] = None
+ else:
+ files = sorted(files_iter)
+
+ metadata = dist.metadata
+
+ yield _PackageInfo(
+ name=dist.raw_name,
+ version=str(dist.version),
+ location=dist.location or "",
+ editable_project_location=dist.editable_project_location,
+ requires=requires,
+ required_by=required_by,
+ installer=dist.installer,
+ metadata_version=dist.metadata_version or "",
+ classifiers=metadata.get_all("Classifier", []),
+ summary=metadata.get("Summary", ""),
+ homepage=metadata.get("Home-page", ""),
+ project_urls=metadata.get_all("Project-URL", []),
+ author=metadata.get("Author", ""),
+ author_email=metadata.get("Author-email", ""),
+ license=metadata.get("License", ""),
+ entry_points=entry_points,
+ files=files,
+ )
+
+
+def print_results(
+ distributions: Iterable[_PackageInfo],
+ list_files: bool,
+ verbose: bool,
+) -> bool:
+ """
+ Print the information from installed distributions found.
+ """
+ results_printed = False
+ for i, dist in enumerate(distributions):
+ results_printed = True
+ if i > 0:
+ write_output("---")
+
+ write_output("Name: %s", dist.name)
+ write_output("Version: %s", dist.version)
+ write_output("Summary: %s", dist.summary)
+ write_output("Home-page: %s", dist.homepage)
+ write_output("Author: %s", dist.author)
+ write_output("Author-email: %s", dist.author_email)
+ write_output("License: %s", dist.license)
+ write_output("Location: %s", dist.location)
+ if dist.editable_project_location is not None:
+ write_output(
+ "Editable project location: %s", dist.editable_project_location
+ )
+ write_output("Requires: %s", ", ".join(dist.requires))
+ write_output("Required-by: %s", ", ".join(dist.required_by))
+
+ if verbose:
+ write_output("Metadata-Version: %s", dist.metadata_version)
+ write_output("Installer: %s", dist.installer)
+ write_output("Classifiers:")
+ for classifier in dist.classifiers:
+ write_output(" %s", classifier)
+ write_output("Entry-points:")
+ for entry in dist.entry_points:
+ write_output(" %s", entry.strip())
+ write_output("Project-URLs:")
+ for project_url in dist.project_urls:
+ write_output(" %s", project_url)
+ if list_files:
+ write_output("Files:")
+ if dist.files is None:
+ write_output("Cannot locate RECORD or installed-files.txt")
+ else:
+ for line in dist.files:
+ write_output(" %s", line.strip())
+ return results_printed
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/uninstall.py b/.venv/Lib/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 0000000000000000000000000000000000000000..f198fc313ff57929d95d36216e3e6ecec3877673
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,113 @@
+import logging
+from optparse import Values
+from typing import List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import (
+ install_req_from_line,
+ install_req_from_parsed_requirement,
+)
+from pip._internal.utils.misc import (
+ check_externally_managed,
+ protect_pip_from_modification_on_windows,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class UninstallCommand(Command, SessionCommandMixin):
+ """
+ Uninstall packages.
+
+ pip is able to uninstall most installed packages. Known exceptions are:
+
+ - Pure distutils packages installed with ``python setup.py install``, which
+ leave behind no metadata to determine what files were installed.
+ - Script wrappers installed by ``python setup.py develop``.
+ """
+
+ usage = """
+        %prog [options] <package> ...
+        %prog [options] -r <requirements file> ..."""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-r",
+ "--requirement",
+ dest="requirements",
+ action="append",
+ default=[],
+ metavar="file",
+ help=(
+ "Uninstall all the packages listed in the given requirements "
+ "file. This option can be used multiple times."
+ ),
+ )
+ self.cmd_opts.add_option(
+ "-y",
+ "--yes",
+ dest="yes",
+ action="store_true",
+ help="Don't ask for confirmation of uninstall deletions.",
+ )
+ self.cmd_opts.add_option(cmdoptions.root_user_action())
+ self.cmd_opts.add_option(cmdoptions.override_externally_managed())
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options: Values, args: List[str]) -> int:
+ session = self.get_default_session(options)
+
+ reqs_to_uninstall = {}
+ for name in args:
+ req = install_req_from_line(
+ name,
+ isolated=options.isolated_mode,
+ )
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ else:
+ logger.warning(
+ "Invalid requirement: %r ignored -"
+ " the uninstall command expects named"
+ " requirements.",
+ name,
+ )
+ for filename in options.requirements:
+ for parsed_req in parse_requirements(
+ filename, options=options, session=session
+ ):
+ req = install_req_from_parsed_requirement(
+ parsed_req, isolated=options.isolated_mode
+ )
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ if not reqs_to_uninstall:
+ raise InstallationError(
+ f"You must give at least one requirement to {self.name} (see "
+ f'"pip help {self.name}")'
+ )
+
+ if not options.override_externally_managed:
+ check_externally_managed()
+
+ protect_pip_from_modification_on_windows(
+ modifying_pip="pip" in reqs_to_uninstall
+ )
+
+ for req in reqs_to_uninstall.values():
+ uninstall_pathset = req.uninstall(
+ auto_confirm=options.yes,
+ verbose=self.verbosity > 0,
+ )
+ if uninstall_pathset:
+ uninstall_pathset.commit()
+ if options.root_user_action == "warn":
+ warn_if_run_as_root()
+ return SUCCESS
diff --git a/.venv/Lib/site-packages/pip/_internal/commands/wheel.py b/.venv/Lib/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed578aa2500d8917d5d3ed1249526b48ad7ee996
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,183 @@
+import logging
+import os
+import shutil
+from optparse import Values
+from typing import List
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+ InstallRequirement,
+ check_legacy_setup_py_options,
+)
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel_builder import build, should_build_for_wheel_command
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+ """
+ Build Wheel archives for your requirements and dependencies.
+
+ Wheel is a built-package format, and offers the advantage of not
+ recompiling your software during every install. For more details, see the
+ wheel docs: https://wheel.readthedocs.io/en/latest/
+
+ 'pip wheel' uses the build system interface as described here:
+ https://pip.pypa.io/en/stable/reference/build-system/
+
+ """
+
+ usage = """
+        %prog [options] <requirement specifier> ...
+        %prog [options] -r <requirements file> ...
+        %prog [options] [-e] <vcs project url> ...
+        %prog [options] [-e] <local project path> ...
+        %prog [options] <archive url/path> ..."""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ "-w",
+ "--wheel-dir",
+ dest="wheel_dir",
+ metavar="dir",
+ default=os.curdir,
+ help=(
+            "Build wheels into <dir>, where the default is the "
+ "current working directory."
+ ),
+ )
+ self.cmd_opts.add_option(cmdoptions.no_binary())
+ self.cmd_opts.add_option(cmdoptions.only_binary())
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
+ self.cmd_opts.add_option(cmdoptions.constraints())
+ self.cmd_opts.add_option(cmdoptions.editable())
+ self.cmd_opts.add_option(cmdoptions.requirements())
+ self.cmd_opts.add_option(cmdoptions.src())
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ self.cmd_opts.add_option(cmdoptions.no_deps())
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
+
+ self.cmd_opts.add_option(
+ "--no-verify",
+ dest="no_verify",
+ action="store_true",
+ default=False,
+ help="Don't verify if built wheel is valid.",
+ )
+
+ self.cmd_opts.add_option(cmdoptions.config_settings())
+ self.cmd_opts.add_option(cmdoptions.build_options())
+ self.cmd_opts.add_option(cmdoptions.global_options())
+
+ self.cmd_opts.add_option(
+ "--pre",
+ action="store_true",
+ default=False,
+ help=(
+ "Include pre-release and development versions. By default, "
+ "pip only finds stable versions."
+ ),
+ )
+
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ @with_cleanup
+ def run(self, options: Values, args: List[str]) -> int:
+ session = self.get_default_session(options)
+
+ finder = self._build_package_finder(options, session)
+
+ options.wheel_dir = normalize_path(options.wheel_dir)
+ ensure_dir(options.wheel_dir)
+
+ build_tracker = self.enter_context(get_build_tracker())
+
+ directory = TempDirectory(
+ delete=not options.no_clean,
+ kind="wheel",
+ globally_managed=True,
+ )
+
+ reqs = self.get_requirements(args, options, finder, session)
+ check_legacy_setup_py_options(options, reqs)
+
+ wheel_cache = WheelCache(options.cache_dir)
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ build_tracker=build_tracker,
+ session=session,
+ finder=finder,
+ download_dir=options.wheel_dir,
+ use_user_site=False,
+ verbosity=self.verbosity,
+ )
+
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ wheel_cache=wheel_cache,
+ ignore_requires_python=options.ignore_requires_python,
+ use_pep517=options.use_pep517,
+ )
+
+ self.trace_basic_info(finder)
+
+ requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+ reqs_to_build: List[InstallRequirement] = []
+ for req in requirement_set.requirements.values():
+ if req.is_wheel:
+ preparer.save_linked_requirement(req)
+ elif should_build_for_wheel_command(req):
+ reqs_to_build.append(req)
+
+ preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
+ requirement_set.warn_legacy_versions_and_specifiers()
+
+ # build wheels
+ build_successes, build_failures = build(
+ reqs_to_build,
+ wheel_cache=wheel_cache,
+ verify=(not options.no_verify),
+ build_options=options.build_options or [],
+ global_options=options.global_options or [],
+ )
+ for req in build_successes:
+ assert req.link and req.link.is_wheel
+ assert req.local_file_path
+ # copy from cache to target directory
+ try:
+ shutil.copy(req.local_file_path, options.wheel_dir)
+ except OSError as e:
+ logger.warning(
+ "Building wheel for %s failed: %s",
+ req.name,
+ e,
+ )
+ build_failures.append(req)
+        if build_failures:
+ raise CommandError("Failed to build one or more wheels")
+
+ return SUCCESS
diff --git a/.venv/Lib/site-packages/pip/_internal/configuration.py b/.venv/Lib/site-packages/pip/_internal/configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..96f824955bf098d86e54cd8bce3bf0015f976ec2
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,381 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+ As written in config files.
+- value
+ Value associated with a name
+- key
+  Name combined with its section (section.name)
+- variant
+ A single word describing where the configuration key-value pair came from
+"""
+
+import configparser
+import locale
+import os
+import sys
+from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
+
+from pip._internal.exceptions import (
+ ConfigurationError,
+ ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import ensure_dir, enum
+
+RawConfigParser = configparser.RawConfigParser # Shorthand
+Kind = NewType("Kind", str)
+
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
+ENV_NAMES_IGNORED = "version", "help"
+
+# The kinds of configurations there are.
+kinds = enum(
+ USER="user", # User Specific
+ GLOBAL="global", # System Wide
+ SITE="site", # [Virtual] Environment Specific
+ ENV="env", # from PIP_CONFIG_FILE
+ ENV_VAR="env-var", # from Environment Variables
+)
+OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
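+
+# When the loaded variants are merged, later entries in OVERRIDE_ORDER win:
+# PIP_* environment variables (ENV_VAR) beat a PIP_CONFIG_FILE (ENV), which
+# beats the site, user, and global configuration files, in that order.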
+
+logger = getLogger(__name__)
+
+
+# NOTE: Maybe use configparser's optionxform attribute to normalize key names.
+def _normalize_name(name: str) -> str:
+ """Make a name consistent regardless of source (environment or file)"""
+ name = name.lower().replace("_", "-")
+ if name.startswith("--"):
+ name = name[2:] # only prefer long opts
+ return name
+
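+# For illustration (assumed inputs): both "--Global_Index-URL" and the
+# environment-derived "GLOBAL_INDEX_URL" normalize to "global-index-url",
+# so values from config files and PIP_* variables land on the same key.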
+
+def _disassemble_key(name: str) -> List[str]:
+ if "." not in name:
+ error_message = (
+ "Key does not contain dot separated section and key. "
+ "Perhaps you wanted to use 'global.{}' instead?"
+ ).format(name)
+ raise ConfigurationError(error_message)
+ return name.split(".", 1)
+
+
+def get_configuration_files() -> Dict[Kind, List[str]]:
+ global_config_files = [
+ os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
+ ]
+
+ site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+ legacy_config_file = os.path.join(
+ os.path.expanduser("~"),
+ "pip" if WINDOWS else ".pip",
+ CONFIG_BASENAME,
+ )
+ new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
+ return {
+ kinds.GLOBAL: global_config_files,
+ kinds.SITE: [site_config_file],
+ kinds.USER: [legacy_config_file, new_config_file],
+ }
+
+
+class Configuration:
+ """Handles management of configuration.
+
+ Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style keys and
+    stores the value associated with them as "key-name" under the section
+    "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy-to-manage form in the configuration
+    files, and the stored data stays tidy.
+ """
+
+ def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
+ super().__init__()
+
+ if load_only is not None and load_only not in VALID_LOAD_ONLY:
+ raise ConfigurationError(
+ "Got invalid value for load_only - should be one of {}".format(
+ ", ".join(map(repr, VALID_LOAD_ONLY))
+ )
+ )
+ self.isolated = isolated
+ self.load_only = load_only
+
+ # Because we keep track of where we got the data from
+ self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
+ variant: [] for variant in OVERRIDE_ORDER
+ }
+ self._config: Dict[Kind, Dict[str, Any]] = {
+ variant: {} for variant in OVERRIDE_ORDER
+ }
+ self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
+
+ def load(self) -> None:
+ """Loads configuration from configuration files and environment"""
+ self._load_config_files()
+ if not self.isolated:
+ self._load_environment_vars()
+
+ def get_file_to_edit(self) -> Optional[str]:
+ """Returns the file with highest priority in configuration"""
+        assert self.load_only is not None, "A file to edit must be specified (load_only)"
+
+ try:
+ return self._get_parser_to_modify()[0]
+ except IndexError:
+ return None
+
+ def items(self) -> Iterable[Tuple[str, Any]]:
+ """Returns key-value pairs like dict.items() representing the loaded
+ configuration
+ """
+ return self._dictionary.items()
+
+ def get_value(self, key: str) -> Any:
+ """Get a value from the configuration."""
+ orig_key = key
+ key = _normalize_name(key)
+ try:
+ return self._dictionary[key]
+ except KeyError:
+ # disassembling triggers a more useful error message than simply
+ # "No such key" in the case that the key isn't in the form command.option
+ _disassemble_key(key)
+ raise ConfigurationError(f"No such key - {orig_key}")
+
+ def set_value(self, key: str, value: Any) -> None:
+ """Modify a value in the configuration."""
+ key = _normalize_name(key)
+ self._ensure_have_load_only()
+
+ assert self.load_only
+ fname, parser = self._get_parser_to_modify()
+
+ if parser is not None:
+ section, name = _disassemble_key(key)
+
+ # Modify the parser and the configuration
+ if not parser.has_section(section):
+ parser.add_section(section)
+ parser.set(section, name, value)
+
+ self._config[self.load_only][key] = value
+ self._mark_as_modified(fname, parser)
+
+ def unset_value(self, key: str) -> None:
+ """Unset a value in the configuration."""
+ orig_key = key
+ key = _normalize_name(key)
+ self._ensure_have_load_only()
+
+ assert self.load_only
+ if key not in self._config[self.load_only]:
+ raise ConfigurationError(f"No such key - {orig_key}")
+
+ fname, parser = self._get_parser_to_modify()
+
+ if parser is not None:
+ section, name = _disassemble_key(key)
+ if not (
+ parser.has_section(section) and parser.remove_option(section, name)
+ ):
+ # The option was not removed.
+ raise ConfigurationError(
+ "Fatal Internal error [id=1]. Please report as a bug."
+ )
+
+ # The section may be empty after the option was removed.
+ if not parser.items(section):
+ parser.remove_section(section)
+ self._mark_as_modified(fname, parser)
+
+ del self._config[self.load_only][key]
+
+ def save(self) -> None:
+ """Save the current in-memory state."""
+ self._ensure_have_load_only()
+
+ for fname, parser in self._modified_parsers:
+ logger.info("Writing to %s", fname)
+
+ # Ensure directory exists.
+ ensure_dir(os.path.dirname(fname))
+
+            # Ensure the directory permissions allow writing the file.
+ try:
+ with open(fname, "w") as f:
+ parser.write(f)
+ except OSError as error:
+ raise ConfigurationError(
+ f"An error occurred while writing to the configuration file "
+ f"{fname}: {error}"
+ )
+
+ #
+ # Private routines
+ #
+
+ def _ensure_have_load_only(self) -> None:
+ if self.load_only is None:
+            raise ConfigurationError("Needed a specific file to modify.")
+ logger.debug("Will be working with %s variant only", self.load_only)
+
+ @property
+ def _dictionary(self) -> Dict[str, Any]:
+ """A dictionary representing the loaded configuration."""
+ # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+ # are not needed here.
+ retval = {}
+
+ for variant in OVERRIDE_ORDER:
+ retval.update(self._config[variant])
+
+ return retval
+
+ def _load_config_files(self) -> None:
+ """Loads configuration from configuration files"""
+ config_files = dict(self.iter_config_files())
+ if config_files[kinds.ENV][0:1] == [os.devnull]:
+ logger.debug(
+ "Skipping loading configuration files due to "
+ "environment's PIP_CONFIG_FILE being os.devnull"
+ )
+ return
+
+ for variant, files in config_files.items():
+ for fname in files:
+ # If there's specific variant set in `load_only`, load only
+ # that variant, not the others.
+ if self.load_only is not None and variant != self.load_only:
+ logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
+ continue
+
+ parser = self._load_file(variant, fname)
+
+ # Keeping track of the parsers used
+ self._parsers[variant].append((fname, parser))
+
+ def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+ logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
+ parser = self._construct_parser(fname)
+
+ for section in parser.sections():
+ items = parser.items(section)
+ self._config[variant].update(self._normalized_keys(section, items))
+
+ return parser
+
+ def _construct_parser(self, fname: str) -> RawConfigParser:
+ parser = configparser.RawConfigParser()
+ # If there is no such file, don't bother reading it but create the
+ # parser anyway, to hold the data.
+ # Doing this is useful when modifying and saving files, where we don't
+ # need to construct a parser.
+ if os.path.exists(fname):
+ locale_encoding = locale.getpreferredencoding(False)
+ try:
+ parser.read(fname, encoding=locale_encoding)
+ except UnicodeDecodeError:
+ # See https://github.com/pypa/pip/issues/4963
+ raise ConfigurationFileCouldNotBeLoaded(
+ reason=f"contains invalid {locale_encoding} characters",
+ fname=fname,
+ )
+ except configparser.Error as error:
+ # See https://github.com/pypa/pip/issues/4893
+ raise ConfigurationFileCouldNotBeLoaded(error=error)
+ return parser
+
+ def _load_environment_vars(self) -> None:
+ """Loads configuration from environment variables"""
+ self._config[kinds.ENV_VAR].update(
+ self._normalized_keys(":env:", self.get_environ_vars())
+ )
+
+ def _normalized_keys(
+ self, section: str, items: Iterable[Tuple[str, Any]]
+ ) -> Dict[str, Any]:
+ """Normalizes items to construct a dictionary with normalized keys.
+
+ This routine is where the names become keys and are made the same
+ regardless of source - configuration files or environment.
+ """
+ normalized = {}
+ for name, val in items:
+ key = section + "." + _normalize_name(name)
+ normalized[key] = val
+ return normalized
+
+ def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
+ """Returns a generator with all environmental vars with prefix PIP_"""
+ for key, val in os.environ.items():
+ if key.startswith("PIP_"):
+ name = key[4:].lower()
+ if name not in ENV_NAMES_IGNORED:
+ yield name, val
+
+ # XXX: This is patched in the tests.
+ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
+ """Yields variant and configuration files associated with it.
+
+ This should be treated like items of a dictionary.
+ """
+ # SMELL: Move the conditions out of this function
+
+        # The PIP_CONFIG_FILE environment variable is yielded first; note
+        # that yield order here does not set precedence, OVERRIDE_ORDER does.
+        config_file = os.environ.get("PIP_CONFIG_FILE", None)
+ if config_file is not None:
+ yield kinds.ENV, [config_file]
+ else:
+ yield kinds.ENV, []
+
+ config_files = get_configuration_files()
+
+ # at the base we have any global configuration
+ yield kinds.GLOBAL, config_files[kinds.GLOBAL]
+
+ # per-user configuration next
+ should_load_user_config = not self.isolated and not (
+ config_file and os.path.exists(config_file)
+ )
+ if should_load_user_config:
+ # The legacy config file is overridden by the new config file
+ yield kinds.USER, config_files[kinds.USER]
+
+        # finally, the site (virtualenv) configuration, which trumps the other files
+ yield kinds.SITE, config_files[kinds.SITE]
+
+ def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
+ """Get values present in a config file"""
+ return self._config[variant]
+
+ def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
+ # Determine which parser to modify
+ assert self.load_only
+ parsers = self._parsers[self.load_only]
+ if not parsers:
+ # This should not happen if everything works correctly.
+ raise ConfigurationError(
+ "Fatal Internal error [id=2]. Please report as a bug."
+ )
+
+ # Use the highest priority parser.
+ return parsers[-1]
+
+ # XXX: This is patched in the tests.
+ def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
+ file_parser_tuple = (fname, parser)
+ if file_parser_tuple not in self._modified_parsers:
+ self._modified_parsers.append(file_parser_tuple)
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self._dictionary!r})"
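+
+
+# Minimal usage sketch (not part of pip; the key and value are assumptions):
+def _example_configuration_roundtrip() -> None:
+    config = Configuration(isolated=False, load_only=kinds.USER)
+    config.load()
+    # Keys use the "section.name" form described in the module docstring.
+    config.set_value("global.timeout", "60")
+    assert config.get_value("global.timeout") == "60"
+    config.save()  # writes to the highest-priority user config file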
diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/__init__.py b/.venv/Lib/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a89a838b9a5cb264e9ae9d269fbedca6e2d6333
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,21 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.distributions.sdist import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(
+ install_req: InstallRequirement,
+) -> AbstractDistribution:
+ """Returns a Distribution for the given InstallRequirement"""
+ # Editable requirements will always be source distributions. They use the
+ # legacy logic until we create a modern standard for them.
+ if install_req.editable:
+ return SourceDistribution(install_req)
+
+ # If it's a wheel, it's a WheelDistribution
+ if install_req.is_wheel:
+ return WheelDistribution(install_req)
+
+ # Otherwise, a SourceDistribution
+ return SourceDistribution(install_req)
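+
+
+# Dispatch summary (illustrative): editable requirements and plain source
+# trees both become SourceDistribution (metadata built via PEP 517 or legacy
+# `setup.py egg_info`), while wheels become WheelDistribution and need no
+# build step at all.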
diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/base.py b/.venv/Lib/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..75ce2dc9057a20a957abe2fbd4ef094dc4196684
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,39 @@
+import abc
+
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata.base import BaseDistribution
+from pip._internal.req import InstallRequirement
+
+
+class AbstractDistribution(metaclass=abc.ABCMeta):
+ """A base class for handling installable artifacts.
+
+ The requirements for anything installable are as follows:
+
+ - we must be able to determine the requirement name
+ (or we can't correctly handle the non-upgrade case).
+
+ - for packages with setup requirements, we must also be able
+ to determine their requirements without installing additional
+ packages (for the same reason as run-time dependencies)
+
+ - we must be able to create a Distribution object exposing the
+ above metadata.
+ """
+
+ def __init__(self, req: InstallRequirement) -> None:
+ super().__init__()
+ self.req = req
+
+ @abc.abstractmethod
+ def get_metadata_distribution(self) -> BaseDistribution:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def prepare_distribution_metadata(
+ self,
+ finder: PackageFinder,
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ raise NotImplementedError()
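+
+
+# Hypothetical sketch (not part of pip) of a minimal concrete subclass; the
+# sibling sdist/wheel/installed modules are the real implementations:
+#
+#     class NoopDistribution(AbstractDistribution):
+#         def get_metadata_distribution(self) -> BaseDistribution:
+#             return self.req.get_dist()
+#
+#         def prepare_distribution_metadata(
+#             self, finder, build_isolation, check_build_deps
+#         ) -> None:
+#             pass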
diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/installed.py b/.venv/Lib/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 0000000000000000000000000000000000000000..edb38aa1a6c54dcb73e2f74b6bdfff337841d99f
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,23 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+
+
+class InstalledDistribution(AbstractDistribution):
+ """Represents an installed package.
+
+ This does not need any preparation as the required information has already
+ been computed.
+ """
+
+ def get_metadata_distribution(self) -> BaseDistribution:
+ assert self.req.satisfied_by is not None, "not actually installed"
+ return self.req.satisfied_by
+
+ def prepare_distribution_metadata(
+ self,
+ finder: PackageFinder,
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ pass
diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/sdist.py b/.venv/Lib/site-packages/pip/_internal/distributions/sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c25647930c6557d10e8a3ee92b68cfe3a07f7d7
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/distributions/sdist.py
@@ -0,0 +1,150 @@
+import logging
+from typing import Iterable, Set, Tuple
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+ """Represents a source distribution.
+
+ The preparation step for these needs metadata for the packages to be
+ generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+ """
+
+ def get_metadata_distribution(self) -> BaseDistribution:
+ return self.req.get_dist()
+
+ def prepare_distribution_metadata(
+ self,
+ finder: PackageFinder,
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ # Load pyproject.toml, to determine whether PEP 517 is to be used
+ self.req.load_pyproject_toml()
+
+ # Set up the build isolation, if this requirement should be isolated
+ should_isolate = self.req.use_pep517 and build_isolation
+ if should_isolate:
+            # Set up an isolated environment and install the build backend's
+            # static requirements in it.
+ self._prepare_build_backend(finder)
+            # Check that if the requirement is editable, it either supports PEP 660
+            # or has a setup.py or a setup.cfg. This cannot be done earlier because
+            # we need to set up the build backend to verify it supports
+            # build_editable, nor can it be done later, because we want to avoid
+            # installing build requirements needlessly. Doing it here also works
+            # around setuptools generating UNKNOWN.egg-info when running
+            # get_requires_for_build_wheel on a directory with neither setup.py
+            # nor setup.cfg.
+ self.req.isolated_editable_sanity_check()
+ # Install the dynamic build requirements.
+ self._install_build_reqs(finder)
+ # Check if the current environment provides build dependencies
+ should_check_deps = self.req.use_pep517 and check_build_deps
+ if should_check_deps:
+ pyproject_requires = self.req.pyproject_requires
+ assert pyproject_requires is not None
+ conflicting, missing = self.req.build_env.check_requirements(
+ pyproject_requires
+ )
+ if conflicting:
+ self._raise_conflicts("the backend dependencies", conflicting)
+ if missing:
+ self._raise_missing_reqs(missing)
+ self.req.prepare_metadata()
+
+ def _prepare_build_backend(self, finder: PackageFinder) -> None:
+ # Isolate in a BuildEnvironment and install the build-time
+ # requirements.
+ pyproject_requires = self.req.pyproject_requires
+ assert pyproject_requires is not None
+
+ self.req.build_env = BuildEnvironment()
+ self.req.build_env.install_requirements(
+ finder, pyproject_requires, "overlay", kind="build dependencies"
+ )
+ conflicting, missing = self.req.build_env.check_requirements(
+ self.req.requirements_to_check
+ )
+ if conflicting:
+ self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
+ if missing:
+ logger.warning(
+ "Missing build requirements in pyproject.toml for %s.",
+ self.req,
+ )
+ logger.warning(
+ "The project does not specify a build backend, and "
+ "pip cannot fall back to setuptools without %s.",
+ " and ".join(map(repr, sorted(missing))),
+ )
+
+ def _get_build_requires_wheel(self) -> Iterable[str]:
+ with self.req.build_env:
+ runner = runner_with_spinner_message("Getting requirements to build wheel")
+ backend = self.req.pep517_backend
+ assert backend is not None
+ with backend.subprocess_runner(runner):
+ return backend.get_requires_for_build_wheel()
+
+ def _get_build_requires_editable(self) -> Iterable[str]:
+ with self.req.build_env:
+ runner = runner_with_spinner_message(
+ "Getting requirements to build editable"
+ )
+ backend = self.req.pep517_backend
+ assert backend is not None
+ with backend.subprocess_runner(runner):
+ return backend.get_requires_for_build_editable()
+
+ def _install_build_reqs(self, finder: PackageFinder) -> None:
+ # Install any extra build dependencies that the backend requests.
+ # This must be done in a second pass, as the pyproject.toml
+ # dependencies must be installed before we can call the backend.
+ if (
+ self.req.editable
+ and self.req.permit_editable_wheels
+ and self.req.supports_pyproject_editable()
+ ):
+ build_reqs = self._get_build_requires_editable()
+ else:
+ build_reqs = self._get_build_requires_wheel()
+ conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+ if conflicting:
+ self._raise_conflicts("the backend dependencies", conflicting)
+ self.req.build_env.install_requirements(
+ finder, missing, "normal", kind="backend dependencies"
+ )
+
+ def _raise_conflicts(
+ self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+ ) -> None:
+ format_string = (
+ "Some build dependencies for {requirement} "
+ "conflict with {conflicting_with}: {description}."
+ )
+ error_message = format_string.format(
+ requirement=self.req,
+ conflicting_with=conflicting_with,
+ description=", ".join(
+ f"{installed} is incompatible with {wanted}"
+ for installed, wanted in sorted(conflicting_reqs)
+ ),
+ )
+ raise InstallationError(error_message)
+
+ def _raise_missing_reqs(self, missing: Set[str]) -> None:
+ format_string = (
+ "Some build dependencies for {requirement} are missing: {missing}."
+ )
+ error_message = format_string.format(
+ requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
+ )
+ raise InstallationError(error_message)
diff --git a/.venv/Lib/site-packages/pip/_internal/distributions/wheel.py b/.venv/Lib/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..03aac775b53f2dd3153a9f44829e7987258950aa
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,34 @@
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import (
+ BaseDistribution,
+ FilesystemWheel,
+ get_wheel_distribution,
+)
+
+
+class WheelDistribution(AbstractDistribution):
+ """Represents a wheel distribution.
+
+ This does not need any preparation as wheels can be directly unpacked.
+ """
+
+ def get_metadata_distribution(self) -> BaseDistribution:
+ """Loads the metadata from the wheel file into memory and returns a
+ Distribution that uses it, not relying on the wheel file or
+ requirement.
+ """
+ assert self.req.local_file_path, "Set as part of preparation during download"
+ assert self.req.name, "Wheels are never unnamed"
+ wheel = FilesystemWheel(self.req.local_file_path)
+ return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
+
+ def prepare_distribution_metadata(
+ self,
+ finder: PackageFinder,
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ pass
diff --git a/.venv/Lib/site-packages/pip/_internal/exceptions.py b/.venv/Lib/site-packages/pip/_internal/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..d95fe44b34a936dc178c89d98ee9ef093cb0fccb
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/exceptions.py
@@ -0,0 +1,733 @@
+"""Exceptions used throughout package.
+
+This module MUST NOT try to import from anything within `pip._internal` to
+operate. This is expected to be importable from any/all files within the
+subpackage and, thus, should not depend on them.
+"""
+
+import configparser
+import contextlib
+import locale
+import logging
+import pathlib
+import re
+import sys
+from itertools import chain, groupby, repeat
+from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union
+
+from pip._vendor.requests.models import Request, Response
+from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+if TYPE_CHECKING:
+ from hashlib import _Hash
+ from typing import Literal
+
+ from pip._internal.metadata import BaseDistribution
+ from pip._internal.req.req_install import InstallRequirement
+
+logger = logging.getLogger(__name__)
+
+
+#
+# Scaffolding
+#
+def _is_kebab_case(s: str) -> bool:
+ return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
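+ # For example (illustrative, not executed): _is_kebab_case("hash-mismatch")
+ # is True, while _is_kebab_case("Hash_Mismatch") is False; only lowercase
+ # words joined by single hyphens match.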
+
+
+def _prefix_with_indent(
+ s: Union[Text, str],
+ console: Console,
+ *,
+ prefix: str,
+ indent: str,
+) -> Text:
+ if isinstance(s, Text):
+ text = s
+ else:
+ text = console.render_str(s)
+
+ return console.render_str(prefix, overflow="ignore") + console.render_str(
+ f"\n{indent}", overflow="ignore"
+ ).join(text.split(allow_blank=True))
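+ # Illustrative rendering (not executed): with prefix="note: " and an
+ # indent of the same width, a two-line message comes out roughly as
+ #     note: first line
+ #           second line
+ # so the prefix decorates the first line and the indent aligns the rest.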
+
+
+class PipError(Exception):
+ """The base pip error."""
+
+
+class DiagnosticPipError(PipError):
+ """An error, that presents diagnostic information to the user.
+
+ This contains a bunch of logic, to enable pretty presentation of our error
+ messages. Each error gets a unique reference. Each error can also include
+ additional context, a hint and/or a note -- which are presented with the
+ main error message in a consistent style.
+
+ This is adapted from the error output styling in `sphinx-theme-builder`.
+ """
+
+ reference: str
+
+ def __init__(
+ self,
+ *,
+ kind: 'Literal["error", "warning"]' = "error",
+ reference: Optional[str] = None,
+ message: Union[str, Text],
+ context: Optional[Union[str, Text]],
+ hint_stmt: Optional[Union[str, Text]],
+ note_stmt: Optional[Union[str, Text]] = None,
+ link: Optional[str] = None,
+ ) -> None:
+ # Ensure a proper reference is provided.
+ if reference is None:
+ assert hasattr(self, "reference"), "error reference not provided!"
+ reference = self.reference
+ assert _is_kebab_case(reference), "error reference must be kebab-case!"
+
+ self.kind = kind
+ self.reference = reference
+
+ self.message = message
+ self.context = context
+
+ self.note_stmt = note_stmt
+ self.hint_stmt = hint_stmt
+
+ self.link = link
+
+ super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
+
+ def __repr__(self) -> str:
+ return (
+ f"<{self.__class__.__name__}("
+ f"reference={self.reference!r}, "
+ f"message={self.message!r}, "
+ f"context={self.context!r}, "
+ f"note_stmt={self.note_stmt!r}, "
+ f"hint_stmt={self.hint_stmt!r}"
+ ")>"
+ )
+
+ def __rich_console__(
+ self,
+ console: Console,
+ options: ConsoleOptions,
+ ) -> RenderResult:
+ colour = "red" if self.kind == "error" else "yellow"
+
+ yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
+ yield ""
+
+ if not options.ascii_only:
+ # Present the main message, with relevant context indented.
+ if self.context is not None:
+ yield _prefix_with_indent(
+ self.message,
+ console,
+ prefix=f"[{colour}]×[/] ",
+ indent=f"[{colour}]│[/] ",
+ )
+ yield _prefix_with_indent(
+ self.context,
+ console,
+ prefix=f"[{colour}]╰─>[/] ",
+ indent=f"[{colour}] [/] ",
+ )
+ else:
+ yield _prefix_with_indent(
+ self.message,
+ console,
+ prefix="[red]×[/] ",
+ indent=" ",
+ )
+ else:
+ yield self.message
+ if self.context is not None:
+ yield ""
+ yield self.context
+
+ if self.note_stmt is not None or self.hint_stmt is not None:
+ yield ""
+
+ if self.note_stmt is not None:
+ yield _prefix_with_indent(
+ self.note_stmt,
+ console,
+ prefix="[magenta bold]note[/]: ",
+ indent=" ",
+ )
+ if self.hint_stmt is not None:
+ yield _prefix_with_indent(
+ self.hint_stmt,
+ console,
+ prefix="[cyan bold]hint[/]: ",
+ indent=" ",
+ )
+
+ if self.link is not None:
+ yield ""
+ yield f"Link: {self.link}"
+
+
+#
+# Actual Errors
+#
+class ConfigurationError(PipError):
+ """General exception in configuration"""
+
+
+class InstallationError(PipError):
+ """General exception during installation"""
+
+
+class UninstallationError(PipError):
+ """General exception during uninstallation"""
+
+
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+ """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+ reference = "missing-pyproject-build-system-requires"
+
+ def __init__(self, *, package: str) -> None:
+ super().__init__(
+ message=f"Can not process {escape(package)}",
+ context=Text(
+ "This package has an invalid pyproject.toml file.\n"
+ "The [build-system] table is missing the mandatory `requires` key."
+ ),
+ note_stmt="This is an issue with the package mentioned above, not pip.",
+ hint_stmt=Text("See PEP 518 for the detailed specification."),
+ )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+ """Raised when pyproject.toml an invalid `build-system.requires`."""
+
+ reference = "invalid-pyproject-build-system-requires"
+
+ def __init__(self, *, package: str, reason: str) -> None:
+ super().__init__(
+ message=f"Can not process {escape(package)}",
+ context=Text(
+ "This package has an invalid `build-system.requires` key in "
+ f"pyproject.toml.\n{reason}"
+ ),
+ note_stmt="This is an issue with the package mentioned above, not pip.",
+ hint_stmt=Text("See PEP 518 for the detailed specification."),
+ )
+
+
+class NoneMetadataError(PipError):
+ """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+ This signifies an inconsistency, when the Distribution claims to have
+ the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+ not actually able to produce its content. This may be due to permission
+ errors.
+ """
+
+ def __init__(
+ self,
+ dist: "BaseDistribution",
+ metadata_name: str,
+ ) -> None:
+ """
+ :param dist: A Distribution object.
+ :param metadata_name: The name of the metadata being accessed
+ (can be "METADATA" or "PKG-INFO").
+ """
+ self.dist = dist
+ self.metadata_name = metadata_name
+
+ def __str__(self) -> str:
+ # Use `dist` in the error message because its stringification
+ # includes more information, like the version and location.
+ return "None {} metadata found for distribution: {}".format(
+ self.metadata_name,
+ self.dist,
+ )
+
+
+class UserInstallationInvalid(InstallationError):
+ """A --user install is requested on an environment without user site."""
+
+ def __str__(self) -> str:
+ return "User base directory is not specified"
+
+
+class InvalidSchemeCombination(InstallationError):
+ def __str__(self) -> str:
+ before = ", ".join(str(a) for a in self.args[:-1])
+ return f"Cannot set {before} and {self.args[-1]} together"
+
+
+class DistributionNotFound(InstallationError):
+ """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+ """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+ """Raised when the most up-to-date version of a package is already
+ installed."""
+
+
+class BadCommand(PipError):
+ """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+ """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+ """Raised when there's a previous conflicting build directory"""
+
+
+class NetworkConnectionError(PipError):
+ """HTTP connection error"""
+
+ def __init__(
+ self,
+ error_msg: str,
+ response: Optional[Response] = None,
+ request: Optional[Request] = None,
+ ) -> None:
+ """
+ Initialize NetworkConnectionError with `request` and `response`
+ objects.
+ """
+ self.response = response
+ self.request = request
+ self.error_msg = error_msg
+ if (
+ self.response is not None
+ and not self.request
+ and hasattr(response, "request")
+ ):
+ self.request = self.response.request
+ super().__init__(error_msg, response, request)
+
+ def __str__(self) -> str:
+ return str(self.error_msg)
+
+
+class InvalidWheelFilename(InstallationError):
+ """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+ """Unsupported wheel."""
+
+
+class InvalidWheel(InstallationError):
+ """Invalid (e.g. corrupt) wheel."""
+
+ def __init__(self, location: str, name: str):
+ self.location = location
+ self.name = name
+
+ def __str__(self) -> str:
+ return f"Wheel '{self.name}' located at {self.location} is invalid."
+
+
+class MetadataInconsistent(InstallationError):
+ """Built metadata contains inconsistent information.
+
+ This is raised when the metadata contains values (e.g. name and version)
+ that do not match the information previously obtained from sdist filename,
+ user-supplied ``#egg=`` value, or an install requirement name.
+ """
+
+ def __init__(
+ self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
+ ) -> None:
+ self.ireq = ireq
+ self.field = field
+ self.f_val = f_val
+ self.m_val = m_val
+
+ def __str__(self) -> str:
+ return (
+ f"Requested {self.ireq} has inconsistent {self.field}: "
+ f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
+ )
+
+
+class InstallationSubprocessError(DiagnosticPipError, InstallationError):
+ """A subprocess call failed."""
+
+ reference = "subprocess-exited-with-error"
+
+ def __init__(
+ self,
+ *,
+ command_description: str,
+ exit_code: int,
+ output_lines: Optional[List[str]],
+ ) -> None:
+ if output_lines is None:
+ output_prompt = Text("See above for output.")
+ else:
+ output_prompt = (
+ Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
+ + Text("".join(output_lines))
+ + Text.from_markup(R"[red]\[end of output][/]")
+ )
+
+ super().__init__(
+ message=(
+ f"[green]{escape(command_description)}[/] did not run successfully.\n"
+ f"exit code: {exit_code}"
+ ),
+ context=output_prompt,
+ hint_stmt=None,
+ note_stmt=(
+ "This error originates from a subprocess, and is likely not a "
+ "problem with pip."
+ ),
+ )
+
+ self.command_description = command_description
+ self.exit_code = exit_code
+
+ def __str__(self) -> str:
+ return f"{self.command_description} exited with {self.exit_code}"
+
+
+class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
+ reference = "metadata-generation-failed"
+
+ def __init__(
+ self,
+ *,
+ package_details: str,
+ ) -> None:
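+ # Note: super(InstallationSubprocessError, self) skips
+ # InstallationSubprocessError.__init__ in the MRO and runs
+ # DiagnosticPipError.__init__ directly, since this error presents
+ # package details instead of raw subprocess output.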
+ super(InstallationSubprocessError, self).__init__(
+ message="Encountered error while generating package metadata.",
+ context=escape(package_details),
+ hint_stmt="See above for details.",
+ note_stmt="This is an issue with the package mentioned above, not pip.",
+ )
+
+ def __str__(self) -> str:
+ return "metadata generation failed"
+
+
+class HashErrors(InstallationError):
+ """Multiple HashError instances rolled into one for reporting"""
+
+ def __init__(self) -> None:
+ self.errors: List["HashError"] = []
+
+ def append(self, error: "HashError") -> None:
+ self.errors.append(error)
+
+ def __str__(self) -> str:
+ lines = []
+ self.errors.sort(key=lambda e: e.order)
+ for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
+ lines.append(cls.head)
+ lines.extend(e.body() for e in errors_of_cls)
+ if lines:
+ return "\n".join(lines)
+ return ""
+
+ def __bool__(self) -> bool:
+ return bool(self.errors)
+
+
+class HashError(InstallationError):
+ """
+ A failure to verify a package against known-good hashes
+
+ :cvar order: An int sorting hash exception classes by difficulty of
+ recovery (lower being harder), so the user doesn't bother fretting
+ about unpinned packages when they have deeper issues, like VCS
+ dependencies, to deal with. Also keeps error reports in a
+ deterministic order.
+ :cvar head: A section heading for display above potentially many
+ exceptions of this kind
+ :ivar req: The InstallRequirement that triggered this error. This is
+ pasted on after the exception is instantiated, because it's not
+ typically available earlier.
+
+ """
+
+ req: Optional["InstallRequirement"] = None
+ head = ""
+ order: int = -1
+
+ def body(self) -> str:
+ """Return a summary of me for display under the heading.
+
+ This default implementation simply returns a description of the
+ triggering requirement.
+
+ The triggering requirement is read from ``self.req``, with its link
+ already populated by the resolver's _populate_link().
+
+ """
+ return f" {self._requirement_name()}"
+
+ def __str__(self) -> str:
+ return f"{self.head}\n{self.body()}"
+
+ def _requirement_name(self) -> str:
+ """Return a description of the requirement that triggered me.
+
+ This default implementation returns the long description of the req,
+ with line numbers.
+
+ """
+ return str(self.req) if self.req else "unknown package"
+
+
+class VcsHashUnsupported(HashError):
+ """A hash was provided for a version-control-system-based requirement, but
+ we don't have a method for hashing those."""
+
+ order = 0
+ head = (
+ "Can't verify hashes for these requirements because we don't "
+ "have a way to hash version control repositories:"
+ )
+
+
+class DirectoryUrlHashUnsupported(HashError):
+ """A hash was provided for a version-control-system-based requirement, but
+ we don't have a method for hashing those."""
+
+ order = 1
+ head = (
+ "Can't verify hashes for these file:// requirements because they "
+ "point to directories:"
+ )
+
+
+class HashMissing(HashError):
+ """A hash was needed for a requirement but is absent."""
+
+ order = 2
+ head = (
+ "Hashes are required in --require-hashes mode, but they are "
+ "missing from some requirements. Here is a list of those "
+ "requirements along with the hashes their downloaded archives "
+ "actually had. Add lines like these to your requirements files to "
+ "prevent tampering. (If you did not enable --require-hashes "
+ "manually, note that it turns on automatically when any package "
+ "has a hash.)"
+ )
+
+ def __init__(self, gotten_hash: str) -> None:
+ """
+ :param gotten_hash: The hash of the (possibly malicious) archive we
+ just downloaded
+ """
+ self.gotten_hash = gotten_hash
+
+ def body(self) -> str:
+ # Dodge circular import.
+ from pip._internal.utils.hashes import FAVORITE_HASH
+
+ package = None
+ if self.req:
+ # In the case of URL-based requirements, display the original URL
+ # seen in the requirements file rather than the package name,
+ # so the output can be directly copied into the requirements file.
+ package = (
+ self.req.original_link
+ if self.req.is_direct
+ # In case someone feeds something downright stupid
+ # to InstallRequirement's constructor.
+ else getattr(self.req, "req", None)
+ )
+ return " {} --hash={}:{}".format(
+ package or "unknown package", FAVORITE_HASH, self.gotten_hash
+ )
+
+
+class HashUnpinned(HashError):
+ """A requirement had a hash specified but was not pinned to a specific
+ version."""
+
+ order = 3
+ head = (
+ "In --require-hashes mode, all requirements must have their "
+ "versions pinned with ==. These do not:"
+ )
+
+
+class HashMismatch(HashError):
+ """
+ Distribution file hash values don't match.
+
+ :ivar package_name: The name of the package that triggered the hash
+ mismatch. Feel free to write to this after the exception is raised to
+ improve its error message.
+
+ """
+
+ order = 4
+ head = (
+ "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+ "FILE. If you have updated the package versions, please update "
+ "the hashes. Otherwise, examine the package contents carefully; "
+ "someone may have tampered with them."
+ )
+
+ def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
+ """
+ :param allowed: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ :param gots: A dict of algorithm names pointing to hashes we
+ actually got from the files under suspicion
+ """
+ self.allowed = allowed
+ self.gots = gots
+
+ def body(self) -> str:
+ return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
+
+ def _hash_comparison(self) -> str:
+ """
+ Return a comparison of actual and expected hash values.
+
+ Example::
+
+ Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+ or 123451234512345123451234512345123451234512345
+ Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+ """
+
+ def hash_then_or(hash_name: str) -> "chain[str]":
+ # For now, all the decent hashes have 6-char names, so we can get
+ # away with hard-coding space literals.
+ return chain([hash_name], repeat(" or"))
+
+ lines: List[str] = []
+ for hash_name, expecteds in self.allowed.items():
+ prefix = hash_then_or(hash_name)
+ lines.extend(
+ (" Expected {} {}".format(next(prefix), e)) for e in expecteds
+ )
+ lines.append(
+ " Got {}\n".format(self.gots[hash_name].hexdigest())
+ )
+ return "\n".join(lines)
+
+
+class UnsupportedPythonVersion(InstallationError):
+ """Unsupported python version according to Requires-Python package
+ metadata."""
+
+
+class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
+ """When there are errors while loading a configuration file"""
+
+ def __init__(
+ self,
+ reason: str = "could not be loaded",
+ fname: Optional[str] = None,
+ error: Optional[configparser.Error] = None,
+ ) -> None:
+ super().__init__(error)
+ self.reason = reason
+ self.fname = fname
+ self.error = error
+
+ def __str__(self) -> str:
+ if self.fname is not None:
+ message_part = f" in {self.fname}."
+ else:
+ assert self.error is not None
+ message_part = f".\n{self.error}\n"
+ return f"Configuration file {self.reason}{message_part}"
+
+
+_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
+The Python environment under {sys.prefix} is managed externally, and may not be
+manipulated by the user. Please use specific tooling from the distributor of
+the Python installation to interact with this environment instead.
+"""
+
+
+class ExternallyManagedEnvironment(DiagnosticPipError):
+ """The current environment is externally managed.
+
+ This is raised when the current environment is externally managed, as
+ defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
+ and displayed when the error is bubbled up to the user.
+
+ :param error: The error message read from ``EXTERNALLY-MANAGED``.
+ """
+
+ reference = "externally-managed-environment"
+
+ def __init__(self, error: Optional[str]) -> None:
+ if error is None:
+ context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
+ else:
+ context = Text(error)
+ super().__init__(
+ message="This environment is externally managed",
+ context=context,
+ note_stmt=(
+ "If you believe this is a mistake, please contact your "
+ "Python installation or OS distribution provider. "
+ "You can override this, at the risk of breaking your Python "
+ "installation or OS, by passing --break-system-packages."
+ ),
+ hint_stmt=Text("See PEP 668 for the detailed specification."),
+ )
+
+ @staticmethod
+ def _iter_externally_managed_error_keys() -> Iterator[str]:
+ # LC_MESSAGES is in POSIX, but not the C standard. The most common
+ # platform that does not implement this category is Windows, where
+ # using other categories for console message localization is equally
+ # unreliable, so we fall back to the locale-less vendor message. This
+ # can always be re-evaluated when a vendor proposes a new alternative.
+ try:
+ category = locale.LC_MESSAGES
+ except AttributeError:
+ lang: Optional[str] = None
+ else:
+ lang, _ = locale.getlocale(category)
+ if lang is not None:
+ yield f"Error-{lang}"
+ for sep in ("-", "_"):
+ before, found, _ = lang.partition(sep)
+ if not found:
+ continue
+ yield f"Error-{before}"
+ yield "Error"
+
+ @classmethod
+ def from_config(
+ cls,
+ config: Union[pathlib.Path, str],
+ ) -> "ExternallyManagedEnvironment":
+ parser = configparser.ConfigParser(interpolation=None)
+ try:
+ parser.read(config, encoding="utf-8")
+ section = parser["externally-managed"]
+ for key in cls._iter_externally_managed_error_keys():
+ with contextlib.suppress(KeyError):
+ return cls(section[key])
+ except KeyError:
+ pass
+ except (OSError, UnicodeDecodeError, configparser.ParsingError):
+ from pip._internal.utils._log import VERBOSE
+
+ exc_info = logger.isEnabledFor(VERBOSE)
+ logger.warning("Failed to read %s", config, exc_info=exc_info)
+ return cls(None)
diff --git a/.venv/Lib/site-packages/pip/_internal/index/__init__.py b/.venv/Lib/site-packages/pip/_internal/index/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a17b7b3b6ad49157ee41f3da304fec3d32342d3
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/index/__init__.py
@@ -0,0 +1,2 @@
+"""Index interaction code
+"""
diff --git a/.venv/Lib/site-packages/pip/_internal/index/collector.py b/.venv/Lib/site-packages/pip/_internal/index/collector.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3e293ea3a508dc54674349e845f9794118f548b
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/index/collector.py
@@ -0,0 +1,505 @@
+"""
+The main purpose of this module is to expose LinkCollector.collect_sources().
+"""
+
+import collections
+import email.message
+import functools
+import itertools
+import json
+import logging
+import os
+import urllib.parse
+import urllib.request
+from html.parser import HTMLParser
+from optparse import Values
+from typing import (
+ TYPE_CHECKING,
+ Callable,
+ Dict,
+ Iterable,
+ List,
+ MutableMapping,
+ NamedTuple,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+)
+
+from pip._vendor import requests
+from pip._vendor.requests import Response
+from pip._vendor.requests.exceptions import RetryError, SSLError
+
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.models.link import Link
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+from pip._internal.utils.filetypes import is_archive_file
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import vcs
+
+from .sources import CandidatesFromPage, LinkSource, build_source
+
+if TYPE_CHECKING:
+ from typing import Protocol
+else:
+ Protocol = object
+
+logger = logging.getLogger(__name__)
+
+ResponseHeaders = MutableMapping[str, str]
+
+
+def _match_vcs_scheme(url: str) -> Optional[str]:
+ """Look for VCS schemes in the URL.
+
+ Returns the matched VCS scheme, or None if there's no match.
+ """
+ for scheme in vcs.schemes:
+ if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
+ return scheme
+ return None
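+ # For example, assuming "git" is among vcs.schemes, a URL like
+ # "git+https://example.com/repo.git" matches and returns "git", while
+ # "https://example.com/repo.git" returns None.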
+
+
+class _NotAPIContent(Exception):
+ def __init__(self, content_type: str, request_desc: str) -> None:
+ super().__init__(content_type, request_desc)
+ self.content_type = content_type
+ self.request_desc = request_desc
+
+
+def _ensure_api_header(response: Response) -> None:
+ """
+ Check the Content-Type header to ensure the response contains a Simple
+ API response.
+
+ Raises `_NotAPIContent` if the Content-Type is not a supported one.
+ """
+ content_type = response.headers.get("Content-Type", "Unknown")
+
+ content_type_l = content_type.lower()
+ if content_type_l.startswith(
+ (
+ "text/html",
+ "application/vnd.pypi.simple.v1+html",
+ "application/vnd.pypi.simple.v1+json",
+ )
+ ):
+ return
+
+ raise _NotAPIContent(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+ pass
+
+
+def _ensure_api_response(url: str, session: PipSession) -> None:
+ """
+ Send a HEAD request to the URL, and ensure the response contains a Simple
+ API response.
+
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+ `_NotAPIContent` if the Content-Type is not a supported one.
+ """
+ scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
+ if scheme not in {"http", "https"}:
+ raise _NotHTTP()
+
+ resp = session.head(url, allow_redirects=True)
+ raise_for_status(resp)
+
+ _ensure_api_header(resp)
+
+
+def _get_simple_response(url: str, session: PipSession) -> Response:
+ """Access an Simple API response with GET, and return the response.
+
+ This consists of three parts:
+
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
+ check the Content-Type is HTML or Simple API, to avoid downloading a
+ large file. Raise `_NotHTTP` if the content type cannot be determined, or
+ `_NotAPIContent` if it is not HTML or a Simple API.
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
+ 3. Check the Content-Type header to make sure we got a Simple API response,
+ and raise `_NotAPIContent` otherwise.
+ """
+ if is_archive_file(Link(url).filename):
+ _ensure_api_response(url, session=session)
+
+ logger.debug("Getting page %s", redact_auth_from_url(url))
+
+ resp = session.get(
+ url,
+ headers={
+ "Accept": ", ".join(
+ [
+ "application/vnd.pypi.simple.v1+json",
+ "application/vnd.pypi.simple.v1+html; q=0.1",
+ "text/html; q=0.01",
+ ]
+ ),
+ # We don't want to blindly return cached data for
+ # /simple/, because authors generally expect that
+ # twine upload && pip install will function, but if
+ # they've done a pip install in the last ~10 minutes
+ # it won't. Thus by setting this to zero we will not
+ # blindly use any cached data. The benefit of
+ # using max-age=0 instead of no-cache is that we will
+ # still support conditional requests, so we will still
+ # minimize traffic sent in cases where the page hasn't
+ # changed at all; we will just always incur the round
+ # trip for the conditional GET now instead of only
+ # once per 10 minutes.
+ # For more information, please see pypa/pip#5670.
+ "Cache-Control": "max-age=0",
+ },
+ )
+ raise_for_status(resp)
+
+ # The check for archives above only works if the URL ends with
+ # something that looks like an archive. However, that is not a
+ # requirement of a URL. Unless we issue a HEAD request on every
+ # URL, we cannot know ahead of time for sure if something is a
+ # Simple API response or not. However, we can check after we've
+ # downloaded it.
+ _ensure_api_header(resp)
+
+ logger.debug(
+ "Fetched page %s as %s",
+ redact_auth_from_url(url),
+ resp.headers.get("Content-Type", "Unknown"),
+ )
+
+ return resp
+
+
+def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
+ """Determine if we have any encoding information in our headers."""
+ if headers and "Content-Type" in headers:
+ m = email.message.Message()
+ m["content-type"] = headers["Content-Type"]
+ charset = m.get_param("charset")
+ if charset:
+ return str(charset)
+ return None
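+ # For example, a Content-Type of "text/html; charset=utf-8" yields
+ # "utf-8"; a Content-Type without a charset parameter yields None.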
+
+
+class CacheablePageContent:
+ def __init__(self, page: "IndexContent") -> None:
+ assert page.cache_link_parsing
+ self.page = page
+
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, type(self)) and self.page.url == other.page.url
+
+ def __hash__(self) -> int:
+ return hash(self.page.url)
+
+
+class ParseLinks(Protocol):
+ def __call__(self, page: "IndexContent") -> Iterable[Link]:
+ ...
+
+
+def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
+ """
+ Given a function that parses an Iterable[Link] from an IndexContent, cache the
+ function's result (keyed by CacheablePageContent), unless the IndexContent
+ `page` has `page.cache_link_parsing == False`.
+ """
+
+ @functools.lru_cache(maxsize=None)
+ def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
+ return list(fn(cacheable_page.page))
+
+ @functools.wraps(fn)
+ def wrapper_wrapper(page: "IndexContent") -> List[Link]:
+ if page.cache_link_parsing:
+ return wrapper(CacheablePageContent(page))
+ return list(fn(page))
+
+ return wrapper_wrapper
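+ # Net effect: repeated parse_links() calls on the same cacheable page hit
+ # the lru_cache (keyed by the page URL via CacheablePageContent), while
+ # pages with cache_link_parsing=False are re-parsed on every call.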
+
+
+@with_cached_index_content
+def parse_links(page: "IndexContent") -> Iterable[Link]:
+ """
+ Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
+ """
+
+ content_type_l = page.content_type.lower()
+ if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
+ data = json.loads(page.content)
+ for file in data.get("files", []):
+ link = Link.from_json(file, page.url)
+ if link is None:
+ continue
+ yield link
+ return
+
+ parser = HTMLLinkParser(page.url)
+ encoding = page.encoding or "utf-8"
+ parser.feed(page.content.decode(encoding))
+
+ url = page.url
+ base_url = parser.base_url or url
+ for anchor in parser.anchors:
+ link = Link.from_element(anchor, page_url=url, base_url=base_url)
+ if link is None:
+ continue
+ yield link
+
+
+class IndexContent:
+ """Represents one response (or page), along with its URL"""
+
+ def __init__(
+ self,
+ content: bytes,
+ content_type: str,
+ encoding: Optional[str],
+ url: str,
+ cache_link_parsing: bool = True,
+ ) -> None:
+ """
+ :param encoding: the encoding to decode the given content.
+ :param url: the URL from which the HTML was downloaded.
+ :param cache_link_parsing: whether links parsed from this page's url
+ should be cached. PyPI index urls should
+ have this set to False, for example.
+ """
+ self.content = content
+ self.content_type = content_type
+ self.encoding = encoding
+ self.url = url
+ self.cache_link_parsing = cache_link_parsing
+
+ def __str__(self) -> str:
+ return redact_auth_from_url(self.url)
+
+
+class HTMLLinkParser(HTMLParser):
+ """
+ HTMLParser that keeps the first base HREF and a list of all anchor
+ elements' attributes.
+ """
+
+ def __init__(self, url: str) -> None:
+ super().__init__(convert_charrefs=True)
+
+ self.url: str = url
+ self.base_url: Optional[str] = None
+ self.anchors: List[Dict[str, Optional[str]]] = []
+
+ def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
+ if tag == "base" and self.base_url is None:
+ href = self.get_href(attrs)
+ if href is not None:
+ self.base_url = href
+ elif tag == "a":
+ self.anchors.append(dict(attrs))
+
+ def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
+ for name, value in attrs:
+ if name == "href":
+ return value
+ return None
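+ # Sketch of the collected state (illustrative input, not executed):
+ # feeding '<base href="https://example.com/simple/">' sets base_url,
+ # and each '<a href="pkg-1.0.tar.gz">' appends its attribute dict
+ # (e.g. {"href": "pkg-1.0.tar.gz"}) to self.anchors.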
+
+
+def _handle_get_simple_fail(
+ link: Link,
+ reason: Union[str, Exception],
+ meth: Optional[Callable[..., None]] = None,
+) -> None:
+ if meth is None:
+ meth = logger.debug
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
+def _make_index_content(
+ response: Response, cache_link_parsing: bool = True
+) -> IndexContent:
+ encoding = _get_encoding_from_headers(response.headers)
+ return IndexContent(
+ response.content,
+ response.headers["Content-Type"],
+ encoding=encoding,
+ url=response.url,
+ cache_link_parsing=cache_link_parsing,
+ )
+
+
+def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
+ url = link.url.split("#", 1)[0]
+
+ # Check for VCS schemes that do not support lookup as web pages.
+ vcs_scheme = _match_vcs_scheme(url)
+ if vcs_scheme:
+ logger.warning(
+ "Cannot look at %s URL %s because it does not support lookup as web pages.",
+ vcs_scheme,
+ link,
+ )
+ return None
+
+ # Tack index.html onto file:// URLs that point to directories
+ scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
+ if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
+ # add trailing slash if not present so urljoin doesn't trim
+ # final segment
+ if not url.endswith("/"):
+ url += "/"
+ # TODO: In the future, it would be nice if pip supported PEP 691
+ # style responses in the file:// URLs, however there's no
+ # standard file extension for application/vnd.pypi.simple.v1+json
+ # so we'll need to come up with something on our own.
+ url = urllib.parse.urljoin(url, "index.html")
+ logger.debug(" file: URL is directory, getting %s", url)
+
+ try:
+ resp = _get_simple_response(url, session=session)
+ except _NotHTTP:
+ logger.warning(
+ "Skipping page %s because it looks like an archive, and cannot "
+ "be checked by a HTTP HEAD request.",
+ link,
+ )
+ except _NotAPIContent as exc:
+ logger.warning(
+ "Skipping page %s because the %s request got Content-Type: %s. "
+ "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
+ "application/vnd.pypi.simple.v1+html, and text/html",
+ link,
+ exc.request_desc,
+ exc.content_type,
+ )
+ except NetworkConnectionError as exc:
+ _handle_get_simple_fail(link, exc)
+ except RetryError as exc:
+ _handle_get_simple_fail(link, exc)
+ except SSLError as exc:
+ reason = "There was a problem confirming the ssl certificate: "
+ reason += str(exc)
+ _handle_get_simple_fail(link, reason, meth=logger.info)
+ except requests.ConnectionError as exc:
+ _handle_get_simple_fail(link, f"connection error: {exc}")
+ except requests.Timeout:
+ _handle_get_simple_fail(link, "timed out")
+ else:
+ return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
+ return None
+
+
+class CollectedSources(NamedTuple):
+ find_links: Sequence[Optional[LinkSource]]
+ index_urls: Sequence[Optional[LinkSource]]
+
+
+class LinkCollector:
+
+ """
+ Responsible for collecting Link objects from all configured locations,
+ making network requests as needed.
+
+ The class's main method is its collect_sources() method.
+ """
+
+ def __init__(
+ self,
+ session: PipSession,
+ search_scope: SearchScope,
+ ) -> None:
+ self.search_scope = search_scope
+ self.session = session
+
+ @classmethod
+ def create(
+ cls,
+ session: PipSession,
+ options: Values,
+ suppress_no_index: bool = False,
+ ) -> "LinkCollector":
+ """
+ :param session: The Session to use to make requests.
+ :param suppress_no_index: Whether to ignore the --no-index option
+ when constructing the SearchScope object.
+ """
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index and not suppress_no_index:
+ logger.debug(
+ "Ignoring indexes: %s",
+ ",".join(redact_auth_from_url(url) for url in index_urls),
+ )
+ index_urls = []
+
+ # Make sure find_links is a list before passing to create().
+ find_links = options.find_links or []
+
+ search_scope = SearchScope.create(
+ find_links=find_links,
+ index_urls=index_urls,
+ no_index=options.no_index,
+ )
+ link_collector = LinkCollector(
+ session=session,
+ search_scope=search_scope,
+ )
+ return link_collector
+
+ @property
+ def find_links(self) -> List[str]:
+ return self.search_scope.find_links
+
+ def fetch_response(self, location: Link) -> Optional[IndexContent]:
+ """
+ Fetch an index response (HTML or JSON) containing package links.
+ """
+ return _get_index_content(location, session=self.session)
+
+ def collect_sources(
+ self,
+ project_name: str,
+ candidates_from_page: CandidatesFromPage,
+ ) -> CollectedSources:
+ # Constructing an OrderedDict here deduplicates sources by URL.
+ index_url_sources = collections.OrderedDict(
+ build_source(
+ loc,
+ candidates_from_page=candidates_from_page,
+ page_validator=self.session.is_secure_origin,
+ expand_dir=False,
+ cache_link_parsing=False,
+ )
+ for loc in self.search_scope.get_index_urls_locations(project_name)
+ ).values()
+ find_links_sources = collections.OrderedDict(
+ build_source(
+ loc,
+ candidates_from_page=candidates_from_page,
+ page_validator=self.session.is_secure_origin,
+ expand_dir=True,
+ cache_link_parsing=True,
+ )
+ for loc in self.find_links
+ ).values()
+
+ if logger.isEnabledFor(logging.DEBUG):
+ lines = [
+ f"* {s.link}"
+ for s in itertools.chain(find_links_sources, index_url_sources)
+ if s is not None and s.link is not None
+ ]
+ lines = [
+ f"{len(lines)} location(s) to search "
+ f"for versions of {project_name}:"
+ ] + lines
+ logger.debug("\n".join(lines))
+
+ return CollectedSources(
+ find_links=list(find_links_sources),
+ index_urls=list(index_url_sources),
+ )
diff --git a/.venv/Lib/site-packages/pip/_internal/index/package_finder.py b/.venv/Lib/site-packages/pip/_internal/index/package_finder.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6f8d57e854b77f60c04f59a7f3ff74476a5f5d6
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/index/package_finder.py
@@ -0,0 +1,1029 @@
+"""Routines related to PyPI, indexes"""
+
+import enum
+import functools
+import itertools
+import logging
+import re
+from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.tags import Tag
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled,
+ DistributionNotFound,
+ InvalidWheelFilename,
+ UnsupportedWheel,
+)
+from pip._internal.index.collector import LinkCollector, parse_links
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import InstallRequirement
+from pip._internal.utils._log import getLogger
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import build_netloc
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
+
+if TYPE_CHECKING:
+ from pip._vendor.typing_extensions import TypeGuard
+
+__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
+
+
+logger = getLogger(__name__)
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
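+ # The fields of CandidateSortingKey correspond, in order, to the tuple
+ # built by CandidateEvaluator._sort_key below: (has_allowed_hash,
+ # yank_value, binary_preference, version, tag priority, build tag).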
+
+
+def _check_link_requires_python(
+ link: Link,
+ version_info: Tuple[int, int, int],
+ ignore_requires_python: bool = False,
+) -> bool:
+ """
+ Return whether the given Python version is compatible with a link's
+ "Requires-Python" value.
+
+ :param version_info: A 3-tuple of ints representing the Python
+ major-minor-micro version to check.
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
+ value if the given Python version isn't compatible.
+ """
+ try:
+ is_compatible = check_requires_python(
+ link.requires_python,
+ version_info=version_info,
+ )
+ except specifiers.InvalidSpecifier:
+ logger.debug(
+ "Ignoring invalid Requires-Python (%r) for link: %s",
+ link.requires_python,
+ link,
+ )
+ else:
+ if not is_compatible:
+ version = ".".join(map(str, version_info))
+ if not ignore_requires_python:
+ logger.verbose(
+ "Link requires a different Python (%s not in: %r): %s",
+ version,
+ link.requires_python,
+ link,
+ )
+ return False
+
+ logger.debug(
+ "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
+ version,
+ link.requires_python,
+ link,
+ )
+
+ return True
+
+
+class LinkType(enum.Enum):
+ candidate = enum.auto()
+ different_project = enum.auto()
+ yanked = enum.auto()
+ format_unsupported = enum.auto()
+ format_invalid = enum.auto()
+ platform_mismatch = enum.auto()
+ requires_python_mismatch = enum.auto()
+
+
+class LinkEvaluator:
+
+ """
+ Responsible for evaluating links for a particular project.
+ """
+
+ _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ def __init__(
+ self,
+ project_name: str,
+ canonical_name: str,
+ formats: FrozenSet[str],
+ target_python: TargetPython,
+ allow_yanked: bool,
+ ignore_requires_python: Optional[bool] = None,
+ ) -> None:
+ """
+ :param project_name: The user supplied package name.
+ :param canonical_name: The canonical package name.
+ :param formats: The formats allowed for this package. Should be a set
+ with 'binary' or 'source' or both in it.
+ :param target_python: The target Python interpreter to use when
+ evaluating link compatibility. This is used, for example, to
+ check wheel compatibility, as well as when checking the Python
+ version, e.g. the Python version embedded in a link filename
+ (or egg fragment) and against an HTML link's optional PEP 503
+ "data-requires-python" attribute.
+ :param allow_yanked: Whether files marked as yanked (in the sense
+ of PEP 592) are permitted to be candidates for install.
+ :param ignore_requires_python: Whether to ignore incompatible
+ PEP 503 "data-requires-python" values in HTML links. Defaults
+ to False.
+ """
+ if ignore_requires_python is None:
+ ignore_requires_python = False
+
+ self._allow_yanked = allow_yanked
+ self._canonical_name = canonical_name
+ self._ignore_requires_python = ignore_requires_python
+ self._formats = formats
+ self._target_python = target_python
+
+ self.project_name = project_name
+
+ def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
+ """
+ Determine whether a link is a candidate for installation.
+
+ :return: A tuple (result, detail), where *result* is an enum
+ representing whether the evaluation found a candidate, or the reason
+ why one is not found. If a candidate is found, *detail* will be the
+ candidate's version string; if one is not found, it contains the
+ reason the link fails to qualify.
+ """
+ version = None
+ if link.is_yanked and not self._allow_yanked:
+ reason = link.yanked_reason or ""
+ return (LinkType.yanked, f"yanked for reason: {reason}")
+
+ if link.egg_fragment:
+ egg_info = link.egg_fragment
+ ext = link.ext
+ else:
+ egg_info, ext = link.splitext()
+ if not ext:
+ return (LinkType.format_unsupported, "not a file")
+ if ext not in SUPPORTED_EXTENSIONS:
+ return (
+ LinkType.format_unsupported,
+ f"unsupported archive format: {ext}",
+ )
+ if "binary" not in self._formats and ext == WHEEL_EXTENSION:
+ reason = f"No binaries permitted for {self.project_name}"
+ return (LinkType.format_unsupported, reason)
+ if "macosx10" in link.path and ext == ".zip":
+ return (LinkType.format_unsupported, "macosx10 one")
+ if ext == WHEEL_EXTENSION:
+ try:
+ wheel = Wheel(link.filename)
+ except InvalidWheelFilename:
+ return (
+ LinkType.format_invalid,
+ "invalid wheel filename",
+ )
+ if canonicalize_name(wheel.name) != self._canonical_name:
+ reason = f"wrong project name (not {self.project_name})"
+ return (LinkType.different_project, reason)
+
+ supported_tags = self._target_python.get_tags()
+ if not wheel.supported(supported_tags):
+ # Include the wheel's tags in the reason string to
+ # simplify troubleshooting compatibility issues.
+ file_tags = ", ".join(wheel.get_formatted_file_tags())
+ reason = (
+ f"none of the wheel's tags ({file_tags}) are compatible "
+ f"(run pip debug --verbose to show compatible tags)"
+ )
+ return (LinkType.platform_mismatch, reason)
+
+ version = wheel.version
+
+ # This should be up by the self.ok_binary check, but see issue 2700.
+ if "source" not in self._formats and ext != WHEEL_EXTENSION:
+ reason = f"No sources permitted for {self.project_name}"
+ return (LinkType.format_unsupported, reason)
+
+ if not version:
+ version = _extract_version_from_fragment(
+ egg_info,
+ self._canonical_name,
+ )
+ if not version:
+ reason = f"Missing project version for {self.project_name}"
+ return (LinkType.format_invalid, reason)
+
+ match = self._py_version_re.search(version)
+ if match:
+ version = version[: match.start()]
+ py_version = match.group(1)
+ if py_version != self._target_python.py_version:
+ return (
+ LinkType.platform_mismatch,
+ "Python version is incorrect",
+ )
+
+ supports_python = _check_link_requires_python(
+ link,
+ version_info=self._target_python.py_version_info,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+ if not supports_python:
+ reason = f"{version} Requires-Python {link.requires_python}"
+ return (LinkType.requires_python_mismatch, reason)
+
+ logger.debug("Found link %s, version: %s", link, version)
+
+ return (LinkType.candidate, version)
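+ # For example (illustrative): a link to "example-1.0-py3-none-any.whl"
+ # with compatible tags evaluates to (LinkType.candidate, "1.0"), while
+ # the same link with "binary" excluded from the allowed formats yields
+ # (LinkType.format_unsupported, "No binaries permitted for example").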
+
+
+def filter_unallowed_hashes(
+ candidates: List[InstallationCandidate],
+ hashes: Optional[Hashes],
+ project_name: str,
+) -> List[InstallationCandidate]:
+ """
+ Filter out candidates whose hashes aren't allowed, and return a new
+ list of candidates.
+
+ If at least one candidate has an allowed hash, then all candidates with
+ either an allowed hash or no hash specified are returned. Otherwise,
+ the given candidates are returned.
+
+ Including the candidates with no hash specified when there is a match
+ allows a warning to be logged if there is a more preferred candidate
+ with no hash specified. Returning all candidates in the case of no
+ matches lets pip report the hash of the candidate that would otherwise
+ have been installed (e.g. permitting the user to more easily update
+ their requirements file with the desired hash).
+ """
+ if not hashes:
+ logger.debug(
+ "Given no hashes to check %s links for project %r: "
+ "discarding no candidates",
+ len(candidates),
+ project_name,
+ )
+ # Make sure we're not returning the given list itself.
+ return list(candidates)
+
+ matches_or_no_digest = []
+ # Collect the non-matches for logging purposes.
+ non_matches = []
+ match_count = 0
+ for candidate in candidates:
+ link = candidate.link
+ if not link.has_hash:
+ pass
+ elif link.is_hash_allowed(hashes=hashes):
+ match_count += 1
+ else:
+ non_matches.append(candidate)
+ continue
+
+ matches_or_no_digest.append(candidate)
+
+ if match_count:
+ filtered = matches_or_no_digest
+ else:
+ # Make sure we're not returning the given list itself.
+ filtered = list(candidates)
+
+ if len(filtered) == len(candidates):
+ discard_message = "discarding no candidates"
+ else:
+ discard_message = "discarding {} non-matches:\n {}".format(
+ len(non_matches),
+ "\n ".join(str(candidate.link) for candidate in non_matches),
+ )
+
+ logger.debug(
+ "Checked %s links for project %r against %s hashes "
+ "(%s matches, %s no digest): %s",
+ len(candidates),
+ project_name,
+ hashes.digest_count,
+ match_count,
+ len(matches_or_no_digest) - match_count,
+ discard_message,
+ )
+
+ return filtered
+
+
+class CandidatePreferences:
+
+ """
+ Encapsulates some of the preferences for filtering and sorting
+ InstallationCandidate objects.
+ """
+
+ def __init__(
+ self,
+ prefer_binary: bool = False,
+ allow_all_prereleases: bool = False,
+ ) -> None:
+ """
+ :param prefer_binary: Whether to prefer wheels over source archives.
+ :param allow_all_prereleases: Whether to allow all pre-releases.
+ """
+ self.allow_all_prereleases = allow_all_prereleases
+ self.prefer_binary = prefer_binary
+
+
+class BestCandidateResult:
+ """A collection of candidates, returned by `PackageFinder.find_best_candidate`.
+
+ This class is only intended to be instantiated by CandidateEvaluator's
+ `compute_best_candidate()` method.
+ """
+
+ def __init__(
+ self,
+ candidates: List[InstallationCandidate],
+ applicable_candidates: List[InstallationCandidate],
+ best_candidate: Optional[InstallationCandidate],
+ ) -> None:
+ """
+ :param candidates: A sequence of all available candidates found.
+ :param applicable_candidates: The applicable candidates.
+ :param best_candidate: The most preferred candidate found, or None
+ if no applicable candidates were found.
+ """
+ assert set(applicable_candidates) <= set(candidates)
+
+ if best_candidate is None:
+ assert not applicable_candidates
+ else:
+ assert best_candidate in applicable_candidates
+
+ self._applicable_candidates = applicable_candidates
+ self._candidates = candidates
+
+ self.best_candidate = best_candidate
+
+ def iter_all(self) -> Iterable[InstallationCandidate]:
+ """Iterate through all candidates."""
+ return iter(self._candidates)
+
+ def iter_applicable(self) -> Iterable[InstallationCandidate]:
+ """Iterate through the applicable candidates."""
+ return iter(self._applicable_candidates)
+
+
+class CandidateEvaluator:
+
+ """
+ Responsible for filtering and sorting candidates for installation based
+ on what tags are valid.
+ """
+
+ @classmethod
+ def create(
+ cls,
+ project_name: str,
+ target_python: Optional[TargetPython] = None,
+ prefer_binary: bool = False,
+ allow_all_prereleases: bool = False,
+ specifier: Optional[specifiers.BaseSpecifier] = None,
+ hashes: Optional[Hashes] = None,
+ ) -> "CandidateEvaluator":
+ """Create a CandidateEvaluator object.
+
+ :param target_python: The target Python interpreter to use when
+ checking compatibility. If None (the default), a TargetPython
+ object will be constructed from the running Python.
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+ :param hashes: An optional collection of allowed hashes.
+ """
+ if target_python is None:
+ target_python = TargetPython()
+ if specifier is None:
+ specifier = specifiers.SpecifierSet()
+
+ supported_tags = target_python.get_tags()
+
+ return cls(
+ project_name=project_name,
+ supported_tags=supported_tags,
+ specifier=specifier,
+ prefer_binary=prefer_binary,
+ allow_all_prereleases=allow_all_prereleases,
+ hashes=hashes,
+ )
+
+ def __init__(
+ self,
+ project_name: str,
+ supported_tags: List[Tag],
+ specifier: specifiers.BaseSpecifier,
+ prefer_binary: bool = False,
+ allow_all_prereleases: bool = False,
+ hashes: Optional[Hashes] = None,
+ ) -> None:
+ """
+ :param supported_tags: The PEP 425 tags supported by the target
+ Python in order of preference (most preferred first).
+ """
+ self._allow_all_prereleases = allow_all_prereleases
+ self._hashes = hashes
+ self._prefer_binary = prefer_binary
+ self._project_name = project_name
+ self._specifier = specifier
+ self._supported_tags = supported_tags
+ # Since the index of the tag in the _supported_tags list is used
+ # as a priority, precompute a map from tag to index/priority to be
+ # used in wheel.find_most_preferred_tag.
+ self._wheel_tag_preferences = {
+ tag: idx for idx, tag in enumerate(supported_tags)
+ }
+
+ def get_applicable_candidates(
+ self,
+ candidates: List[InstallationCandidate],
+ ) -> List[InstallationCandidate]:
+ """
+ Return the applicable candidates from a list of candidates.
+ """
+ # Passing None lets the specifier infer whether to allow prereleases.
+ allow_prereleases = self._allow_all_prereleases or None
+ specifier = self._specifier
+ versions = {
+ str(v)
+ for v in specifier.filter(
+ # We turn the version object into a str here because otherwise
+ # when we're debundled but setuptools isn't, Python will see
+ # packaging.version.Version and
+ # pkg_resources._vendor.packaging.version.Version as different
+ # types. This way we'll use a str as a common data interchange
+ # format. If we stop using the pkg_resources provided specifier
+ # and start using our own, we can drop the cast to str().
+ (str(c.version) for c in candidates),
+ prereleases=allow_prereleases,
+ )
+ }
+
+ # Again, converting version to str to deal with debundling.
+ applicable_candidates = [c for c in candidates if str(c.version) in versions]
+
+ filtered_applicable_candidates = filter_unallowed_hashes(
+ candidates=applicable_candidates,
+ hashes=self._hashes,
+ project_name=self._project_name,
+ )
+
+ return sorted(filtered_applicable_candidates, key=self._sort_key)
+
+ def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
+ """
+ Function to pass as the `key` argument to a call to sorted() to sort
+ InstallationCandidates by preference.
+
+ Returns a tuple such that tuples sorting as greater using Python's
+ default comparison operator are more preferred.
+
+ The preference is as follows:
+
+ First and foremost, candidates with allowed (matching) hashes are
+ always preferred over candidates without matching hashes. This is
+ because e.g. if the only candidate with an allowed hash is yanked,
+ we still want to use that candidate.
+
+ Second, excepting hash considerations, candidates that have been
+ yanked (in the sense of PEP 592) are always less preferred than
+ candidates that haven't been yanked. Then:
+
+ If not finding wheels, they are sorted by version only.
+ If finding wheels, then the sort order is by version, then:
+ 1. existing installs
+ 2. wheels ordered via Wheel.support_index_min(self._supported_tags)
+ 3. source archives
+ If prefer_binary was set, then all wheels are sorted above sources.
+
+ Note: it was considered to embed this logic into the Link
+ comparison operators, but then different sdist links
+ with the same version would have to be considered equal.
+ """
+ valid_tags = self._supported_tags
+ support_num = len(valid_tags)
+ build_tag: BuildTag = ()
+ binary_preference = 0
+ link = candidate.link
+ if link.is_wheel:
+ # can raise InvalidWheelFilename
+ wheel = Wheel(link.filename)
+ try:
+ pri = -(
+ wheel.find_most_preferred_tag(
+ valid_tags, self._wheel_tag_preferences
+ )
+ )
+ except ValueError:
+ raise UnsupportedWheel(
+ "{} is not a supported wheel for this platform. It "
+ "can't be sorted.".format(wheel.filename)
+ )
+ if self._prefer_binary:
+ binary_preference = 1
+ if wheel.build_tag is not None:
+ match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
+ assert match is not None, "guaranteed by filename validation"
+ build_tag_groups = match.groups()
+ build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+ else: # sdist
+ pri = -(support_num)
+ has_allowed_hash = int(link.is_hash_allowed(self._hashes))
+ yank_value = -1 * int(link.is_yanked) # -1 for yanked.
+ return (
+ has_allowed_hash,
+ yank_value,
+ binary_preference,
+ candidate.version,
+ pri,
+ build_tag,
+ )
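+ # Worked example (illustrative): with prefer_binary set, a yanked wheel
+ # whose hash matches sorts as (1, -1, 1, version, -tag_index, build_tag),
+ # which still beats any candidate without a matching hash, because
+ # has_allowed_hash is compared first.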
+
+ def sort_best_candidate(
+ self,
+ candidates: List[InstallationCandidate],
+ ) -> Optional[InstallationCandidate]:
+ """
+ Return the best candidate per the instance's sort order, or None if
+ no candidate is acceptable.
+ """
+ if not candidates:
+ return None
+ best_candidate = max(candidates, key=self._sort_key)
+ return best_candidate
+
+ def compute_best_candidate(
+ self,
+ candidates: List[InstallationCandidate],
+ ) -> BestCandidateResult:
+ """
+ Compute and return a `BestCandidateResult` instance.
+ """
+ applicable_candidates = self.get_applicable_candidates(candidates)
+
+ best_candidate = self.sort_best_candidate(applicable_candidates)
+
+ return BestCandidateResult(
+ candidates,
+ applicable_candidates=applicable_candidates,
+ best_candidate=best_candidate,
+ )
+
+
+class PackageFinder:
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links.
+ """
+
+ def __init__(
+ self,
+ link_collector: LinkCollector,
+ target_python: TargetPython,
+ allow_yanked: bool,
+ format_control: Optional[FormatControl] = None,
+ candidate_prefs: Optional[CandidatePreferences] = None,
+ ignore_requires_python: Optional[bool] = None,
+ ) -> None:
+ """
+ This constructor is primarily meant to be used by the create() class
+ method and from tests.
+
+ :param format_control: A FormatControl object, used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param candidate_prefs: Options to use when creating a
+ CandidateEvaluator object.
+ """
+ if candidate_prefs is None:
+ candidate_prefs = CandidatePreferences()
+
+ format_control = format_control or FormatControl(set(), set())
+
+ self._allow_yanked = allow_yanked
+ self._candidate_prefs = candidate_prefs
+ self._ignore_requires_python = ignore_requires_python
+ self._link_collector = link_collector
+ self._target_python = target_python
+
+ self.format_control = format_control
+
+ # These are boring links that have already been logged somehow.
+ self._logged_links: Set[Tuple[Link, LinkType, str]] = set()
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ @classmethod
+ def create(
+ cls,
+ link_collector: LinkCollector,
+ selection_prefs: SelectionPreferences,
+ target_python: Optional[TargetPython] = None,
+ ) -> "PackageFinder":
+ """Create a PackageFinder.
+
+ :param selection_prefs: The candidate selection preferences, as a
+ SelectionPreferences object.
+ :param target_python: The target Python interpreter to use when
+ checking compatibility. If None (the default), a TargetPython
+ object will be constructed from the running Python.
+ """
+ if target_python is None:
+ target_python = TargetPython()
+
+ candidate_prefs = CandidatePreferences(
+ prefer_binary=selection_prefs.prefer_binary,
+ allow_all_prereleases=selection_prefs.allow_all_prereleases,
+ )
+
+ return cls(
+ candidate_prefs=candidate_prefs,
+ link_collector=link_collector,
+ target_python=target_python,
+ allow_yanked=selection_prefs.allow_yanked,
+ format_control=selection_prefs.format_control,
+ ignore_requires_python=selection_prefs.ignore_requires_python,
+ )
+
+ @property
+ def target_python(self) -> TargetPython:
+ return self._target_python
+
+ @property
+ def search_scope(self) -> SearchScope:
+ return self._link_collector.search_scope
+
+ @search_scope.setter
+ def search_scope(self, search_scope: SearchScope) -> None:
+ self._link_collector.search_scope = search_scope
+
+ @property
+ def find_links(self) -> List[str]:
+ return self._link_collector.find_links
+
+ @property
+ def index_urls(self) -> List[str]:
+ return self.search_scope.index_urls
+
+ @property
+ def trusted_hosts(self) -> Iterable[str]:
+ for host_port in self._link_collector.session.pip_trusted_origins:
+ yield build_netloc(*host_port)
+
+ @property
+ def allow_all_prereleases(self) -> bool:
+ return self._candidate_prefs.allow_all_prereleases
+
+ def set_allow_all_prereleases(self) -> None:
+ self._candidate_prefs.allow_all_prereleases = True
+
+ @property
+ def prefer_binary(self) -> bool:
+ return self._candidate_prefs.prefer_binary
+
+ def set_prefer_binary(self) -> None:
+ self._candidate_prefs.prefer_binary = True
+
+ def requires_python_skipped_reasons(self) -> List[str]:
+ reasons = {
+ detail
+ for _, result, detail in self._logged_links
+ if result == LinkType.requires_python_mismatch
+ }
+ return sorted(reasons)
+
+ def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
+ canonical_name = canonicalize_name(project_name)
+ formats = self.format_control.get_allowed_formats(canonical_name)
+
+ return LinkEvaluator(
+ project_name=project_name,
+ canonical_name=canonical_name,
+ formats=formats,
+ target_python=self._target_python,
+ allow_yanked=self._allow_yanked,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+
+ def _sort_links(self, links: Iterable[Link]) -> List[Link]:
+ """
+ Returns elements of links in order, non-egg links first, egg links
+ second, while eliminating duplicates
+ """
+ eggs, no_eggs = [], []
+ seen: Set[Link] = set()
+ for link in links:
+ if link not in seen:
+ seen.add(link)
+ if link.egg_fragment:
+ eggs.append(link)
+ else:
+ no_eggs.append(link)
+ return no_eggs + eggs
+
+ def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
+ entry = (link, result, detail)
+ if entry not in self._logged_links:
+ # Put the link at the end so the reason is more visible and because
+ # the link string is usually very long.
+ logger.debug("Skipping link: %s: %s", detail, link)
+ self._logged_links.add(entry)
+
+ def get_install_candidate(
+ self, link_evaluator: LinkEvaluator, link: Link
+ ) -> Optional[InstallationCandidate]:
+ """
+ If the link is a candidate for install, convert it to an
+ InstallationCandidate and return it. Otherwise, return None.
+ """
+ result, detail = link_evaluator.evaluate_link(link)
+ if result != LinkType.candidate:
+ self._log_skipped_link(link, result, detail)
+ return None
+
+ return InstallationCandidate(
+ name=link_evaluator.project_name,
+ link=link,
+ version=detail,
+ )
+
+ def evaluate_links(
+ self, link_evaluator: LinkEvaluator, links: Iterable[Link]
+ ) -> List[InstallationCandidate]:
+ """
+ Convert links that are candidates to InstallationCandidate objects.
+ """
+ candidates = []
+ for link in self._sort_links(links):
+ candidate = self.get_install_candidate(link_evaluator, link)
+ if candidate is not None:
+ candidates.append(candidate)
+
+ return candidates
+
+ def process_project_url(
+ self, project_url: Link, link_evaluator: LinkEvaluator
+ ) -> List[InstallationCandidate]:
+ logger.debug(
+ "Fetching project page and analyzing links: %s",
+ project_url,
+ )
+ index_response = self._link_collector.fetch_response(project_url)
+ if index_response is None:
+ return []
+
+ page_links = list(parse_links(index_response))
+
+ with indent_log():
+ package_links = self.evaluate_links(
+ link_evaluator,
+ links=page_links,
+ )
+
+ return package_links
+
+ @functools.lru_cache(maxsize=None)
+ def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
+ """Find all available InstallationCandidate for project_name
+
+ This checks index_urls and find_links.
+ All versions found are returned as an InstallationCandidate list.
+
+ See LinkEvaluator.evaluate_link() for details on which files
+ are accepted.
+ """
+ link_evaluator = self.make_link_evaluator(project_name)
+
+ collected_sources = self._link_collector.collect_sources(
+ project_name=project_name,
+ candidates_from_page=functools.partial(
+ self.process_project_url,
+ link_evaluator=link_evaluator,
+ ),
+ )
+
+ page_candidates_it = itertools.chain.from_iterable(
+ source.page_candidates()
+ for sources in collected_sources
+ for source in sources
+ if source is not None
+ )
+ page_candidates = list(page_candidates_it)
+
+ file_links_it = itertools.chain.from_iterable(
+ source.file_links()
+ for sources in collected_sources
+ for source in sources
+ if source is not None
+ )
+ file_candidates = self.evaluate_links(
+ link_evaluator,
+ sorted(file_links_it, reverse=True),
+ )
+
+ if logger.isEnabledFor(logging.DEBUG) and file_candidates:
+ paths = []
+ for candidate in file_candidates:
+ assert candidate.link.url # we need to have a URL
+ try:
+ paths.append(candidate.link.file_path)
+ except Exception:
+ paths.append(candidate.link.url) # it's not a local file
+
+ logger.debug("Local files found: %s", ", ".join(paths))
+
+ # This is an intentional priority ordering
+ return file_candidates + page_candidates
+
+ def make_candidate_evaluator(
+ self,
+ project_name: str,
+ specifier: Optional[specifiers.BaseSpecifier] = None,
+ hashes: Optional[Hashes] = None,
+ ) -> CandidateEvaluator:
+ """Create a CandidateEvaluator object to use."""
+ candidate_prefs = self._candidate_prefs
+ return CandidateEvaluator.create(
+ project_name=project_name,
+ target_python=self._target_python,
+ prefer_binary=candidate_prefs.prefer_binary,
+ allow_all_prereleases=candidate_prefs.allow_all_prereleases,
+ specifier=specifier,
+ hashes=hashes,
+ )
+
+ @functools.lru_cache(maxsize=None)
+ def find_best_candidate(
+ self,
+ project_name: str,
+ specifier: Optional[specifiers.BaseSpecifier] = None,
+ hashes: Optional[Hashes] = None,
+ ) -> BestCandidateResult:
+ """Find matches for the given project and specifier.
+
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+
+ :return: A `BestCandidateResult` instance.
+ """
+ candidates = self.find_all_candidates(project_name)
+ candidate_evaluator = self.make_candidate_evaluator(
+ project_name=project_name,
+ specifier=specifier,
+ hashes=hashes,
+ )
+ return candidate_evaluator.compute_best_candidate(candidates)
+
+ def find_requirement(
+ self, req: InstallRequirement, upgrade: bool
+ ) -> Optional[InstallationCandidate]:
+ """Try to find a Link matching req
+
+        Expects req, an InstallRequirement, and upgrade, a boolean.
+        Returns an InstallationCandidate if found; raises
+        DistributionNotFound or BestVersionAlreadyInstalled otherwise.
+ """
+ hashes = req.hashes(trust_internet=False)
+ best_candidate_result = self.find_best_candidate(
+ req.name,
+ specifier=req.specifier,
+ hashes=hashes,
+ )
+ best_candidate = best_candidate_result.best_candidate
+
+ installed_version: Optional[_BaseVersion] = None
+ if req.satisfied_by is not None:
+ installed_version = req.satisfied_by.version
+
+ def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
+ # This repeated parse_version and str() conversion is needed to
+ # handle different vendoring sources from pip and pkg_resources.
+ # If we stop using the pkg_resources provided specifier and start
+ # using our own, we can drop the cast to str().
+ return (
+ ", ".join(
+ sorted(
+ {str(c.version) for c in cand_iter},
+ key=parse_version,
+ )
+ )
+ or "none"
+ )
+
+ if installed_version is None and best_candidate is None:
+ logger.critical(
+ "Could not find a version that satisfies the requirement %s "
+ "(from versions: %s)",
+ req,
+ _format_versions(best_candidate_result.iter_all()),
+ )
+
+ raise DistributionNotFound(
+ "No matching distribution found for {}".format(req)
+ )
+
+ def _should_install_candidate(
+ candidate: Optional[InstallationCandidate],
+ ) -> "TypeGuard[InstallationCandidate]":
+ if installed_version is None:
+ return True
+ if best_candidate is None:
+ return False
+ return best_candidate.version > installed_version
+
+ if not upgrade and installed_version is not None:
+ if _should_install_candidate(best_candidate):
+ logger.debug(
+ "Existing installed version (%s) satisfies requirement "
+ "(most up-to-date version is %s)",
+ installed_version,
+ best_candidate.version,
+ )
+ else:
+ logger.debug(
+ "Existing installed version (%s) is most up-to-date and "
+ "satisfies requirement",
+ installed_version,
+ )
+ return None
+
+ if _should_install_candidate(best_candidate):
+ logger.debug(
+ "Using version %s (newest of versions: %s)",
+ best_candidate.version,
+ _format_versions(best_candidate_result.iter_applicable()),
+ )
+ return best_candidate
+
+        # We have an existing version, and it's the best version.
+ logger.debug(
+ "Installed version (%s) is most up-to-date (past versions: %s)",
+ installed_version,
+ _format_versions(best_candidate_result.iter_applicable()),
+ )
+ raise BestVersionAlreadyInstalled
+
+
+def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
+ """Find the separator's index based on the package's canonical name.
+
+    :param fragment: A <package>+<version> filename "fragment" (stem) or
+        egg fragment.
+ :param canonical_name: The package's canonical name.
+
+ This function is needed since the canonicalized name does not necessarily
+ have the same length as the egg info's name part. An example::
+
+ >>> fragment = 'foo__bar-1.0'
+ >>> canonical_name = 'foo-bar'
+ >>> _find_name_version_sep(fragment, canonical_name)
+ 8
+ """
+ # Project name and version must be separated by one single dash. Find all
+ # occurrences of dashes; if the string in front of it matches the canonical
+ # name, this is the one separating the name and version parts.
+ for i, c in enumerate(fragment):
+ if c != "-":
+ continue
+ if canonicalize_name(fragment[:i]) == canonical_name:
+ return i
+ raise ValueError(f"{fragment} does not match {canonical_name}")
+
+
+def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
+ """Parse the version string from a + filename
+ "fragment" (stem) or egg fragment.
+
+ :param fragment: The string to parse. E.g. foo-2.1
+ :param canonical_name: The canonicalized name of the package this
+ belongs to.
+ """
+ try:
+ version_start = _find_name_version_sep(fragment, canonical_name) + 1
+ except ValueError:
+ return None
+ version = fragment[version_start:]
+ if not version:
+ return None
+ return version
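
A minimal illustration of the _sort_key ordering documented above (toy dictionaries standing in for pip's InstallationCandidate type, not pip's actual objects): because tuple comparison is lexicographic, an allowed hash outranks yank status, which outranks version.

```python
# Toy sketch of the _sort_key preference tuple: max() picks the
# hash-allowed, non-yanked candidate even though newer versions exist.
toy_candidates = [
    {"version": (1, 0), "yanked": False, "hash_ok": True},   # old but trusted
    {"version": (2, 0), "yanked": True,  "hash_ok": True},   # newest but yanked
    {"version": (2, 0), "yanked": False, "hash_ok": False},  # newest, hash not allowed
]

def toy_sort_key(c):
    # Mirrors (has_allowed_hash, yank_value, ..., version) from above.
    return (c["hash_ok"], -int(c["yanked"]), c["version"])

print(max(toy_candidates, key=toy_sort_key))
# -> {'version': (1, 0), 'yanked': False, 'hash_ok': True}
```
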
diff --git a/.venv/Lib/site-packages/pip/_internal/index/sources.py b/.venv/Lib/site-packages/pip/_internal/index/sources.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd9cb8d40f135d1da7d2517630816605a0805fe7
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/index/sources.py
@@ -0,0 +1,223 @@
+import logging
+import mimetypes
+import os
+import pathlib
+from typing import Callable, Iterable, Optional, Tuple
+
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._internal.vcs import is_url
+
+logger = logging.getLogger(__name__)
+
+FoundCandidates = Iterable[InstallationCandidate]
+FoundLinks = Iterable[Link]
+CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
+PageValidator = Callable[[Link], bool]
+
+
+class LinkSource:
+ @property
+ def link(self) -> Optional[Link]:
+ """Returns the underlying link, if there's one."""
+ raise NotImplementedError()
+
+ def page_candidates(self) -> FoundCandidates:
+ """Candidates found by parsing an archive listing HTML file."""
+ raise NotImplementedError()
+
+ def file_links(self) -> FoundLinks:
+ """Links found by specifying archives directly."""
+ raise NotImplementedError()
+
+
+def _is_html_file(file_url: str) -> bool:
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
+
+
+class _FlatDirectorySource(LinkSource):
+ """Link source specified by ``--find-links=``.
+
+ This looks the content of the directory, and returns:
+
+ * ``page_candidates``: Links listed on each HTML file in the directory.
+ * ``file_candidates``: Archives in the directory.
+ """
+
+ def __init__(
+ self,
+ candidates_from_page: CandidatesFromPage,
+ path: str,
+ ) -> None:
+ self._candidates_from_page = candidates_from_page
+ self._path = pathlib.Path(os.path.realpath(path))
+
+ @property
+ def link(self) -> Optional[Link]:
+ return None
+
+ def page_candidates(self) -> FoundCandidates:
+ for path in self._path.iterdir():
+ url = path_to_url(str(path))
+ if not _is_html_file(url):
+ continue
+ yield from self._candidates_from_page(Link(url))
+
+ def file_links(self) -> FoundLinks:
+ for path in self._path.iterdir():
+ url = path_to_url(str(path))
+ if _is_html_file(url):
+ continue
+ yield Link(url)
+
+
+class _LocalFileSource(LinkSource):
+ """``--find-links=`` or ``--[extra-]index-url=``.
+
+ If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
+ the option, it is converted to a URL first. This returns:
+
+ * ``page_candidates``: Links listed on an HTML file.
+ * ``file_candidates``: The non-HTML file.
+ """
+
+ def __init__(
+ self,
+ candidates_from_page: CandidatesFromPage,
+ link: Link,
+ ) -> None:
+ self._candidates_from_page = candidates_from_page
+ self._link = link
+
+ @property
+ def link(self) -> Optional[Link]:
+ return self._link
+
+ def page_candidates(self) -> FoundCandidates:
+ if not _is_html_file(self._link.url):
+ return
+ yield from self._candidates_from_page(self._link)
+
+ def file_links(self) -> FoundLinks:
+ if _is_html_file(self._link.url):
+ return
+ yield self._link
+
+
+class _RemoteFileSource(LinkSource):
+ """``--find-links=`` or ``--[extra-]index-url=``.
+
+ This returns:
+
+ * ``page_candidates``: Links listed on an HTML file.
+ * ``file_candidates``: The non-HTML file.
+ """
+
+ def __init__(
+ self,
+ candidates_from_page: CandidatesFromPage,
+ page_validator: PageValidator,
+ link: Link,
+ ) -> None:
+ self._candidates_from_page = candidates_from_page
+ self._page_validator = page_validator
+ self._link = link
+
+ @property
+ def link(self) -> Optional[Link]:
+ return self._link
+
+ def page_candidates(self) -> FoundCandidates:
+ if not self._page_validator(self._link):
+ return
+ yield from self._candidates_from_page(self._link)
+
+ def file_links(self) -> FoundLinks:
+ yield self._link
+
+
+class _IndexDirectorySource(LinkSource):
+ """``--[extra-]index-url=``.
+
+ This is treated like a remote URL; ``candidates_from_page`` contains logic
+ for this by appending ``index.html`` to the link.
+ """
+
+ def __init__(
+ self,
+ candidates_from_page: CandidatesFromPage,
+ link: Link,
+ ) -> None:
+ self._candidates_from_page = candidates_from_page
+ self._link = link
+
+ @property
+ def link(self) -> Optional[Link]:
+ return self._link
+
+ def page_candidates(self) -> FoundCandidates:
+ yield from self._candidates_from_page(self._link)
+
+ def file_links(self) -> FoundLinks:
+ return ()
+
+
+def build_source(
+ location: str,
+ *,
+ candidates_from_page: CandidatesFromPage,
+ page_validator: PageValidator,
+ expand_dir: bool,
+ cache_link_parsing: bool,
+) -> Tuple[Optional[str], Optional[LinkSource]]:
+ path: Optional[str] = None
+ url: Optional[str] = None
+ if os.path.exists(location): # Is a local path.
+ url = path_to_url(location)
+ path = location
+ elif location.startswith("file:"): # A file: URL.
+ url = location
+ path = url_to_path(location)
+ elif is_url(location):
+ url = location
+
+ if url is None:
+ msg = (
+ "Location '%s' is ignored: "
+ "it is either a non-existing path or lacks a specific scheme."
+ )
+ logger.warning(msg, location)
+ return (None, None)
+
+ if path is None:
+ source: LinkSource = _RemoteFileSource(
+ candidates_from_page=candidates_from_page,
+ page_validator=page_validator,
+ link=Link(url, cache_link_parsing=cache_link_parsing),
+ )
+ return (url, source)
+
+ if os.path.isdir(path):
+ if expand_dir:
+ source = _FlatDirectorySource(
+ candidates_from_page=candidates_from_page,
+ path=path,
+ )
+ else:
+ source = _IndexDirectorySource(
+ candidates_from_page=candidates_from_page,
+ link=Link(url, cache_link_parsing=cache_link_parsing),
+ )
+ return (url, source)
+ elif os.path.isfile(path):
+ source = _LocalFileSource(
+ candidates_from_page=candidates_from_page,
+ link=Link(url, cache_link_parsing=cache_link_parsing),
+ )
+ return (url, source)
+ logger.warning(
+ "Location '%s' is ignored: it is neither a file nor a directory.",
+ location,
+ )
+ return (url, None)
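
build_source() above first classifies the location string, then picks a LinkSource subclass. A standalone sketch of that classification using only the standard library (urlparse's scheme check is a stand-in for pip's is_url() helper, so edge cases differ):

```python
# Stdlib-only sketch of build_source()'s dispatch; urlparse stands in
# for pip._internal.vcs.is_url, so treat this as an approximation.
import os
from urllib.parse import urlparse

def classify_location(location: str) -> str:
    if os.path.exists(location):
        return "local path -> directory or file source"
    if location.startswith("file:"):
        return "file: URL -> converted back to a local path"
    if urlparse(location).scheme:
        return "remote URL -> _RemoteFileSource"
    return "ignored: non-existing path without a scheme"

for loc in (".", "https://pypi.org/simple", "no-such-dir"):
    print(loc, "->", classify_location(loc))
```
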
diff --git a/.venv/Lib/site-packages/pip/_internal/locations/__init__.py b/.venv/Lib/site-packages/pip/_internal/locations/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d54bc63eba364bda3f869a0f3b1863b872f9682a
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/locations/__init__.py
@@ -0,0 +1,467 @@
+import functools
+import logging
+import os
+import pathlib
+import sys
+import sysconfig
+from typing import Any, Dict, Generator, Optional, Tuple
+
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from . import _sysconfig
+from .base import (
+ USER_CACHE_DIR,
+ get_major_minor_version,
+ get_src_prefix,
+ is_osx_framework,
+ site_packages,
+ user_site,
+)
+
+__all__ = [
+ "USER_CACHE_DIR",
+ "get_bin_prefix",
+ "get_bin_user",
+ "get_major_minor_version",
+ "get_platlib",
+ "get_purelib",
+ "get_scheme",
+ "get_src_prefix",
+ "site_packages",
+ "user_site",
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
+
+_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
+
+
+def _should_use_sysconfig() -> bool:
+ """This function determines the value of _USE_SYSCONFIG.
+
+ By default, pip uses sysconfig on Python 3.10+.
+ But Python distributors can override this decision by setting:
+ sysconfig._PIP_USE_SYSCONFIG = True / False
+ Rationale in https://github.com/pypa/pip/issues/10647
+
+ This is a function for testability, but should be constant during any one
+ run.
+ """
+ return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+if not _USE_SYSCONFIG:
+ # Import distutils lazily to avoid deprecation warnings,
+ # but import it soon enough that it is in memory and available during
+ # a pip reinstall.
+ from . import _distutils
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+ _MISMATCH_LEVEL = logging.WARNING
+else:
+ _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+ """The resolution to bpo-44860 will change this incorrect platlib.
+
+    See <https://bugs.python.org/issue44860>.
+ """
+ from distutils.command.install import INSTALL_SCHEMES
+
+ try:
+ unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
+ except KeyError:
+ return False
+ return unix_user_platlib == "$usersite"
+
+
+def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
+ platlib = scheme["platlib"]
+ if "/$platlibdir/" in platlib:
+ platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
+ if "/lib64/" not in platlib:
+ return False
+ unpatched = platlib.replace("/lib64/", "/lib/")
+ return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_lib() -> bool:
+ """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
+
+ This is the only way I can see to tell a Red Hat-patched Python.
+ """
+ from distutils.command.install import INSTALL_SCHEMES
+
+ return all(
+ k in INSTALL_SCHEMES
+ and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
+ for k in ("unix_prefix", "unix_home")
+ )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_debian_scheme() -> bool:
+ """Debian adds two additional schemes."""
+ from distutils.command.install import INSTALL_SCHEMES
+
+ return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_scheme() -> bool:
+ """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
+
+ Red Hat's ``00251-change-user-install-location.patch`` changes the install
+ command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
+ (fortunately?) done quite unconditionally, so we create a default command
+ object without any configuration to detect this.
+ """
+ from distutils.command.install import install
+ from distutils.dist import Distribution
+
+ cmd: Any = install(Distribution())
+ cmd.finalize_options()
+ return (
+ cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
+ and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
+ )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_slackware_scheme() -> bool:
+ """Slackware patches sysconfig but fails to patch distutils and site.
+
+ Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
+ path, but does not do the same to the site module.
+ """
+ if user_site is None: # User-site not available.
+ return False
+ try:
+ paths = sysconfig.get_paths(scheme="posix_user", expand=False)
+ except KeyError: # User-site not available.
+ return False
+ return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_msys2_mingw_scheme() -> bool:
+ """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
+
+ However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
+ likely going to be included in their 3.10 release, so we ignore the warning.
+ See msys2/MINGW-packages#9319.
+
+ MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
+ and is missing the final ``"site-packages"``.
+ """
+ paths = sysconfig.get_paths("nt", expand=False)
+ return all(
+ "Lib" not in p and "lib" in p and not p.endswith("site-packages")
+ for p in (paths[key] for key in ("platlib", "purelib"))
+ )
+
+
+def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
+ ldversion = sysconfig.get_config_var("LDVERSION")
+ abiflags = getattr(sys, "abiflags", None)
+
+ # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
+ if not ldversion or not abiflags or not ldversion.endswith(abiflags):
+ yield from parts
+ return
+
+ # Strip sys.abiflags from LDVERSION-based path components.
+ for part in parts:
+ if part.endswith(ldversion):
+ part = part[: (0 - len(abiflags))]
+ yield part
+
+
+@functools.lru_cache(maxsize=None)
+def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
+ issue_url = "https://github.com/pypa/pip/issues/10151"
+ message = (
+ "Value for %s does not match. Please report this to <%s>"
+ "\ndistutils: %s"
+ "\nsysconfig: %s"
+ )
+ logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
+
+
+def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
+ if old == new:
+ return False
+ _warn_mismatched(old, new, key=key)
+ return True
+
+
+@functools.lru_cache(maxsize=None)
+def _log_context(
+ *,
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ prefix: Optional[str] = None,
+) -> None:
+ parts = [
+ "Additional context:",
+ "user = %r",
+ "home = %r",
+ "root = %r",
+ "prefix = %r",
+ ]
+
+ logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
+
+
+def get_scheme(
+ dist_name: str,
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ isolated: bool = False,
+ prefix: Optional[str] = None,
+) -> Scheme:
+ new = _sysconfig.get_scheme(
+ dist_name,
+ user=user,
+ home=home,
+ root=root,
+ isolated=isolated,
+ prefix=prefix,
+ )
+ if _USE_SYSCONFIG:
+ return new
+
+ old = _distutils.get_scheme(
+ dist_name,
+ user=user,
+ home=home,
+ root=root,
+ isolated=isolated,
+ prefix=prefix,
+ )
+
+ warning_contexts = []
+ for k in SCHEME_KEYS:
+ old_v = pathlib.Path(getattr(old, k))
+ new_v = pathlib.Path(getattr(new, k))
+
+ if old_v == new_v:
+ continue
+
+ # distutils incorrectly put PyPy packages under ``site-packages/python``
+ # in the ``posix_home`` scheme, but PyPy devs said they expect the
+ # directory name to be ``pypy`` instead. So we treat this as a bug fix
+ # and not warn about it. See bpo-43307 and python/cpython#24628.
+ skip_pypy_special_case = (
+ sys.implementation.name == "pypy"
+ and home is not None
+ and k in ("platlib", "purelib")
+ and old_v.parent == new_v.parent
+ and old_v.name.startswith("python")
+ and new_v.name.startswith("pypy")
+ )
+ if skip_pypy_special_case:
+ continue
+
+ # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
+ # the ``include`` value, but distutils's ``headers`` does. We'll let
+ # CPython decide whether this is a bug or feature. See bpo-43948.
+ skip_osx_framework_user_special_case = (
+ user
+ and is_osx_framework()
+ and k == "headers"
+ and old_v.parent.parent == new_v.parent
+ and old_v.parent.name.startswith("python")
+ )
+ if skip_osx_framework_user_special_case:
+ continue
+
+ # On Red Hat and derived Linux distributions, distutils is patched to
+ # use "lib64" instead of "lib" for platlib.
+ if k == "platlib" and _looks_like_red_hat_lib():
+ continue
+
+ # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
+        # sys.platlibdir, but distutils's unix_user incorrectly continues
+ # using the same $usersite for both platlib and purelib. This creates a
+ # mismatch when sys.platlibdir is not "lib".
+ skip_bpo_44860 = (
+ user
+ and k == "platlib"
+ and not WINDOWS
+ and sys.version_info >= (3, 9)
+ and _PLATLIBDIR != "lib"
+ and _looks_like_bpo_44860()
+ )
+ if skip_bpo_44860:
+ continue
+
+ # Slackware incorrectly patches posix_user to use lib64 instead of lib,
+ # but not usersite to match the location.
+ skip_slackware_user_scheme = (
+ user
+ and k in ("platlib", "purelib")
+ and not WINDOWS
+ and _looks_like_slackware_scheme()
+ )
+ if skip_slackware_user_scheme:
+ continue
+
+ # Both Debian and Red Hat patch Python to place the system site under
+ # /usr/local instead of /usr. Debian also places lib in dist-packages
+ # instead of site-packages, but the /usr/local check should cover it.
+ skip_linux_system_special_case = (
+ not (user or home or prefix or running_under_virtualenv())
+ and old_v.parts[1:3] == ("usr", "local")
+ and len(new_v.parts) > 1
+ and new_v.parts[1] == "usr"
+ and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
+ and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
+ )
+ if skip_linux_system_special_case:
+ continue
+
+ # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
+ # the "pythonX.Y" part of the path, but distutils does.
+ skip_sysconfig_abiflag_bug = (
+ sys.version_info < (3, 8)
+ and not WINDOWS
+ and k in ("headers", "platlib", "purelib")
+ and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
+ )
+ if skip_sysconfig_abiflag_bug:
+ continue
+
+ # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
+ # part of the path. This is incorrect and will be fixed in MSYS.
+ skip_msys2_mingw_bug = (
+ WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
+ )
+ if skip_msys2_mingw_bug:
+ continue
+
+ # CPython's POSIX install script invokes pip (via ensurepip) against the
+ # interpreter located in the source tree, not the install site. This
+ # triggers special logic in sysconfig that's not present in distutils.
+ # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
+ skip_cpython_build = (
+ sysconfig.is_python_build(check_home=True)
+ and not WINDOWS
+ and k in ("headers", "include", "platinclude")
+ )
+ if skip_cpython_build:
+ continue
+
+ warning_contexts.append((old_v, new_v, f"scheme.{k}"))
+
+ if not warning_contexts:
+ return old
+
+ # Check if this path mismatch is caused by distutils config files. Those
+ # files will no longer work once we switch to sysconfig, so this raises a
+ # deprecation message for them.
+ default_old = _distutils.distutils_scheme(
+ dist_name,
+ user,
+ home,
+ root,
+ isolated,
+ prefix,
+ ignore_config_files=True,
+ )
+ if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
+ deprecated(
+ reason=(
+ "Configuring installation scheme with distutils config files "
+ "is deprecated and will no longer work in the near future. If you "
+ "are using a Homebrew or Linuxbrew Python, please see discussion "
+ "at https://github.com/Homebrew/homebrew-core/issues/76621"
+ ),
+ replacement=None,
+ gone_in=None,
+ )
+ return old
+
+ # Post warnings about this mismatch so user can report them back.
+ for old_v, new_v, key in warning_contexts:
+ _warn_mismatched(old_v, new_v, key=key)
+ _log_context(user=user, home=home, root=root, prefix=prefix)
+
+ return old
+
+
+def get_bin_prefix() -> str:
+ new = _sysconfig.get_bin_prefix()
+ if _USE_SYSCONFIG:
+ return new
+
+ old = _distutils.get_bin_prefix()
+ if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
+ _log_context()
+ return old
+
+
+def get_bin_user() -> str:
+ return _sysconfig.get_scheme("", user=True).scripts
+
+
+def _looks_like_deb_system_dist_packages(value: str) -> bool:
+ """Check if the value is Debian's APT-controlled dist-packages.
+
+ Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
+ default package path controlled by APT, but does not patch ``sysconfig`` to
+ do the same. This is similar to the bug worked around in ``get_scheme()``,
+ but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
+ we can't do anything about this Debian bug, and this detection allows us to
+ skip the warning when needed.
+ """
+ if not _looks_like_debian_scheme():
+ return False
+ if value == "/usr/lib/python3/dist-packages":
+ return True
+ return False
+
+
+def get_purelib() -> str:
+ """Return the default pure-Python lib location."""
+ new = _sysconfig.get_purelib()
+ if _USE_SYSCONFIG:
+ return new
+
+ old = _distutils.get_purelib()
+ if _looks_like_deb_system_dist_packages(old):
+ return old
+ if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
+ _log_context()
+ return old
+
+
+def get_platlib() -> str:
+ """Return the default platform-shared lib location."""
+ new = _sysconfig.get_platlib()
+ if _USE_SYSCONFIG:
+ return new
+
+ from . import _distutils
+
+ old = _distutils.get_platlib()
+ if _looks_like_deb_system_dist_packages(old):
+ return old
+ if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
+ _log_context()
+ return old
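
On Python 3.10+ the functions above simply return the sysconfig values, so the "new" side of every comparison can be inspected directly with the standard library (a read-only snippet, independent of pip):

```python
# Print the interpreter's default sysconfig paths -- the values pip's
# get_scheme()/get_purelib()/get_platlib() prefer on Python 3.10+.
import sysconfig

paths = sysconfig.get_paths()
for key in ("purelib", "platlib", "scripts", "data", "include"):
    print(f"{key:8} = {paths[key]}")
```
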
diff --git a/.venv/Lib/site-packages/pip/_internal/locations/_distutils.py b/.venv/Lib/site-packages/pip/_internal/locations/_distutils.py
new file mode 100644
index 0000000000000000000000000000000000000000..92bd93179c5cd3cb377c8b9f1e9d22d13fd7d003
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/locations/_distutils.py
@@ -0,0 +1,173 @@
+"""Locations where we look for configs, install stuff, etc"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+# If pip's going to use distutils, it should not be using the copy that setuptools
+# might have injected into the environment. This is done by removing the injected
+# shim, if it's injected.
+#
+# See https://github.com/pypa/pip/issues/8761 for the original discussion and
+# rationale for why this is done within pip.
+try:
+ __import__("_distutils_hack").remove_shim()
+except (ImportError, AttributeError):
+ pass
+
+import logging
+import os
+import sys
+from distutils.cmd import Command as DistutilsCommand
+from distutils.command.install import SCHEME_KEYS
+from distutils.command.install import install as distutils_install_command
+from distutils.sysconfig import get_python_lib
+from typing import Dict, List, Optional, Union, cast
+
+from pip._internal.models.scheme import Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import get_major_minor_version
+
+logger = logging.getLogger(__name__)
+
+
+def distutils_scheme(
+ dist_name: str,
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ isolated: bool = False,
+ prefix: Optional[str] = None,
+ *,
+ ignore_config_files: bool = False,
+) -> Dict[str, str]:
+ """
+ Return a distutils install scheme
+ """
+ from distutils.dist import Distribution
+
+ dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
+ if isolated:
+ dist_args["script_args"] = ["--no-user-cfg"]
+
+ d = Distribution(dist_args)
+ if not ignore_config_files:
+ try:
+ d.parse_config_files()
+ except UnicodeDecodeError:
+ # Typeshed does not include find_config_files() for some reason.
+ paths = d.find_config_files() # type: ignore
+ logger.warning(
+ "Ignore distutils configs in %s due to encoding errors.",
+ ", ".join(os.path.basename(p) for p in paths),
+ )
+ obj: Optional[DistutilsCommand] = None
+ obj = d.get_command_obj("install", create=True)
+ assert obj is not None
+ i = cast(distutils_install_command, obj)
+ # NOTE: setting user or home has the side-effect of creating the home dir
+ # or user base for installations during finalize_options()
+ # ideally, we'd prefer a scheme class that has no side-effects.
+ assert not (user and prefix), f"user={user} prefix={prefix}"
+ assert not (home and prefix), f"home={home} prefix={prefix}"
+ i.user = user or i.user
+ if user or home:
+ i.prefix = ""
+ i.prefix = prefix or i.prefix
+ i.home = home or i.home
+ i.root = root or i.root
+ i.finalize_options()
+
+ scheme = {}
+ for key in SCHEME_KEYS:
+ scheme[key] = getattr(i, "install_" + key)
+
+ # install_lib specified in setup.cfg should install *everything*
+ # into there (i.e. it takes precedence over both purelib and
+ # platlib). Note, i.install_lib is *always* set after
+ # finalize_options(); we only want to override here if the user
+ # has explicitly requested it hence going back to the config
+ if "install_lib" in d.get_option_dict("install"):
+ scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
+
+ if running_under_virtualenv():
+ if home:
+ prefix = home
+ elif user:
+ prefix = i.install_userbase
+ else:
+ prefix = i.prefix
+ scheme["headers"] = os.path.join(
+ prefix,
+ "include",
+ "site",
+ f"python{get_major_minor_version()}",
+ dist_name,
+ )
+
+ if root is not None:
+ path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
+ scheme["headers"] = os.path.join(root, path_no_drive[1:])
+
+ return scheme
+
+
+def get_scheme(
+ dist_name: str,
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ isolated: bool = False,
+ prefix: Optional[str] = None,
+) -> Scheme:
+ """
+ Get the "scheme" corresponding to the input parameters. The distutils
+ documentation provides the context for the available schemes:
+ https://docs.python.org/3/install/index.html#alternate-installation
+
+ :param dist_name: the name of the package to retrieve the scheme for, used
+ in the headers scheme path
+ :param user: indicates to use the "user" scheme
+ :param home: indicates to use the "home" scheme and provides the base
+ directory for the same
+ :param root: root under which other directories are re-based
+ :param isolated: equivalent to --no-user-cfg, i.e. do not consider
+ ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
+ scheme paths
+ :param prefix: indicates to use the "prefix" scheme and provides the
+ base directory for the same
+ """
+ scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
+ return Scheme(
+ platlib=scheme["platlib"],
+ purelib=scheme["purelib"],
+ headers=scheme["headers"],
+ scripts=scheme["scripts"],
+ data=scheme["data"],
+ )
+
+
+def get_bin_prefix() -> str:
+ # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
+ # so we need to call normpath to eliminate them.
+ prefix = os.path.normpath(sys.prefix)
+ if WINDOWS:
+ bin_py = os.path.join(prefix, "Scripts")
+ # buildout uses 'bin' on Windows too?
+ if not os.path.exists(bin_py):
+ bin_py = os.path.join(prefix, "bin")
+ return bin_py
+ # Forcing to use /usr/local/bin for standard macOS framework installs
+ # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+ if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
+ return "/usr/local/bin"
+ return os.path.join(prefix, "bin")
+
+
+def get_purelib() -> str:
+ return get_python_lib(plat_specific=False)
+
+
+def get_platlib() -> str:
+ return get_python_lib(plat_specific=True)
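
The heart of distutils_scheme() above is driving the distutils "install" command's option finalization and reading the resulting install_* attributes. A minimal sketch of just that step (assumes an interpreter that still ships distutils, i.e. Python 3.11 or earlier):

```python
# Minimal sketch of distutils_scheme()'s core: finalize the "install"
# command with default options and collect the install_* attributes.
from distutils.command.install import SCHEME_KEYS, install
from distutils.dist import Distribution

cmd = install(Distribution({"name": "example"}))
cmd.finalize_options()
print({key: getattr(cmd, "install_" + key) for key in SCHEME_KEYS})
```
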
diff --git a/.venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py b/.venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..97aef1f1ac237e6ef97b1a1d026818d7b8ab9be9
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py
@@ -0,0 +1,213 @@
+import logging
+import os
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import change_root, get_major_minor_version, is_osx_framework
+
+logger = logging.getLogger(__name__)
+
+
+# Notes on _infer_* functions.
+# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
+# way to ask things like "what is the '_prefix' scheme on this platform". These
+# functions try to answer that with some heuristics while accounting for ad-hoc
+# platforms not covered by CPython's default sysconfig implementation. If the
+# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
+# a POSIX scheme.
+
+_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
+
+_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
+
+
+def _should_use_osx_framework_prefix() -> bool:
+ """Check for Apple's ``osx_framework_library`` scheme.
+
+ Python distributed by Apple's Command Line Tools has this special scheme
+ that's used when:
+
+ * This is a framework build.
+ * We are installing into the system prefix.
+
+ This does not account for ``pip install --prefix`` (also means we're not
+ installing to the system prefix), which should use ``posix_prefix``, but
+ logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+ since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+ which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+ wouldn't be able to magically switch between ``osx_framework_library`` and
+ ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+ means its behavior is consistent whether we use the stdlib implementation
+ or our own, and we deal with this special case in ``get_scheme()`` instead.
+ """
+ return (
+ "osx_framework_library" in _AVAILABLE_SCHEMES
+ and not running_under_virtualenv()
+ and is_osx_framework()
+ )
+
+
+def _infer_prefix() -> str:
+ """Try to find a prefix scheme for the current platform.
+
+ This tries:
+
+ * A special ``osx_framework_library`` for Python distributed by Apple's
+ Command Line Tools, when not running in a virtual environment.
+ * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+ * Implementation without OS, used by PyPy on POSIX (``pypy``).
+ * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+ * Just the OS name, used by CPython on Windows (``nt``).
+
+ If none of the above works, fall back to ``posix_prefix``.
+ """
+ if _PREFERRED_SCHEME_API:
+ return _PREFERRED_SCHEME_API("prefix")
+ if _should_use_osx_framework_prefix():
+ return "osx_framework_library"
+ implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+ if implementation_suffixed in _AVAILABLE_SCHEMES:
+ return implementation_suffixed
+ if sys.implementation.name in _AVAILABLE_SCHEMES:
+ return sys.implementation.name
+ suffixed = f"{os.name}_prefix"
+ if suffixed in _AVAILABLE_SCHEMES:
+ return suffixed
+    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
+ return os.name
+ return "posix_prefix"
+
+
+def _infer_user() -> str:
+ """Try to find a user scheme for the current platform."""
+ if _PREFERRED_SCHEME_API:
+ return _PREFERRED_SCHEME_API("user")
+ if is_osx_framework() and not running_under_virtualenv():
+ suffixed = "osx_framework_user"
+ else:
+ suffixed = f"{os.name}_user"
+ if suffixed in _AVAILABLE_SCHEMES:
+ return suffixed
+ if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable.
+ raise UserInstallationInvalid()
+ return "posix_user"
+
+
+def _infer_home() -> str:
+ """Try to find a home for the current platform."""
+ if _PREFERRED_SCHEME_API:
+ return _PREFERRED_SCHEME_API("home")
+ suffixed = f"{os.name}_home"
+ if suffixed in _AVAILABLE_SCHEMES:
+ return suffixed
+ return "posix_home"
+
+
+# Update these keys if the user sets a custom home.
+_HOME_KEYS = [
+ "installed_base",
+ "base",
+ "installed_platbase",
+ "platbase",
+ "prefix",
+ "exec_prefix",
+]
+if sysconfig.get_config_var("userbase") is not None:
+ _HOME_KEYS.append("userbase")
+
+
+def get_scheme(
+ dist_name: str,
+ user: bool = False,
+ home: typing.Optional[str] = None,
+ root: typing.Optional[str] = None,
+ isolated: bool = False,
+ prefix: typing.Optional[str] = None,
+) -> Scheme:
+ """
+ Get the "scheme" corresponding to the input parameters.
+
+ :param dist_name: the name of the package to retrieve the scheme for, used
+ in the headers scheme path
+ :param user: indicates to use the "user" scheme
+ :param home: indicates to use the "home" scheme
+ :param root: root under which other directories are re-based
+ :param isolated: ignored, but kept for distutils compatibility (where
+ this controls whether the user-site pydistutils.cfg is honored)
+ :param prefix: indicates to use the "prefix" scheme and provides the
+ base directory for the same
+ """
+ if user and prefix:
+ raise InvalidSchemeCombination("--user", "--prefix")
+ if home and prefix:
+ raise InvalidSchemeCombination("--home", "--prefix")
+
+ if home is not None:
+ scheme_name = _infer_home()
+ elif user:
+ scheme_name = _infer_user()
+ else:
+ scheme_name = _infer_prefix()
+
+ # Special case: When installing into a custom prefix, use posix_prefix
+ # instead of osx_framework_library. See _should_use_osx_framework_prefix()
+ # docstring for details.
+ if prefix is not None and scheme_name == "osx_framework_library":
+ scheme_name = "posix_prefix"
+
+ if home is not None:
+ variables = {k: home for k in _HOME_KEYS}
+ elif prefix is not None:
+ variables = {k: prefix for k in _HOME_KEYS}
+ else:
+ variables = {}
+
+ paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
+
+    # Logic here is very arbitrary; we're doing it for compatibility, don't ask.
+ # 1. Pip historically uses a special header path in virtual environments.
+ # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
+ # only do the same when not running in a virtual environment because
+ # pip's historical header path logic (see point 1) did not do this.
+ if running_under_virtualenv():
+ if user:
+ base = variables.get("userbase", sys.prefix)
+ else:
+ base = variables.get("base", sys.prefix)
+ python_xy = f"python{get_major_minor_version()}"
+ paths["include"] = os.path.join(base, "include", "site", python_xy)
+ elif not dist_name:
+ dist_name = "UNKNOWN"
+
+ scheme = Scheme(
+ platlib=paths["platlib"],
+ purelib=paths["purelib"],
+ headers=os.path.join(paths["include"], dist_name),
+ scripts=paths["scripts"],
+ data=paths["data"],
+ )
+ if root is not None:
+ for key in SCHEME_KEYS:
+ value = change_root(root, getattr(scheme, key))
+ setattr(scheme, key, value)
+ return scheme
+
+
+def get_bin_prefix() -> str:
+ # Forcing to use /usr/local/bin for standard macOS framework installs.
+ if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
+ return "/usr/local/bin"
+ return sysconfig.get_paths()["scripts"]
+
+
+def get_purelib() -> str:
+ return sysconfig.get_paths()["purelib"]
+
+
+def get_platlib() -> str:
+ return sysconfig.get_paths()["platlib"]
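
The _infer_prefix()/_infer_user()/_infer_home() probing above exists only because sysconfig.get_preferred_scheme() arrived in Python 3.10; on a modern interpreter the question can be answered directly:

```python
# On Python 3.10+ the stdlib names the preferred scheme itself; the
# _infer_* heuristics above are the fallback for older interpreters.
import sysconfig

print(sorted(sysconfig.get_scheme_names()))
if hasattr(sysconfig, "get_preferred_scheme"):  # Python 3.10+
    print(sysconfig.get_preferred_scheme("prefix"))
```
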
diff --git a/.venv/Lib/site-packages/pip/_internal/locations/base.py b/.venv/Lib/site-packages/pip/_internal/locations/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f9f896e632e929a63e9724ab80ecdfc9761b795
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/locations/base.py
@@ -0,0 +1,81 @@
+import functools
+import os
+import site
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils import appdirs
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+# FIXME doesn't account for venv linked to global site-packages
+site_packages: str = sysconfig.get_path("purelib")
+
+
+def get_major_minor_version() -> str:
+ """
+ Return the major-minor version of the current Python as a string, e.g.
+ "3.7" or "3.10".
+ """
+ return "{}.{}".format(*sys.version_info)
+
+
+def change_root(new_root: str, pathname: str) -> str:
+ """Return 'pathname' with 'new_root' prepended.
+
+ If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
+ Otherwise, it requires making 'pathname' relative and then joining the
+ two, which is tricky on DOS/Windows and Mac OS.
+
+ This is borrowed from Python's standard library's distutils module.
+ """
+ if os.name == "posix":
+ if not os.path.isabs(pathname):
+ return os.path.join(new_root, pathname)
+ else:
+ return os.path.join(new_root, pathname[1:])
+
+ elif os.name == "nt":
+ (drive, path) = os.path.splitdrive(pathname)
+ if path[0] == "\\":
+ path = path[1:]
+ return os.path.join(new_root, path)
+
+ else:
+ raise InstallationError(
+ f"Unknown platform: {os.name}\n"
+ "Can not change root path prefix on unknown platform."
+ )
+
+
+def get_src_prefix() -> str:
+ if running_under_virtualenv():
+ src_prefix = os.path.join(sys.prefix, "src")
+ else:
+ # FIXME: keep src in cwd for now (it is not a temporary folder)
+ try:
+ src_prefix = os.path.join(os.getcwd(), "src")
+ except OSError:
+ # In case the current working directory has been renamed or deleted
+ sys.exit("The folder you are executing pip from can no longer be found.")
+
+ # under macOS + virtualenv sys.prefix is not properly resolved
+ # it is something like /path/to/python/bin/..
+ return os.path.abspath(src_prefix)
+
+
+try:
+ # Use getusersitepackages if this is present, as it ensures that the
+ # value is initialised properly.
+ user_site: typing.Optional[str] = site.getusersitepackages()
+except AttributeError:
+ user_site = site.USER_SITE
+
+
+@functools.lru_cache(maxsize=None)
+def is_osx_framework() -> bool:
+ return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
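
change_root() above is short but easy to misread; here is a self-contained copy of its POSIX branch with a worked example (POSIX path semantics assumed):

```python
# Self-contained copy of change_root()'s POSIX branch: an absolute path
# loses its leading "/" so it re-bases cleanly under the new root.
import os.path

def change_root_posix(new_root: str, pathname: str) -> str:
    if not os.path.isabs(pathname):
        return os.path.join(new_root, pathname)
    return os.path.join(new_root, pathname[1:])

print(change_root_posix("/tmp/stage", "/usr/lib/python3/dist-packages"))
# -> /tmp/stage/usr/lib/python3/dist-packages
```
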
diff --git a/.venv/Lib/site-packages/pip/_internal/main.py b/.venv/Lib/site-packages/pip/_internal/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..33c6d24cd85b55a9fb1b1e6ab784f471e2b135f0
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/main.py
@@ -0,0 +1,12 @@
+from typing import List, Optional
+
+
+def main(args: Optional[List[str]] = None) -> int:
+ """This is preserved for old console scripts that may still be referencing
+ it.
+
+ For additional details, see https://github.com/pypa/pip/issues/7498.
+ """
+ from pip._internal.utils.entrypoints import _wrapper
+
+ return _wrapper(args)
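
For context, a console script generated against this legacy entry point would have looked roughly like the following (illustrative only; new code should run pip as a subprocess rather than importing it):

```python
# Rough shape of a legacy console script that referenced
# pip._internal.main; shown for illustration, not recommended usage.
import sys

from pip._internal.main import main

if __name__ == "__main__":
    sys.exit(main(["--version"]))
```
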
diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/base.py b/.venv/Lib/site-packages/pip/_internal/metadata/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..cafb79fb3dcf43744393e2964056fe32c350bbc1
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/metadata/base.py
@@ -0,0 +1,688 @@
+import csv
+import email.message
+import functools
+import json
+import logging
+import pathlib
+import re
+import zipfile
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ Collection,
+ Container,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ NamedTuple,
+ Optional,
+ Tuple,
+ Union,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.locations import site_packages, user_site
+from pip._internal.models.direct_url import (
+ DIRECT_URL_METADATA_NAME,
+ DirectUrl,
+ DirectUrlValidationError,
+)
+from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
+from pip._internal.utils.egg_link import egg_link_path_from_sys_path
+from pip._internal.utils.misc import is_local, normalize_path
+from pip._internal.utils.packaging import safe_extra
+from pip._internal.utils.urls import url_to_path
+
+from ._json import msg_to_json
+
+if TYPE_CHECKING:
+ from typing import Protocol
+else:
+ Protocol = object
+
+DistributionVersion = Union[LegacyVersion, Version]
+
+InfoPath = Union[str, pathlib.PurePath]
+
+logger = logging.getLogger(__name__)
+
+
+class BaseEntryPoint(Protocol):
+ @property
+ def name(self) -> str:
+ raise NotImplementedError()
+
+ @property
+ def value(self) -> str:
+ raise NotImplementedError()
+
+ @property
+ def group(self) -> str:
+ raise NotImplementedError()
+
+
+def _convert_installed_files_path(
+ entry: Tuple[str, ...],
+ info: Tuple[str, ...],
+) -> str:
+ """Convert a legacy installed-files.txt path into modern RECORD path.
+
+ The legacy format stores paths relative to the info directory, while the
+ modern format stores paths relative to the package root, e.g. the
+ site-packages directory.
+
+ :param entry: Path parts of the installed-files.txt entry.
+ :param info: Path parts of the egg-info directory relative to package root.
+ :returns: The converted entry.
+
+ For best compatibility with symlinks, this does not use ``abspath()`` or
+ ``Path.resolve()``, but tries to work with path parts:
+
+ 1. While ``entry`` starts with ``..``, remove the equal amounts of parts
+ from ``info``; if ``info`` is empty, start appending ``..`` instead.
+ 2. Join the two directly.
+ """
+ while entry and entry[0] == "..":
+ if not info or info[-1] == "..":
+ info += ("..",)
+ else:
+ info = info[:-1]
+ entry = entry[1:]
+ return str(pathlib.Path(*info, *entry))
+
+
+class RequiresEntry(NamedTuple):
+ requirement: str
+ extra: str
+ marker: str
+
+
+class BaseDistribution(Protocol):
+ @classmethod
+ def from_directory(cls, directory: str) -> "BaseDistribution":
+ """Load the distribution from a metadata directory.
+
+ :param directory: Path to a metadata directory, e.g. ``.dist-info``.
+ """
+ raise NotImplementedError()
+
+ @classmethod
+ def from_metadata_file_contents(
+ cls,
+ metadata_contents: bytes,
+ filename: str,
+ project_name: str,
+ ) -> "BaseDistribution":
+ """Load the distribution from the contents of a METADATA file.
+
+ This is used to implement PEP 658 by generating a "shallow" dist object that can
+ be used for resolution without downloading or building the actual dist yet.
+
+ :param metadata_contents: The contents of a METADATA file.
+ :param filename: File name for the dist with this metadata.
+ :param project_name: Name of the project this dist represents.
+ """
+ raise NotImplementedError()
+
+ @classmethod
+ def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
+ """Load the distribution from a given wheel.
+
+ :param wheel: A concrete wheel definition.
+ :param name: File name of the wheel.
+
+ :raises InvalidWheel: Whenever loading of the wheel causes a
+ :py:exc:`zipfile.BadZipFile` exception to be thrown.
+ :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
+ internally.
+ """
+ raise NotImplementedError()
+
+ def __repr__(self) -> str:
+ return f"{self.raw_name} {self.version} ({self.location})"
+
+ def __str__(self) -> str:
+ return f"{self.raw_name} {self.version}"
+
+ @property
+ def location(self) -> Optional[str]:
+ """Where the distribution is loaded from.
+
+ A string value is not necessarily a filesystem path, since distributions
+ can be loaded from other sources, e.g. arbitrary zip archives. ``None``
+ means the distribution is created in-memory.
+
+ Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+ this is a symbolic link, we want to preserve the relative path between
+ it and files in the distribution.
+ """
+ raise NotImplementedError()
+
+ @property
+ def editable_project_location(self) -> Optional[str]:
+ """The project location for editable distributions.
+
+ This is the directory where pyproject.toml or setup.py is located.
+ None if the distribution is not installed in editable mode.
+ """
+        # TODO: this property is relatively costly to compute; memoize it?
+ direct_url = self.direct_url
+ if direct_url:
+ if direct_url.is_local_editable():
+ return url_to_path(direct_url.url)
+ else:
+ # Search for an .egg-link file by walking sys.path, as it was
+ # done before by dist_is_editable().
+ egg_link_path = egg_link_path_from_sys_path(self.raw_name)
+ if egg_link_path:
+ # TODO: get project location from second line of egg_link file
+ # (https://github.com/pypa/pip/issues/10243)
+ return self.location
+ return None
+
+ @property
+ def installed_location(self) -> Optional[str]:
+ """The distribution's "installed" location.
+
+ This should generally be a ``site-packages`` directory. This is
+ usually ``dist.location``, except for legacy develop-installed packages,
+ where ``dist.location`` is the source code location, and this is where
+ the ``.egg-link`` file is.
+
+ The returned location is normalized (in particular, with symlinks removed).
+ """
+ raise NotImplementedError()
+
+ @property
+ def info_location(self) -> Optional[str]:
+ """Location of the .[egg|dist]-info directory or file.
+
+ Similarly to ``location``, a string value is not necessarily a
+ filesystem path. ``None`` means the distribution is created in-memory.
+
+ For a modern .dist-info installation on disk, this should be something
+ like ``{location}/{raw_name}-{version}.dist-info``.
+
+ Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+ this is a symbolic link, we want to preserve the relative path between
+ it and other files in the distribution.
+ """
+ raise NotImplementedError()
+
+ @property
+ def installed_by_distutils(self) -> bool:
+ """Whether this distribution is installed with legacy distutils format.
+
+ A distribution installed with "raw" distutils not patched by setuptools
+ uses a single file at ``info_location`` to store metadata. We need to
+ treat this specially on uninstallation.
+ """
+ info_location = self.info_location
+ if not info_location:
+ return False
+ return pathlib.Path(info_location).is_file()
+
+ @property
+ def installed_as_egg(self) -> bool:
+ """Whether this distribution is installed as an egg.
+
+ This usually indicates the distribution was installed by (older versions
+ of) easy_install.
+ """
+ location = self.location
+ if not location:
+ return False
+ return location.endswith(".egg")
+
+ @property
+ def installed_with_setuptools_egg_info(self) -> bool:
+ """Whether this distribution is installed with the ``.egg-info`` format.
+
+ This usually indicates the distribution was installed with setuptools
+ with an old pip version or with ``single-version-externally-managed``.
+
+ Note that this ensures the metadata store is a directory. distutils can
+ also install an ``.egg-info``, but as a file, not a directory. This
+ property is *False* for that case. Also see ``installed_by_distutils``.
+ """
+ info_location = self.info_location
+ if not info_location:
+ return False
+ if not info_location.endswith(".egg-info"):
+ return False
+ return pathlib.Path(info_location).is_dir()
+
+ @property
+ def installed_with_dist_info(self) -> bool:
+ """Whether this distribution is installed with the "modern format".
+
+ This indicates a "modern" installation, e.g. storing metadata in the
+ ``.dist-info`` directory. This applies to installations made by
+ setuptools (but through pip, not directly), or anything using the
+ standardized build backend interface (PEP 517).
+ """
+ info_location = self.info_location
+ if not info_location:
+ return False
+ if not info_location.endswith(".dist-info"):
+ return False
+ return pathlib.Path(info_location).is_dir()
+
+ @property
+ def canonical_name(self) -> NormalizedName:
+ raise NotImplementedError()
+
+ @property
+ def version(self) -> DistributionVersion:
+ raise NotImplementedError()
+
+ @property
+ def setuptools_filename(self) -> str:
+ """Convert a project name to its setuptools-compatible filename.
+
+ This is a copy of ``pkg_resources.to_filename()`` for compatibility.
+ """
+ return self.raw_name.replace("-", "_")
+
+ @property
+ def direct_url(self) -> Optional[DirectUrl]:
+ """Obtain a DirectUrl from this distribution.
+
+ Returns None if the distribution has no `direct_url.json` metadata,
+ or if `direct_url.json` is invalid.
+ """
+ try:
+ content = self.read_text(DIRECT_URL_METADATA_NAME)
+ except FileNotFoundError:
+ return None
+ try:
+ return DirectUrl.from_json(content)
+ except (
+ UnicodeDecodeError,
+ json.JSONDecodeError,
+ DirectUrlValidationError,
+ ) as e:
+ logger.warning(
+ "Error parsing %s for %s: %s",
+ DIRECT_URL_METADATA_NAME,
+ self.canonical_name,
+ e,
+ )
+ return None
+
+ @property
+ def installer(self) -> str:
+ try:
+ installer_text = self.read_text("INSTALLER")
+ except (OSError, ValueError, NoneMetadataError):
+ return "" # Fail silently if the installer file cannot be read.
+ for line in installer_text.splitlines():
+ cleaned_line = line.strip()
+ if cleaned_line:
+ return cleaned_line
+ return ""
+
+ @property
+ def requested(self) -> bool:
+ return self.is_file("REQUESTED")
+
+ @property
+ def editable(self) -> bool:
+ return bool(self.editable_project_location)
+
+ @property
+ def local(self) -> bool:
+ """If distribution is installed in the current virtual environment.
+
+ Always True if we're not in a virtualenv.
+ """
+ if self.installed_location is None:
+ return False
+ return is_local(self.installed_location)
+
+ @property
+ def in_usersite(self) -> bool:
+ if self.installed_location is None or user_site is None:
+ return False
+ return self.installed_location.startswith(normalize_path(user_site))
+
+ @property
+ def in_site_packages(self) -> bool:
+ if self.installed_location is None or site_packages is None:
+ return False
+ return self.installed_location.startswith(normalize_path(site_packages))
+
+ def is_file(self, path: InfoPath) -> bool:
+ """Check whether an entry in the info directory is a file."""
+ raise NotImplementedError()
+
+ def iter_distutils_script_names(self) -> Iterator[str]:
+ """Find distutils 'scripts' entries metadata.
+
+ If 'scripts' is supplied in ``setup.py``, distutils records those in the
+ installed distribution's ``scripts`` directory, a file for each script.
+ """
+ raise NotImplementedError()
+
+ def read_text(self, path: InfoPath) -> str:
+ """Read a file in the info directory.
+
+ :raise FileNotFoundError: If ``path`` does not exist in the directory.
+ :raise NoneMetadataError: If ``path`` exists in the info directory, but
+ cannot be read.
+ """
+ raise NotImplementedError()
+
+ def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+ raise NotImplementedError()
+
+ def _metadata_impl(self) -> email.message.Message:
+ raise NotImplementedError()
+
+ @functools.lru_cache(maxsize=1)
+ def _metadata_cached(self) -> email.message.Message:
+ # When we drop python 3.7 support, move this to the metadata property and use
+ # functools.cached_property instead of lru_cache.
+ metadata = self._metadata_impl()
+ self._add_egg_info_requires(metadata)
+ return metadata
+
+ @property
+ def metadata(self) -> email.message.Message:
+ """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
+
+ This should return an empty message if the metadata file is unavailable.
+
+ :raises NoneMetadataError: If the metadata file is available, but does
+ not contain valid metadata.
+ """
+ return self._metadata_cached()
+
+ @property
+ def metadata_dict(self) -> Dict[str, Any]:
+ """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.
+
+ This should return an empty dict if the metadata file is unavailable.
+
+ :raises NoneMetadataError: If the metadata file is available, but does
+ not contain valid metadata.
+ """
+ return msg_to_json(self.metadata)
+
+ @property
+ def metadata_version(self) -> Optional[str]:
+ """Value of "Metadata-Version:" in distribution metadata, if available."""
+ return self.metadata.get("Metadata-Version")
+
+ @property
+ def raw_name(self) -> str:
+ """Value of "Name:" in distribution metadata."""
+ # The metadata should NEVER be missing the Name: key, but if it somehow
+ # does, fall back to the known canonical name.
+ return self.metadata.get("Name", self.canonical_name)
+
+ @property
+ def requires_python(self) -> SpecifierSet:
+ """Value of "Requires-Python:" in distribution metadata.
+
+ If the key does not exist or contains an invalid value, an empty
+ SpecifierSet should be returned.
+ """
+ value = self.metadata.get("Requires-Python")
+ if value is None:
+ return SpecifierSet()
+ try:
+ # Convert to str to satisfy the type checker; this can be a Header object.
+ spec = SpecifierSet(str(value))
+ except InvalidSpecifier as e:
+ message = "Package %r has an invalid Requires-Python: %s"
+ logger.warning(message, self.raw_name, e)
+ return SpecifierSet()
+ return spec
+
+ def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+ """Dependencies of this distribution.
+
+ For modern .dist-info distributions, this is the collection of
+ "Requires-Dist:" entries in distribution metadata.
+ """
+ raise NotImplementedError()
+
+ def iter_provided_extras(self) -> Iterable[str]:
+ """Extras provided by this distribution.
+
+ For modern .dist-info distributions, this is the collection of
+ "Provides-Extra:" entries in distribution metadata.
+ """
+ raise NotImplementedError()
+
+ def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
+ try:
+ text = self.read_text("RECORD")
+ except FileNotFoundError:
+ return None
+ # This extra Path-str cast normalizes entries.
+ return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
+
+ def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
+ try:
+ text = self.read_text("installed-files.txt")
+ except FileNotFoundError:
+ return None
+ paths = (p for p in text.splitlines(keepends=False) if p)
+ root = self.location
+ info = self.info_location
+ if root is None or info is None:
+ return paths
+ try:
+ info_rel = pathlib.Path(info).relative_to(root)
+ except ValueError: # info is not relative to root.
+ return paths
+ if not info_rel.parts: # info *is* root.
+ return paths
+ return (
+ _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
+ for p in paths
+ )
+
+ def iter_declared_entries(self) -> Optional[Iterator[str]]:
+ """Iterate through file entries declared in this distribution.
+
+ For modern .dist-info distributions, this is the files listed in the
+ ``RECORD`` metadata file. For legacy setuptools distributions, this
+ comes from ``installed-files.txt``, with entries normalized to be
+ compatible with the format used by ``RECORD``.
+
+ :return: An iterator for listed entries, or None if the distribution
+ contains neither ``RECORD`` nor ``installed-files.txt``.
+ """
+ return (
+ self._iter_declared_entries_from_record()
+ or self._iter_declared_entries_from_legacy()
+ )
+
+ def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
+ """Parse a ``requires.txt`` in an egg-info directory.
+
+ This is an INI-ish format where an egg-info stores dependencies. A
+ section name describes an extra and/or environment markers, while each entry
+ is an arbitrary string (not a key-value pair) representing a dependency
+ as a requirement string (no markers).
+
+ There is a construct in ``importlib.metadata`` called ``Sectioned`` that
+ does mostly the same, but the format is currently considered private.
+ """
+ try:
+ content = self.read_text("requires.txt")
+ except FileNotFoundError:
+ return
+ extra = marker = "" # Section-less entries don't have markers.
+ for line in content.splitlines():
+ line = line.strip()
+ if not line or line.startswith("#"): # Comment; ignored.
+ continue
+ if line.startswith("[") and line.endswith("]"): # A section header.
+ extra, _, marker = line.strip("[]").partition(":")
+ continue
+ yield RequiresEntry(requirement=line, extra=extra, marker=marker)
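+
+    # Editorial illustration (hypothetical content): a requires.txt of
+    #
+    #     requests
+    #     [socks]
+    #     PySocks!=1.5.7
+    #
+    # yields RequiresEntry("requests", extra="", marker="") followed by
+    # RequiresEntry("PySocks!=1.5.7", extra="socks", marker="").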
+
+ def _iter_egg_info_extras(self) -> Iterable[str]:
+ """Get extras from the egg-info directory."""
+ known_extras = {""}
+ for entry in self._iter_requires_txt_entries():
+ if entry.extra in known_extras:
+ continue
+ known_extras.add(entry.extra)
+ yield entry.extra
+
+ def _iter_egg_info_dependencies(self) -> Iterable[str]:
+ """Get distribution dependencies from the egg-info directory.
+
+ To ease parsing, this converts a legacy dependency entry into a PEP 508
+ requirement string. Like ``_iter_requires_txt_entries()``, there is code
+ in ``importlib.metadata`` that does mostly the same, but does not do exactly
+ what we need.
+
+ Namely, ``importlib.metadata`` does not normalize the extra name before
+ putting it into the requirement string, which causes marker comparison
+ to fail because the dist-info format does normalize. This normalization
+ is consistent across all currently available PEP 517 backends, although
+ not standardized.
+ """
+ for entry in self._iter_requires_txt_entries():
+ if entry.extra and entry.marker:
+ marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
+ elif entry.extra:
+ marker = f'extra == "{safe_extra(entry.extra)}"'
+ elif entry.marker:
+ marker = entry.marker
+ else:
+ marker = ""
+ if marker:
+ yield f"{entry.requirement} ; {marker}"
+ else:
+ yield entry.requirement
+
+ def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
+ """Add egg-info requires.txt information to the metadata."""
+ if not metadata.get_all("Requires-Dist"):
+ for dep in self._iter_egg_info_dependencies():
+ metadata["Requires-Dist"] = dep
+ if not metadata.get_all("Provides-Extra"):
+ for extra in self._iter_egg_info_extras():
+ metadata["Provides-Extra"] = extra
+
+
+class BaseEnvironment:
+ """An environment containing distributions to introspect."""
+
+ @classmethod
+ def default(cls) -> "BaseEnvironment":
+ raise NotImplementedError()
+
+ @classmethod
+ def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
+ raise NotImplementedError()
+
+ def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
+ """Given a requirement name, return the installed distributions.
+
+ The name may not be normalized. The implementation must canonicalize
+ it for lookup.
+ """
+ raise NotImplementedError()
+
+ def _iter_distributions(self) -> Iterator["BaseDistribution"]:
+ """Iterate through installed distributions.
+
+ This function should be implemented by subclasses, but never called
+ directly. Use the public ``iter_all_distributions()`` instead, which
+ implements additional logic to make sure the distributions are valid.
+ """
+ raise NotImplementedError()
+
+ def iter_all_distributions(self) -> Iterator[BaseDistribution]:
+ """Iterate through all installed distributions without any filtering."""
+ for dist in self._iter_distributions():
+ # Make sure the distribution actually comes from a valid Python
+ # packaging distribution. Pip's AdjacentTempDirectory leaves folders
+ # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
+ # valid project name pattern is taken from PEP 508.
+ project_name_valid = re.match(
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
+ dist.canonical_name,
+ flags=re.IGNORECASE,
+ )
+ if not project_name_valid:
+ logger.warning(
+ "Ignoring invalid distribution %s (%s)",
+ dist.canonical_name,
+ dist.location,
+ )
+ continue
+ yield dist
+
+ def iter_installed_distributions(
+ self,
+ local_only: bool = True,
+ skip: Container[str] = stdlib_pkgs,
+ include_editables: bool = True,
+ editables_only: bool = False,
+ user_only: bool = False,
+ ) -> Iterator[BaseDistribution]:
+ """Return a list of installed distributions.
+
+ This is based on ``iter_all_distributions()`` with additional filtering
+ options. Note that ``iter_installed_distributions()`` without arguments
+ is *not* equal to ``iter_all_distributions()``, since some of the
+ configurations exclude packages by default.
+
+ :param local_only: If True (default), only return installations
+ local to the current virtualenv, if in a virtualenv.
+ :param skip: An iterable of canonicalized project names to ignore;
+ defaults to ``stdlib_pkgs``.
+ :param include_editables: If False, don't report editables.
+ :param editables_only: If True, only report editables.
+ :param user_only: If True, only report installations in the user
+ site directory.
+ """
+ it = self.iter_all_distributions()
+ if local_only:
+ it = (d for d in it if d.local)
+ if not include_editables:
+ it = (d for d in it if not d.editable)
+ if editables_only:
+ it = (d for d in it if d.editable)
+ if user_only:
+ it = (d for d in it if d.in_usersite)
+ return (d for d in it if d.canonical_name not in skip)
+
+
+class Wheel(Protocol):
+ location: str
+
+ def as_zipfile(self) -> zipfile.ZipFile:
+ raise NotImplementedError()
+
+
+class FilesystemWheel(Wheel):
+ def __init__(self, location: str) -> None:
+ self.location = location
+
+ def as_zipfile(self) -> zipfile.ZipFile:
+ return zipfile.ZipFile(self.location, allowZip64=True)
+
+
+class MemoryWheel(Wheel):
+ def __init__(self, location: str, stream: IO[bytes]) -> None:
+ self.location = location
+ self.stream = stream
+
+ def as_zipfile(self) -> zipfile.ZipFile:
+ return zipfile.ZipFile(self.stream, allowZip64=True)
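+
+
+# Editorial sketch (not part of pip): building a MemoryWheel around an
+# in-memory zip. The file name and contents below are made up; a real wheel
+# would contain a full .dist-info directory.
+if __name__ == "__main__":
+    import io
+
+    _buf = io.BytesIO()
+    with zipfile.ZipFile(_buf, "w") as _zf:
+        _zf.writestr("demo-1.0.dist-info/METADATA", "Name: demo\nVersion: 1.0\n")
+    _wheel = MemoryWheel("demo-1.0-py3-none-any.whl", _buf)
+    print(_wheel.as_zipfile().namelist())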
diff --git a/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__init__.py b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e7af9fe521bd529dd2c1878b0a6e9ea7c57752d
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/metadata/importlib/__init__.py
@@ -0,0 +1,4 @@
+from ._dists import Distribution
+from ._envs import Environment
+
+__all__ = ["Distribution", "Environment"]
diff --git a/.venv/Lib/site-packages/pip/_internal/models/candidate.py b/.venv/Lib/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4963aec6388c27c3beb064f0a730af200380aee
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,34 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.models.link import Link
+from pip._internal.utils.models import KeyBasedCompareMixin
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+ """Represents a potential "candidate" for installation."""
+
+ __slots__ = ["name", "version", "link"]
+
+ def __init__(self, name: str, version: str, link: Link) -> None:
+ self.name = name
+ self.version = parse_version(version)
+ self.link = link
+
+ super().__init__(
+ key=(self.name, self.version, self.link),
+ defining_class=InstallationCandidate,
+ )
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.name,
+ self.version,
+ self.link,
+ )
+
+ def __str__(self) -> str:
+ return "{!r} candidate (version {} at {})".format(
+ self.name,
+ self.version,
+ self.link,
+ )
diff --git a/.venv/Lib/site-packages/pip/_internal/models/direct_url.py b/.venv/Lib/site-packages/pip/_internal/models/direct_url.py
new file mode 100644
index 0000000000000000000000000000000000000000..e219d73849bbbfc556be108fac2ae619042bce1a
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/direct_url.py
@@ -0,0 +1,237 @@
+""" PEP 610 """
+import json
+import re
+import urllib.parse
+from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
+
+__all__ = [
+ "DirectUrl",
+ "DirectUrlValidationError",
+ "DirInfo",
+ "ArchiveInfo",
+ "VcsInfo",
+]
+
+T = TypeVar("T")
+
+DIRECT_URL_METADATA_NAME = "direct_url.json"
+ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+class DirectUrlValidationError(Exception):
+ pass
+
+
+def _get(
+ d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> Optional[T]:
+ """Get value from dictionary and verify expected type."""
+ if key not in d:
+ return default
+ value = d[key]
+ if not isinstance(value, expected_type):
+ raise DirectUrlValidationError(
+ "{!r} has unexpected type for {} (expected {})".format(
+ value, key, expected_type
+ )
+ )
+ return value
+
+
+def _get_required(
+ d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> T:
+ value = _get(d, expected_type, key, default)
+ if value is None:
+ raise DirectUrlValidationError(f"{key} must have a value")
+ return value
+
+
+def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
+ infos = [info for info in infos if info is not None]
+ if not infos:
+ raise DirectUrlValidationError(
+ "missing one of archive_info, dir_info, vcs_info"
+ )
+ if len(infos) > 1:
+ raise DirectUrlValidationError(
+ "more than one of archive_info, dir_info, vcs_info"
+ )
+ assert infos[0] is not None
+ return infos[0]
+
+
+def _filter_none(**kwargs: Any) -> Dict[str, Any]:
+ """Make dict excluding None values."""
+ return {k: v for k, v in kwargs.items() if v is not None}
+
+
+class VcsInfo:
+ name = "vcs_info"
+
+ def __init__(
+ self,
+ vcs: str,
+ commit_id: str,
+ requested_revision: Optional[str] = None,
+ ) -> None:
+ self.vcs = vcs
+ self.requested_revision = requested_revision
+ self.commit_id = commit_id
+
+ @classmethod
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
+ if d is None:
+ return None
+ return cls(
+ vcs=_get_required(d, str, "vcs"),
+ commit_id=_get_required(d, str, "commit_id"),
+ requested_revision=_get(d, str, "requested_revision"),
+ )
+
+ def _to_dict(self) -> Dict[str, Any]:
+ return _filter_none(
+ vcs=self.vcs,
+ requested_revision=self.requested_revision,
+ commit_id=self.commit_id,
+ )
+
+
+class ArchiveInfo:
+ name = "archive_info"
+
+ def __init__(
+ self,
+ hash: Optional[str] = None,
+ hashes: Optional[Dict[str, str]] = None,
+ ) -> None:
+ # set hashes before hash, since the hash setter will further populate hashes
+ self.hashes = hashes
+ self.hash = hash
+
+ @property
+ def hash(self) -> Optional[str]:
+ return self._hash
+
+ @hash.setter
+ def hash(self, value: Optional[str]) -> None:
+ if value is not None:
+ # Auto-populate the hashes key to upgrade to the new format automatically.
+ # We don't back-populate the legacy hash key from hashes.
+ try:
+ hash_name, hash_value = value.split("=", 1)
+ except ValueError:
+ raise DirectUrlValidationError(
+ f"invalid archive_info.hash format: {value!r}"
+ )
+ if self.hashes is None:
+ self.hashes = {hash_name: hash_value}
+ elif hash_name not in self.hashes:
+ self.hashes = self.hashes.copy()
+ self.hashes[hash_name] = hash_value
+ self._hash = value
+
+ @classmethod
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
+ if d is None:
+ return None
+ return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
+
+ def _to_dict(self) -> Dict[str, Any]:
+ return _filter_none(hash=self.hash, hashes=self.hashes)
+
+
+class DirInfo:
+ name = "dir_info"
+
+ def __init__(
+ self,
+ editable: bool = False,
+ ) -> None:
+ self.editable = editable
+
+ @classmethod
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
+ if d is None:
+ return None
+ return cls(editable=_get_required(d, bool, "editable", default=False))
+
+ def _to_dict(self) -> Dict[str, Any]:
+ return _filter_none(editable=self.editable or None)
+
+
+InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
+
+
+class DirectUrl:
+ def __init__(
+ self,
+ url: str,
+ info: InfoType,
+ subdirectory: Optional[str] = None,
+ ) -> None:
+ self.url = url
+ self.info = info
+ self.subdirectory = subdirectory
+
+ def _remove_auth_from_netloc(self, netloc: str) -> str:
+ if "@" not in netloc:
+ return netloc
+ user_pass, netloc_no_user_pass = netloc.split("@", 1)
+ if (
+ isinstance(self.info, VcsInfo)
+ and self.info.vcs == "git"
+ and user_pass == "git"
+ ):
+ return netloc
+ if ENV_VAR_RE.match(user_pass):
+ return netloc
+ return netloc_no_user_pass
+
+ @property
+ def redacted_url(self) -> str:
+ """url with user:password part removed unless it is formed with
+ environment variables as specified in PEP 610, or it is ``git``
+ in the case of a git URL.
+ """
+ purl = urllib.parse.urlsplit(self.url)
+ netloc = self._remove_auth_from_netloc(purl.netloc)
+ surl = urllib.parse.urlunsplit(
+ (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
+ )
+ return surl
+
+ def validate(self) -> None:
+ self.from_dict(self.to_dict())
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
+ return DirectUrl(
+ url=_get_required(d, str, "url"),
+ subdirectory=_get(d, str, "subdirectory"),
+ info=_exactly_one_of(
+ [
+ ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
+ DirInfo._from_dict(_get(d, dict, "dir_info")),
+ VcsInfo._from_dict(_get(d, dict, "vcs_info")),
+ ]
+ ),
+ )
+
+ def to_dict(self) -> Dict[str, Any]:
+ res = _filter_none(
+ url=self.redacted_url,
+ subdirectory=self.subdirectory,
+ )
+ res[self.info.name] = self.info._to_dict()
+ return res
+
+ @classmethod
+ def from_json(cls, s: str) -> "DirectUrl":
+ return cls.from_dict(json.loads(s))
+
+ def to_json(self) -> str:
+ return json.dumps(self.to_dict(), sort_keys=True)
+
+ def is_local_editable(self) -> bool:
+ return isinstance(self.info, DirInfo) and self.info.editable
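+
+
+# Editorial sketch (not part of pip): round-tripping a PEP 610 document and
+# redacting credentials. The URL and hash are made-up values.
+if __name__ == "__main__":
+    _direct_url = DirectUrl.from_dict(
+        {
+            "url": "https://user:secret@example.com/pkg-1.0.tar.gz",
+            "archive_info": {"hash": "sha256=deadbeef"},
+        }
+    )
+    assert _direct_url.redacted_url == "https://example.com/pkg-1.0.tar.gz"
+    print(_direct_url.to_json())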
diff --git a/.venv/Lib/site-packages/pip/_internal/models/format_control.py b/.venv/Lib/site-packages/pip/_internal/models/format_control.py
new file mode 100644
index 0000000000000000000000000000000000000000..db3995eac9f9ec2450e0e2d4a18e666c0b178681
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/format_control.py
@@ -0,0 +1,80 @@
+from typing import FrozenSet, Optional, Set
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import CommandError
+
+
+class FormatControl:
+ """Helper for managing formats from which a package can be installed."""
+
+ __slots__ = ["no_binary", "only_binary"]
+
+ def __init__(
+ self,
+ no_binary: Optional[Set[str]] = None,
+ only_binary: Optional[Set[str]] = None,
+ ) -> None:
+ if no_binary is None:
+ no_binary = set()
+ if only_binary is None:
+ only_binary = set()
+
+ self.no_binary = no_binary
+ self.only_binary = only_binary
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+
+ if self.__slots__ != other.__slots__:
+ return False
+
+ return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
+
+ def __repr__(self) -> str:
+ return "{}({}, {})".format(
+ self.__class__.__name__, self.no_binary, self.only_binary
+ )
+
+ @staticmethod
+ def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
+ if value.startswith("-"):
+ raise CommandError(
+ "--no-binary / --only-binary option requires 1 argument."
+ )
+ new = value.split(",")
+ while ":all:" in new:
+ other.clear()
+ target.clear()
+ target.add(":all:")
+ del new[: new.index(":all:") + 1]
+ # Without a ":none:", we want to discard everything, as ":all:" covers it
+ if ":none:" not in new:
+ return
+ for name in new:
+ if name == ":none:":
+ target.clear()
+ continue
+ name = canonicalize_name(name)
+ other.discard(name)
+ target.add(name)
+
+ def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
+ result = {"binary", "source"}
+ if canonical_name in self.only_binary:
+ result.discard("source")
+ elif canonical_name in self.no_binary:
+ result.discard("binary")
+ elif ":all:" in self.only_binary:
+ result.discard("source")
+ elif ":all:" in self.no_binary:
+ result.discard("binary")
+ return frozenset(result)
+
+ def disallow_binaries(self) -> None:
+ self.handle_mutual_excludes(
+ ":all:",
+ self.no_binary,
+ self.only_binary,
+ )
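+
+
+# Editorial sketch (not part of pip): ":all:" in no_binary rules out wheels
+# for every project that is not explicitly re-allowed afterwards.
+if __name__ == "__main__":
+    _fc = FormatControl(set(), set())
+    _fc.disallow_binaries()
+    print(_fc.get_allowed_formats("requests"))  # frozenset({'source'})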
diff --git a/.venv/Lib/site-packages/pip/_internal/models/index.py b/.venv/Lib/site-packages/pip/_internal/models/index.py
new file mode 100644
index 0000000000000000000000000000000000000000..b94c32511f0cda2363bfc4f29c9c8bfcc7101f9b
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,28 @@
+import urllib.parse
+
+
+class PackageIndex:
+ """Represents a Package Index and provides easier access to endpoints"""
+
+ __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
+
+ def __init__(self, url: str, file_storage_domain: str) -> None:
+ super().__init__()
+ self.url = url
+ self.netloc = urllib.parse.urlsplit(url).netloc
+ self.simple_url = self._url_for_path("simple")
+ self.pypi_url = self._url_for_path("pypi")
+
+ # This is part of a temporary hack used to block installs of PyPI
+ # packages which depend on external URLs; it is only necessary until
+ # PyPI can block such packages itself.
+ self.file_storage_domain = file_storage_domain
+
+ def _url_for_path(self, path: str) -> str:
+ return urllib.parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
+TestPyPI = PackageIndex(
+ "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
+)
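+
+
+# Editorial sketch (not part of pip): the module-level PyPI instance exposes
+# derived endpoint URLs.
+if __name__ == "__main__":
+    print(PyPI.simple_url)  # https://pypi.org/simple
+    print(PyPI.netloc)  # pypi.org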
diff --git a/.venv/Lib/site-packages/pip/_internal/models/installation_report.py b/.venv/Lib/site-packages/pip/_internal/models/installation_report.py
new file mode 100644
index 0000000000000000000000000000000000000000..7f001f35ef20b63f6b6a5954864b69ec5f37efc6
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/installation_report.py
@@ -0,0 +1,53 @@
+from typing import Any, Dict, Sequence
+
+from pip._vendor.packaging.markers import default_environment
+
+from pip import __version__
+from pip._internal.req.req_install import InstallRequirement
+
+
+class InstallationReport:
+ def __init__(self, install_requirements: Sequence[InstallRequirement]):
+ self._install_requirements = install_requirements
+
+ @classmethod
+ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
+ assert ireq.download_info, f"No download_info for {ireq}"
+ res = {
+ # PEP 610 json for the download URL. download_info.archive_info.hashes may
+ # be absent when the requirement was installed from the wheel cache
+ # and the cache entry was populated by an older pip version that did not
+ # record origin.json.
+ "download_info": ireq.download_info.to_dict(),
+ # is_direct is true if the requirement was a direct URL reference (which
+ # includes editable requirements), and false if the requirement was
+ # downloaded from a PEP 503 index or --find-links.
+ "is_direct": ireq.is_direct,
+ # requested is true if the requirement was specified by the user (aka
+ # top level requirement), and false if it was installed as a dependency of a
+ # requirement. https://peps.python.org/pep-0376/#requested
+ "requested": ireq.user_supplied,
+ # PEP 566 json encoding for metadata
+ # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
+ "metadata": ireq.get_dist().metadata_dict,
+ }
+ if ireq.user_supplied and ireq.extras:
+ # For top level requirements, the list of requested extras, if any.
+ res["requested_extras"] = list(sorted(ireq.extras))
+ return res
+
+ def to_dict(self) -> Dict[str, Any]:
+ return {
+ "version": "1",
+ "pip_version": __version__,
+ "install": [
+ self._install_req_to_dict(ireq) for ireq in self._install_requirements
+ ],
+ # https://peps.python.org/pep-0508/#environment-markers
+ # TODO: currently, the resolver uses the default environment to evaluate
+ # environment markers, so that is what we report here. In the future, it
+ # should also take into account options such as --python-version or
+ # --platform, perhaps under the form of an environment_override field?
+ # https://github.com/pypa/pip/issues/11198
+ "environment": default_environment(),
+ }
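+
+
+# Editorial sketch (not part of pip): even an empty report carries the fixed
+# top-level keys described above.
+if __name__ == "__main__":
+    _report = InstallationReport([]).to_dict()
+    print(sorted(_report))  # ['environment', 'install', 'pip_version', 'version']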
diff --git a/.venv/Lib/site-packages/pip/_internal/models/link.py b/.venv/Lib/site-packages/pip/_internal/models/link.py
new file mode 100644
index 0000000000000000000000000000000000000000..4453519ad0202281cfa53b3ca2a0282a9b0a1799
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,581 @@
+import functools
+import itertools
+import logging
+import os
+import posixpath
+import re
+import urllib.parse
+from dataclasses import dataclass
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ List,
+ Mapping,
+ NamedTuple,
+ Optional,
+ Tuple,
+ Union,
+)
+
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+ pairwise,
+ redact_auth_from_url,
+ split_auth_from_netloc,
+ splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.urls import path_to_url, url_to_path
+
+if TYPE_CHECKING:
+ from pip._internal.index.collector import IndexContent
+
+logger = logging.getLogger(__name__)
+
+
+# Order matters, earlier hashes have a precedence over later hashes for what
+# we will pick to use.
+_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
+
+
+@dataclass(frozen=True)
+class LinkHash:
+ """Links to content may have embedded hash values. This class parses those.
+
+ `name` must be any member of `_SUPPORTED_HASHES`.
+
+ This class can be converted to and from `ArchiveInfo`. While ArchiveInfo is intended to
+ be JSON-serializable to conform to PEP 610, this class contains the logic for
+ parsing a hash name and value for correctness, and then checking whether that hash
+ conforms to a schema with `.is_hash_allowed()`."""
+
+ name: str
+ value: str
+
+ _hash_url_fragment_re = re.compile(
+ # NB: we do not validate that the second group (.*) is a valid hex
+ # digest. Instead, we simply keep that string in this class, and then check it
+ # against Hashes when hash-checking is needed. This is easier to debug than
+ # proactively discarding an invalid hex digest, as we handle incorrect hashes
+ # and malformed hashes in the same place.
+ r"[#&]({choices})=([^&]*)".format(
+ choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
+ ),
+ )
+
+ def __post_init__(self) -> None:
+ assert self.name in _SUPPORTED_HASHES
+
+ @classmethod
+ @functools.lru_cache(maxsize=None)
+ def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
+ """Search a string for a checksum algorithm name and encoded output value."""
+ match = cls._hash_url_fragment_re.search(url)
+ if match is None:
+ return None
+ name, value = match.groups()
+ return cls(name=name, value=value)
+
+ def as_dict(self) -> Dict[str, str]:
+ return {self.name: self.value}
+
+ def as_hashes(self) -> Hashes:
+ """Return a Hashes instance which checks only for the current hash."""
+ return Hashes({self.name: [self.value]})
+
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+ """
+ Return True if the current hash is allowed by `hashes`.
+ """
+ if hashes is None:
+ return False
+ return hashes.is_hash_allowed(self.name, hex_digest=self.value)
+
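+# Editorial sketch (not part of pip): extracting an embedded hash from a URL
+# fragment. The digest below is a made-up placeholder.
+if __name__ == "__main__":
+    _lh = LinkHash.find_hash_url_fragment(
+        "https://example.com/pkg-1.0.tar.gz#sha256=" + "0" * 64
+    )
+    assert _lh is not None and _lh.name == "sha256"
+    print(_lh.as_dict())
+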
+
+@dataclass(frozen=True)
+class MetadataFile:
+ """Information about a core metadata file associated with a distribution."""
+
+ hashes: Optional[Dict[str, str]]
+
+ def __post_init__(self) -> None:
+ if self.hashes is not None:
+ assert all(name in _SUPPORTED_HASHES for name in self.hashes)
+
+
+def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
+ # Remove any unsupported hash types from the mapping. If this leaves no
+ # supported hashes, return None.
+ if hashes is None:
+ return None
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
+ if not hashes:
+ return None
+ return hashes
+
+
+def _clean_url_path_part(part: str) -> str:
+ """
+ Clean a "part" of a URL path (i.e. after splitting on "@" characters).
+ """
+ # We unquote prior to quoting to make sure nothing is double quoted.
+ return urllib.parse.quote(urllib.parse.unquote(part))
+
+
+def _clean_file_url_path(part: str) -> str:
+ """
+ Clean the first part of a URL path that corresponds to a local
+ filesystem path (i.e. the first part after splitting on "@" characters).
+ """
+ # We unquote prior to quoting to make sure nothing is double quoted.
+ # Also, on Windows the path part might contain a drive letter which
+ # should not be quoted. On Linux where drive letters do not
+ # exist, the colon should be quoted. We rely on urllib.request
+ # to do the right thing here.
+ return urllib.request.pathname2url(urllib.request.url2pathname(part))
+
+
+# "%2F" is the percent-encoded form of "/".
+_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
+
+
+def _clean_url_path(path: str, is_local_path: bool) -> str:
+ """
+ Clean the path portion of a URL.
+ """
+ if is_local_path:
+ clean_func = _clean_file_url_path
+ else:
+ clean_func = _clean_url_path_part
+
+ # Split on the reserved characters prior to cleaning so that
+ # revision strings in VCS URLs are properly preserved.
+ parts = _reserved_chars_re.split(path)
+
+ cleaned_parts = []
+ for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
+ cleaned_parts.append(clean_func(to_clean))
+ # Normalize %xx escapes (e.g. %2f -> %2F)
+ cleaned_parts.append(reserved.upper())
+
+ return "".join(cleaned_parts)
+
+
+def _ensure_quoted_url(url: str) -> str:
+ """
+ Make sure a link is fully quoted.
+ For example, if ' ' occurs in the URL, it will be replaced with "%20",
+ and without double-quoting other characters.
+ """
+ # Split the URL into parts according to the general structure
+ # `scheme://netloc/path;parameters?query#fragment`.
+ result = urllib.parse.urlparse(url)
+ # If the netloc is empty, then the URL refers to a local filesystem path.
+ is_local_path = not result.netloc
+ path = _clean_url_path(result.path, is_local_path=is_local_path)
+ return urllib.parse.urlunparse(result._replace(path=path))
+
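+# Editorial sketch (not part of pip): spaces are quoted while existing
+# percent-escapes are left alone.
+if __name__ == "__main__":
+    print(_ensure_quoted_url("https://example.com/some dir/pkg.tar.gz"))
+    # -> https://example.com/some%20dir/pkg.tar.gz
+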
+
+class Link(KeyBasedCompareMixin):
+ """Represents a parsed link from a Package Index's simple URL"""
+
+ __slots__ = [
+ "_parsed_url",
+ "_url",
+ "_hashes",
+ "comes_from",
+ "requires_python",
+ "yanked_reason",
+ "metadata_file_data",
+ "cache_link_parsing",
+ "egg_fragment",
+ ]
+
+ def __init__(
+ self,
+ url: str,
+ comes_from: Optional[Union[str, "IndexContent"]] = None,
+ requires_python: Optional[str] = None,
+ yanked_reason: Optional[str] = None,
+ metadata_file_data: Optional[MetadataFile] = None,
+ cache_link_parsing: bool = True,
+ hashes: Optional[Mapping[str, str]] = None,
+ ) -> None:
+ """
+ :param url: url of the resource pointed to (href of the link)
+ :param comes_from: instance of IndexContent where the link was found,
+ or string.
+ :param requires_python: String containing the `Requires-Python`
+ metadata field, specified in PEP 345. This may be specified by
+ a data-requires-python attribute in the HTML link tag, as
+ described in PEP 503.
+ :param yanked_reason: the reason the file has been yanked, if the
+ file has been yanked, or None if the file hasn't been yanked.
+ This is the value of the "data-yanked" attribute, if present, in
+ a simple repository HTML link. If the file has been yanked but
+ no reason was provided, this should be the empty string. See
+ PEP 592 for more information and the specification.
+ :param metadata_file_data: the metadata attached to the file, or None if
+ no such metadata is provided. This argument, if not None, indicates
+ that a separate metadata file exists, and also optionally supplies
+ hashes for that file.
+ :param cache_link_parsing: A flag that is used elsewhere to determine
+ whether resources retrieved from this link should be cached. PyPI
+ URLs should generally have this set to False, for example.
+ :param hashes: A mapping of hash names to digests to allow us to
+ determine the validity of a download.
+ """
+
+ # The comes_from, requires_python, and metadata_file_data arguments are
+ # only used by classmethods of this class, and are not used in client
+ # code directly.
+
+ # url can be a Windows UNC share
+ if url.startswith("\\\\"):
+ url = path_to_url(url)
+
+ self._parsed_url = urllib.parse.urlsplit(url)
+ # Store the url as a private attribute to prevent accidentally
+ # trying to set a new value.
+ self._url = url
+
+ link_hash = LinkHash.find_hash_url_fragment(url)
+ hashes_from_link = {} if link_hash is None else link_hash.as_dict()
+ if hashes is None:
+ self._hashes = hashes_from_link
+ else:
+ self._hashes = {**hashes, **hashes_from_link}
+
+ self.comes_from = comes_from
+ self.requires_python = requires_python if requires_python else None
+ self.yanked_reason = yanked_reason
+ self.metadata_file_data = metadata_file_data
+
+ super().__init__(key=url, defining_class=Link)
+
+ self.cache_link_parsing = cache_link_parsing
+ self.egg_fragment = self._egg_fragment()
+
+ @classmethod
+ def from_json(
+ cls,
+ file_data: Dict[str, Any],
+ page_url: str,
+ ) -> Optional["Link"]:
+ """
+ Convert a PyPI JSON file entry from a simple repository page into a Link.
+ """
+ file_url = file_data.get("url")
+ if file_url is None:
+ return None
+
+ url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
+ pyrequire = file_data.get("requires-python")
+ yanked_reason = file_data.get("yanked")
+ hashes = file_data.get("hashes", {})
+
+ # PEP 714: Indexes must use the name core-metadata, but
+ # clients should support the old name as a fallback for compatibility.
+ metadata_info = file_data.get("core-metadata")
+ if metadata_info is None:
+ metadata_info = file_data.get("dist-info-metadata")
+
+ # The metadata info value may be a boolean, or a dict of hashes.
+ if isinstance(metadata_info, dict):
+ # The file exists, and hashes have been supplied
+ metadata_file_data = MetadataFile(supported_hashes(metadata_info))
+ elif metadata_info:
+ # The file exists, but there are no hashes
+ metadata_file_data = MetadataFile(None)
+ else:
+ # False or not present: the file does not exist
+ metadata_file_data = None
+
+ # The Link.yanked_reason expects an empty string instead of a boolean.
+ if yanked_reason and not isinstance(yanked_reason, str):
+ yanked_reason = ""
+ # The Link.yanked_reason expects None instead of False.
+ elif not yanked_reason:
+ yanked_reason = None
+
+ return cls(
+ url,
+ comes_from=page_url,
+ requires_python=pyrequire,
+ yanked_reason=yanked_reason,
+ hashes=hashes,
+ metadata_file_data=metadata_file_data,
+ )
+
+ @classmethod
+ def from_element(
+ cls,
+ anchor_attribs: Dict[str, Optional[str]],
+ page_url: str,
+ base_url: str,
+ ) -> Optional["Link"]:
+ """
+ Convert an anchor element's attributes in a simple repository page to a Link.
+ """
+ href = anchor_attribs.get("href")
+ if not href:
+ return None
+
+ url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
+ pyrequire = anchor_attribs.get("data-requires-python")
+ yanked_reason = anchor_attribs.get("data-yanked")
+
+ # PEP 714: Indexes must use the name data-core-metadata, but
+ # clients should support the old name as a fallback for compatibility.
+ metadata_info = anchor_attribs.get("data-core-metadata")
+ if metadata_info is None:
+ metadata_info = anchor_attribs.get("data-dist-info-metadata")
+ # The metadata info value may be the string "true", or a string of
+ # the form "hashname=hashval"
+ if metadata_info == "true":
+ # The file exists, but there are no hashes
+ metadata_file_data = MetadataFile(None)
+ elif metadata_info is None:
+ # The file does not exist
+ metadata_file_data = None
+ else:
+ # The file exists, and hashes have been supplied
+ hashname, sep, hashval = metadata_info.partition("=")
+ if sep == "=":
+ metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
+ else:
+ # Error - data is wrong. Treat as no hashes supplied.
+ logger.debug(
+ "Index returned invalid data-dist-info-metadata value: %s",
+ metadata_info,
+ )
+ metadata_file_data = MetadataFile(None)
+
+ return cls(
+ url,
+ comes_from=page_url,
+ requires_python=pyrequire,
+ yanked_reason=yanked_reason,
+ metadata_file_data=metadata_file_data,
+ )
+
+ def __str__(self) -> str:
+ if self.requires_python:
+ rp = f" (requires-python:{self.requires_python})"
+ else:
+ rp = ""
+ if self.comes_from:
+ return "{} (from {}){}".format(
+ redact_auth_from_url(self._url), self.comes_from, rp
+ )
+ else:
+ return redact_auth_from_url(str(self._url))
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def url(self) -> str:
+ return self._url
+
+ @property
+ def filename(self) -> str:
+ path = self.path.rstrip("/")
+ name = posixpath.basename(path)
+ if not name:
+ # Make sure we don't leak auth information if the netloc
+ # includes a username and password.
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
+ return netloc
+
+ name = urllib.parse.unquote(name)
+ assert name, f"URL {self._url!r} produced no filename"
+ return name
+
+ @property
+ def file_path(self) -> str:
+ return url_to_path(self.url)
+
+ @property
+ def scheme(self) -> str:
+ return self._parsed_url.scheme
+
+ @property
+ def netloc(self) -> str:
+ """
+ This can contain auth information.
+ """
+ return self._parsed_url.netloc
+
+ @property
+ def path(self) -> str:
+ return urllib.parse.unquote(self._parsed_url.path)
+
+ def splitext(self) -> Tuple[str, str]:
+ return splitext(posixpath.basename(self.path.rstrip("/")))
+
+ @property
+ def ext(self) -> str:
+ return self.splitext()[1]
+
+ @property
+ def url_without_fragment(self) -> str:
+ scheme, netloc, path, query, fragment = self._parsed_url
+ return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+
+ _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+ # Per PEP 508.
+ _project_name_re = re.compile(
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+ )
+
+ def _egg_fragment(self) -> Optional[str]:
+ match = self._egg_fragment_re.search(self._url)
+ if not match:
+ return None
+
+ # An egg fragment looks like a PEP 508 project name, along with
+ # an optional extras specifier. Anything else is invalid.
+ project_name = match.group(1)
+ if not self._project_name_re.match(project_name):
+ deprecated(
+ reason=f"{self} contains an egg fragment with a non-PEP 508 name",
+ replacement="to use the req @ url syntax, and remove the egg fragment",
+ gone_in="25.0",
+ issue=11617,
+ )
+
+ return project_name
+
+ _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+ @property
+ def subdirectory_fragment(self) -> Optional[str]:
+ match = self._subdirectory_fragment_re.search(self._url)
+ if not match:
+ return None
+ return match.group(1)
+
+ def metadata_link(self) -> Optional["Link"]:
+ """Return a link to the associated core metadata file (if any)."""
+ if self.metadata_file_data is None:
+ return None
+ metadata_url = f"{self.url_without_fragment}.metadata"
+ if self.metadata_file_data.hashes is None:
+ return Link(metadata_url)
+ return Link(metadata_url, hashes=self.metadata_file_data.hashes)
+
+ def as_hashes(self) -> Hashes:
+ return Hashes({k: [v] for k, v in self._hashes.items()})
+
+ @property
+ def hash(self) -> Optional[str]:
+ return next(iter(self._hashes.values()), None)
+
+ @property
+ def hash_name(self) -> Optional[str]:
+ return next(iter(self._hashes), None)
+
+ @property
+ def show_url(self) -> str:
+ return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
+
+ @property
+ def is_file(self) -> bool:
+ return self.scheme == "file"
+
+ def is_existing_dir(self) -> bool:
+ return self.is_file and os.path.isdir(self.file_path)
+
+ @property
+ def is_wheel(self) -> bool:
+ return self.ext == WHEEL_EXTENSION
+
+ @property
+ def is_vcs(self) -> bool:
+ from pip._internal.vcs import vcs
+
+ return self.scheme in vcs.all_schemes
+
+ @property
+ def is_yanked(self) -> bool:
+ return self.yanked_reason is not None
+
+ @property
+ def has_hash(self) -> bool:
+ return bool(self._hashes)
+
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+ """
+ Return True if the link has a hash and it is allowed by `hashes`.
+ """
+ if hashes is None:
+ return False
+ return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
+
+
+class _CleanResult(NamedTuple):
+ """Convert link for equivalency check.
+
+ This is used in the resolver to check whether two URL-specified requirements
+ likely point to the same distribution and can be considered equivalent. This
+ equivalency logic avoids comparing URLs literally, which can be too strict
+ (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
+
+ Currently this does three things:
+
+ 1. Drop the basic auth part. This is technically wrong since a server can
+ serve different content based on auth, but if it does that, it is even
+ impossible to guarantee two URLs without auth are equivalent, since
+ the user can input different auth information when prompted. So the
+ practical solution is to assume the auth doesn't affect the response.
+ 2. Parse the query to avoid the ordering issue. Note that the ordering of
+ values under the same key is NOT cleaned up; i.e. "a=1&a=2" and
+ "a=2&a=1" are still considered different.
+ 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+ hash values, since it should have no impact on the downloaded content. Note
+ that this drops the "egg=" part historically used to denote the requested
+ project (and extras), which is wrong in the strictest sense, but too many
+ people supply it inconsistently, causing superfluous resolution
+ conflicts, so we choose to ignore it as well.
+ """
+
+ parsed: urllib.parse.SplitResult
+ query: Dict[str, List[str]]
+ subdirectory: str
+ hashes: Dict[str, str]
+
+
+def _clean_link(link: Link) -> _CleanResult:
+ parsed = link._parsed_url
+ netloc = parsed.netloc.rsplit("@", 1)[-1]
+ # According to RFC 8089, an empty host in file: means localhost.
+ if parsed.scheme == "file" and not netloc:
+ netloc = "localhost"
+ fragment = urllib.parse.parse_qs(parsed.fragment)
+ if "egg" in fragment:
+ logger.debug("Ignoring egg= fragment in %s", link)
+ try:
+ # If there are multiple subdirectory values, use the first one.
+ # This matches the behavior of Link.subdirectory_fragment.
+ subdirectory = fragment["subdirectory"][0]
+ except (IndexError, KeyError):
+ subdirectory = ""
+ # If there are multiple hash values under the same algorithm, use the
+ # first one. This matches the behavior of Link.hash_value.
+ hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
+ return _CleanResult(
+ parsed=parsed._replace(netloc=netloc, query="", fragment=""),
+ query=urllib.parse.parse_qs(parsed.query),
+ subdirectory=subdirectory,
+ hashes=hashes,
+ )
+
+
+@functools.lru_cache(maxsize=None)
+def links_equivalent(link1: Link, link2: Link) -> bool:
+ return _clean_link(link1) == _clean_link(link2)
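+
+
+# Editorial sketch (not part of pip): query ordering does not affect link
+# equivalency.
+if __name__ == "__main__":
+    _a = Link("https://example.com/pkg-1.0.tar.gz?b=2&a=1")
+    _b = Link("https://example.com/pkg-1.0.tar.gz?a=1&b=2")
+    assert links_equivalent(_a, _b)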
diff --git a/.venv/Lib/site-packages/pip/_internal/models/scheme.py b/.venv/Lib/site-packages/pip/_internal/models/scheme.py
new file mode 100644
index 0000000000000000000000000000000000000000..f51190ac60354d90eb2aef4b04c484f8517275c2
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/scheme.py
@@ -0,0 +1,31 @@
+"""
+For types associated with installation schemes.
+
+For a general overview of available schemes and their context, see
+https://docs.python.org/3/install/index.html#alternate-installation.
+"""
+
+
+SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
+
+
+class Scheme:
+ """A Scheme holds paths which are used as the base directories for
+ artifacts associated with a Python package.
+ """
+
+ __slots__ = SCHEME_KEYS
+
+ def __init__(
+ self,
+ platlib: str,
+ purelib: str,
+ headers: str,
+ scripts: str,
+ data: str,
+ ) -> None:
+ self.platlib = platlib
+ self.purelib = purelib
+ self.headers = headers
+ self.scripts = scripts
+ self.data = data
diff --git a/.venv/Lib/site-packages/pip/_internal/models/search_scope.py b/.venv/Lib/site-packages/pip/_internal/models/search_scope.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe61e8116b71e073351939ed7a499ee752398f1c
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/search_scope.py
@@ -0,0 +1,132 @@
+import itertools
+import logging
+import os
+import posixpath
+import urllib.parse
+from typing import List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import has_tls
+from pip._internal.utils.misc import normalize_path, redact_auth_from_url
+
+logger = logging.getLogger(__name__)
+
+
+class SearchScope:
+
+ """
+ Encapsulates the locations that pip is configured to search.
+ """
+
+ __slots__ = ["find_links", "index_urls", "no_index"]
+
+ @classmethod
+ def create(
+ cls,
+ find_links: List[str],
+ index_urls: List[str],
+ no_index: bool,
+ ) -> "SearchScope":
+ """
+ Create a SearchScope object after normalizing the `find_links`.
+ """
+ # Build find_links. If an argument starts with ~, it may be
+ # a local file relative to a home directory. So try normalizing
+ # it and if it exists, use the normalized version.
+ # This is deliberately conservative - it might be fine just to
+ # blindly normalize anything starting with a ~...
+ built_find_links: List[str] = []
+ for link in find_links:
+ if link.startswith("~"):
+ new_link = normalize_path(link)
+ if os.path.exists(new_link):
+ link = new_link
+ built_find_links.append(link)
+
+ # If we don't have TLS enabled, then WARN if anyplace we're looking
+ # relies on TLS.
+ if not has_tls():
+ for link in itertools.chain(index_urls, built_find_links):
+ parsed = urllib.parse.urlparse(link)
+ if parsed.scheme == "https":
+ logger.warning(
+ "pip is configured with locations that require "
+ "TLS/SSL, however the ssl module in Python is not "
+ "available."
+ )
+ break
+
+ return cls(
+ find_links=built_find_links,
+ index_urls=index_urls,
+ no_index=no_index,
+ )
+
+ def __init__(
+ self,
+ find_links: List[str],
+ index_urls: List[str],
+ no_index: bool,
+ ) -> None:
+ self.find_links = find_links
+ self.index_urls = index_urls
+ self.no_index = no_index
+
+ def get_formatted_locations(self) -> str:
+ lines = []
+ redacted_index_urls = []
+ if self.index_urls and self.index_urls != [PyPI.simple_url]:
+ for url in self.index_urls:
+ redacted_index_url = redact_auth_from_url(url)
+
+ # Parse the URL
+ purl = urllib.parse.urlsplit(redacted_index_url)
+
+ # A URL is generally invalid if both scheme and netloc are missing;
+ # there are issues with Python's URL parsing, so this test is a bit
+ # crude. See bpo-20271 and bpo-23505: Python doesn't always parse
+ # invalid URLs correctly, and doesn't reliably raise exceptions for
+ # malformed ones.
+ if not purl.scheme and not purl.netloc:
+ logger.warning(
+ 'The index url "%s" seems invalid, please provide a scheme.',
+ redacted_index_url,
+ )
+
+ redacted_index_urls.append(redacted_index_url)
+
+ lines.append(
+ "Looking in indexes: {}".format(", ".join(redacted_index_urls))
+ )
+
+ if self.find_links:
+ lines.append(
+ "Looking in links: {}".format(
+ ", ".join(redact_auth_from_url(url) for url in self.find_links)
+ )
+ )
+ return "\n".join(lines)
+
+ def get_index_urls_locations(self, project_name: str) -> List[str]:
+ """Returns the locations found via self.index_urls
+
+ Checks the url_name on the main (first in the list) index and
+ use this url_name to produce all locations
+ """
+
+ def mkurl_pypi_url(url: str) -> str:
+ loc = posixpath.join(
+ url, urllib.parse.quote(canonicalize_name(project_name))
+ )
+ # For maximum compatibility with easy_install, ensure the path
+ # ends in a trailing slash. Although this isn't in the spec
+ # (and PyPI can handle it without the slash) some other index
+ # implementations might break if they relied on easy_install's
+ # behavior.
+ if not loc.endswith("/"):
+ loc = loc + "/"
+ return loc
+
+ return [mkurl_pypi_url(url) for url in self.index_urls]
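+
+
+# Editorial sketch (not part of pip): project names are canonicalized when
+# building per-project index URLs.
+if __name__ == "__main__":
+    _scope = SearchScope(find_links=[], index_urls=[PyPI.simple_url], no_index=False)
+    print(_scope.get_index_urls_locations("Sample.Project_Name"))
+    # -> ['https://pypi.org/simple/sample-project-name/']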
diff --git a/.venv/Lib/site-packages/pip/_internal/models/selection_prefs.py b/.venv/Lib/site-packages/pip/_internal/models/selection_prefs.py
new file mode 100644
index 0000000000000000000000000000000000000000..977bc4caa75c1e76156fa97e2841a01332f6fa47
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/models/selection_prefs.py
@@ -0,0 +1,51 @@
+from typing import Optional
+
+from pip._internal.models.format_control import FormatControl
+
+
+class SelectionPreferences:
+ """
+ Encapsulates the candidate selection preferences for downloading
+ and installing files.
+ """
+
+ __slots__ = [
+ "allow_yanked",
+ "allow_all_prereleases",
+ "format_control",
+ "prefer_binary",
+ "ignore_requires_python",
+ ]
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also makes
+ # the decision explicit in the calling code, which helps people reading
+ # the code.
+ def __init__(
+ self,
+ allow_yanked: bool,
+ allow_all_prereleases: bool = False,
+ format_control: Optional[FormatControl] = None,
+ prefer_binary: bool = False,
+ ignore_requires_python: Optional[bool] = None,
+ ) -> None:
+ """Create a SelectionPreferences object.
+
+ :param allow_yanked: Whether files marked as yanked (in the sense
+ of PEP 592) are permitted to be candidates for install.
+ :param format_control: A FormatControl object or None. Used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param prefer_binary: Whether to prefer an old, but valid, binary
+ dist over a new source dist.
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ if ignore_requires_python is None:
+ ignore_requires_python = False
+
+ self.allow_yanked = allow_yanked
+ self.allow_all_prereleases = allow_all_prereleases
+ self.format_control = format_control
+ self.prefer_binary = prefer_binary
+ self.ignore_requires_python = ignore_requires_python
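+
+
+# A hedged usage sketch (editor's addition, not upstream pip code): call sites
+# must pass allow_yanked explicitly; every other preference has a default.
+if __name__ == "__main__":
+    prefs = SelectionPreferences(allow_yanked=False, prefer_binary=True)
+    print(prefs.allow_yanked, prefs.prefer_binary, prefs.ignore_requires_python)
+    # -> False True False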
diff --git a/.venv/Lib/site-packages/pip/_internal/pyproject.py b/.venv/Lib/site-packages/pip/_internal/pyproject.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb8e12b2dec992dc38c87510055d6ccb5f66c828
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/pyproject.py
@@ -0,0 +1,179 @@
+import importlib.util
+import os
+from collections import namedtuple
+from typing import Any, List, Optional
+
+from pip._vendor import tomli
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+
+from pip._internal.exceptions import (
+ InstallationError,
+ InvalidPyProjectBuildRequires,
+ MissingPyProjectBuildRequires,
+)
+
+
+def _is_list_of_str(obj: Any) -> bool:
+ return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
+
+
+def make_pyproject_path(unpacked_source_directory: str) -> str:
+ return os.path.join(unpacked_source_directory, "pyproject.toml")
+
+
+BuildSystemDetails = namedtuple(
+ "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
+)
+
+
+def load_pyproject_toml(
+ use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
+) -> Optional[BuildSystemDetails]:
+ """Load the pyproject.toml file.
+
+ Parameters:
+ use_pep517 - Has the user requested PEP 517 processing? None
+ means the user hasn't explicitly specified.
+ pyproject_toml - Location of the project's pyproject.toml file
+ setup_py - Location of the project's setup.py file
+ req_name - The name of the requirement we're processing (for
+ error reporting)
+
+ Returns:
+ None if we should use the legacy code path, otherwise a tuple
+ (
+ requirements from pyproject.toml,
+ name of PEP 517 backend,
+ requirements we should check are installed after setting
+                up the build environment,
+ directory paths to import the backend from (backend-path),
+ relative to the project root.
+ )
+ """
+ has_pyproject = os.path.isfile(pyproject_toml)
+ has_setup = os.path.isfile(setup_py)
+
+ if not has_pyproject and not has_setup:
+ raise InstallationError(
+ f"{req_name} does not appear to be a Python project: "
+ f"neither 'setup.py' nor 'pyproject.toml' found."
+ )
+
+ if has_pyproject:
+ with open(pyproject_toml, encoding="utf-8") as f:
+ pp_toml = tomli.loads(f.read())
+ build_system = pp_toml.get("build-system")
+ else:
+ build_system = None
+
+ # The following cases must use PEP 517
+ # We check for use_pep517 being non-None and falsey because that means
+ # the user explicitly requested --no-use-pep517. The value 0 as
+ # opposed to False can occur when the value is provided via an
+ # environment variable or config file option (due to the quirk of
+ # strtobool() returning an integer in pip's configuration code).
+ if has_pyproject and not has_setup:
+ if use_pep517 is not None and not use_pep517:
+ raise InstallationError(
+ "Disabling PEP 517 processing is invalid: "
+ "project does not have a setup.py"
+ )
+ use_pep517 = True
+ elif build_system and "build-backend" in build_system:
+ if use_pep517 is not None and not use_pep517:
+ raise InstallationError(
+ "Disabling PEP 517 processing is invalid: "
+ "project specifies a build backend of {} "
+ "in pyproject.toml".format(build_system["build-backend"])
+ )
+ use_pep517 = True
+
+ # If we haven't worked out whether to use PEP 517 yet,
+ # and the user hasn't explicitly stated a preference,
+ # we do so if the project has a pyproject.toml file
+    # or if we cannot import setuptools or wheel.
+
+    # We fall back to PEP 517 when setuptools or the wheel package is not
+    # importable, so that setuptools can be installed as a default build
+    # backend.
+ # For more info see:
+ # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
+ # https://github.com/pypa/pip/issues/8559
+ elif use_pep517 is None:
+ use_pep517 = (
+ has_pyproject
+ or not importlib.util.find_spec("setuptools")
+ or not importlib.util.find_spec("wheel")
+ )
+
+ # At this point, we know whether we're going to use PEP 517.
+ assert use_pep517 is not None
+
+ # If we're using the legacy code path, there is nothing further
+ # for us to do here.
+ if not use_pep517:
+ return None
+
+ if build_system is None:
+ # Either the user has a pyproject.toml with no build-system
+ # section, or the user has no pyproject.toml, but has opted in
+ # explicitly via --use-pep517.
+ # In the absence of any explicit backend specification, we
+ # assume the setuptools backend that most closely emulates the
+ # traditional direct setup.py execution, and require wheel and
+ # a version of setuptools that supports that backend.
+
+ build_system = {
+ "requires": ["setuptools>=40.8.0", "wheel"],
+ "build-backend": "setuptools.build_meta:__legacy__",
+ }
+
+ # If we're using PEP 517, we have build system information (either
+ # from pyproject.toml, or defaulted by the code above).
+ # Note that at this point, we do not know if the user has actually
+ # specified a backend, though.
+ assert build_system is not None
+
+ # Ensure that the build-system section in pyproject.toml conforms
+ # to PEP 518.
+
+ # Specifying the build-system table but not the requires key is invalid
+ if "requires" not in build_system:
+ raise MissingPyProjectBuildRequires(package=req_name)
+
+ # Error out if requires is not a list of strings
+ requires = build_system["requires"]
+ if not _is_list_of_str(requires):
+ raise InvalidPyProjectBuildRequires(
+ package=req_name,
+ reason="It is not a list of strings.",
+ )
+
+ # Each requirement must be valid as per PEP 508
+ for requirement in requires:
+ try:
+ Requirement(requirement)
+ except InvalidRequirement as error:
+ raise InvalidPyProjectBuildRequires(
+ package=req_name,
+ reason=f"It contains an invalid requirement: {requirement!r}",
+ ) from error
+
+ backend = build_system.get("build-backend")
+ backend_path = build_system.get("backend-path", [])
+ check: List[str] = []
+ if backend is None:
+ # If the user didn't specify a backend, we assume they want to use
+ # the setuptools backend. But we can't be sure they have included
+ # a version of setuptools which supplies the backend. So we
+ # make a note to check that this requirement is present once
+ # we have set up the environment.
+ # This is quite a lot of work to check for a very specific case. But
+ # the problem is, that case is potentially quite common - projects that
+ # adopted PEP 518 early for the ability to specify requirements to
+ # execute setup.py, but never considered needing to mention the build
+ # tools themselves. The original PEP 518 code had a similar check (but
+ # implemented in a different way).
+ backend = "setuptools.build_meta:__legacy__"
+ check = ["setuptools>=40.8.0"]
+
+ return BuildSystemDetails(requires, backend, check, backend_path)
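+
+
+# A hedged end-to-end sketch (editor's addition, not upstream pip code):
+# exercises load_pyproject_toml() against a throwaway project that has a
+# pyproject.toml but no setup.py, so PEP 517 processing is forced on.
+# All names and file contents below are illustrative.
+if __name__ == "__main__":
+    import tempfile
+
+    with tempfile.TemporaryDirectory() as tmp:
+        with open(make_pyproject_path(tmp), "w", encoding="utf-8") as f:
+            f.write(
+                '[build-system]\n'
+                'requires = ["setuptools>=40.8.0", "wheel"]\n'
+                'build-backend = "setuptools.build_meta"\n'
+            )
+        details = load_pyproject_toml(
+            use_pep517=None,
+            pyproject_toml=make_pyproject_path(tmp),
+            setup_py=os.path.join(tmp, "setup.py"),
+            req_name="example-project",
+        )
+        # Expect BuildSystemDetails(requires=[...],
+        # backend='setuptools.build_meta', check=[], backend_path=[])
+        print(details)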
diff --git a/.venv/Lib/site-packages/pip/_internal/self_outdated_check.py b/.venv/Lib/site-packages/pip/_internal/self_outdated_check.py
new file mode 100644
index 0000000000000000000000000000000000000000..41cc42c5677ddf0709d9eeb894eb8dbe4fd16f91
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/self_outdated_check.py
@@ -0,0 +1,242 @@
+import datetime
+import functools
+import hashlib
+import json
+import logging
+import optparse
+import os.path
+import sys
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.rich.console import Group
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import DistributionVersion
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.entrypoints import (
+ get_best_invocation_for_this_pip,
+ get_best_invocation_for_this_python,
+)
+from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
+from pip._internal.utils.misc import ensure_dir
+
+_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+def _get_statefile_name(key: str) -> str:
+ key_bytes = key.encode()
+ name = hashlib.sha224(key_bytes).hexdigest()
+ return name
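+
+# For example (editor's note; value illustrative): _get_statefile_name("/usr")
+# is hashlib.sha224(b"/usr").hexdigest(), so each environment prefix gets its
+# own state file under <cache_dir>/selfcheck/.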
+
+
+class SelfCheckState:
+ def __init__(self, cache_dir: str) -> None:
+ self._state: Dict[str, Any] = {}
+ self._statefile_path = None
+
+ # Try to load the existing state
+ if cache_dir:
+ self._statefile_path = os.path.join(
+ cache_dir, "selfcheck", _get_statefile_name(self.key)
+ )
+ try:
+ with open(self._statefile_path, encoding="utf-8") as statefile:
+ self._state = json.load(statefile)
+ except (OSError, ValueError, KeyError):
+ # Explicitly suppressing exceptions, since we don't want to
+ # error out if the cache file is invalid.
+ pass
+
+ @property
+ def key(self) -> str:
+ return sys.prefix
+
+ def get(self, current_time: datetime.datetime) -> Optional[str]:
+        """Return the cached version if it is fresh (under a week old), else None."""
+ if not self._state:
+ return None
+
+ if "last_check" not in self._state:
+ return None
+
+ if "pypi_version" not in self._state:
+ return None
+
+ seven_days_in_seconds = 7 * 24 * 60 * 60
+
+ # Determine if we need to refresh the state
+ last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
+ seconds_since_last_check = (current_time - last_check).total_seconds()
+ if seconds_since_last_check > seven_days_in_seconds:
+ return None
+
+ return self._state["pypi_version"]
+
+ def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
+ # If we do not have a path to cache in, don't bother saving.
+ if not self._statefile_path:
+ return
+
+ # Check to make sure that we own the directory
+ if not check_path_owner(os.path.dirname(self._statefile_path)):
+ return
+
+ # Now that we've ensured the directory is owned by this user, we'll go
+ # ahead and make sure that all our directories are created.
+ ensure_dir(os.path.dirname(self._statefile_path))
+
+ state = {
+ # Include the key so it's easy to tell which pip wrote the
+ # file.
+ "key": self.key,
+ "last_check": current_time.strftime(_DATE_FMT),
+ "pypi_version": pypi_version,
+ }
+
+ text = json.dumps(state, sort_keys=True, separators=(",", ":"))
+
+ with adjacent_tmp_file(self._statefile_path) as f:
+ f.write(text.encode())
+
+ try:
+ # Since we have a prefix-specific state file, we can just
+ # overwrite whatever is there, no need to check.
+ replace(f.name, self._statefile_path)
+ except OSError:
+ # Best effort.
+ pass
+
+
+@dataclass
+class UpgradePrompt:
+ old: str
+ new: str
+
+ def __rich__(self) -> Group:
+ if WINDOWS:
+ pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
+ else:
+ pip_cmd = get_best_invocation_for_this_pip()
+
+ notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
+ return Group(
+ Text(),
+ Text.from_markup(
+ f"{notice} A new release of pip is available: "
+ f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
+ ),
+ Text.from_markup(
+ f"{notice} To update, run: "
+ f"[green]{escape(pip_cmd)} install --upgrade pip"
+ ),
+ )
+
+
+def was_installed_by_pip(pkg: str) -> bool:
+    """Check whether pkg was installed by pip.
+
+    This is used to avoid displaying the upgrade message when pip was in
+    fact installed by a system package manager, such as dnf on Fedora.
+    """
+ dist = get_default_environment().get_distribution(pkg)
+ return dist is not None and "pip" == dist.installer
+
+
+def _get_current_remote_pip_version(
+ session: PipSession, options: optparse.Values
+) -> Optional[str]:
+    # Let's use PackageFinder to see what the latest pip version is.
+ link_collector = LinkCollector.create(
+ session,
+ options=options,
+ suppress_no_index=True,
+ )
+
+ # Pass allow_yanked=False so we don't suggest upgrading to a
+ # yanked version.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=False, # Explicitly set to False
+ )
+
+ finder = PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ )
+ best_candidate = finder.find_best_candidate("pip").best_candidate
+ if best_candidate is None:
+ return None
+
+ return str(best_candidate.version)
+
+
+def _self_version_check_logic(
+ *,
+ state: SelfCheckState,
+ current_time: datetime.datetime,
+ local_version: DistributionVersion,
+ get_remote_version: Callable[[], Optional[str]],
+) -> Optional[UpgradePrompt]:
+ remote_version_str = state.get(current_time)
+ if remote_version_str is None:
+ remote_version_str = get_remote_version()
+ if remote_version_str is None:
+ logger.debug("No remote pip version found")
+ return None
+ state.set(remote_version_str, current_time)
+
+ remote_version = parse_version(remote_version_str)
+ logger.debug("Remote version of pip: %s", remote_version)
+ logger.debug("Local version of pip: %s", local_version)
+
+ pip_installed_by_pip = was_installed_by_pip("pip")
+ logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
+ if not pip_installed_by_pip:
+ return None # Only suggest upgrade if pip is installed by pip.
+
+ local_version_is_older = (
+ local_version < remote_version
+ and local_version.base_version != remote_version.base_version
+ )
+ if local_version_is_older:
+ return UpgradePrompt(old=str(local_version), new=remote_version_str)
+
+ return None
+
+
+def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
+ """Check for an update for pip.
+
+ Limit the frequency of checks to once per week. State is stored either in
+ the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+ of the pip script path.
+ """
+ installed_dist = get_default_environment().get_distribution("pip")
+ if not installed_dist:
+ return
+
+ try:
+ upgrade_prompt = _self_version_check_logic(
+ state=SelfCheckState(cache_dir=options.cache_dir),
+ current_time=datetime.datetime.utcnow(),
+ local_version=installed_dist.version,
+ get_remote_version=functools.partial(
+ _get_current_remote_pip_version, session, options
+ ),
+ )
+ if upgrade_prompt is not None:
+ logger.warning("[present-rich] %s", upgrade_prompt)
+ except Exception:
+ logger.warning("There was an error checking the latest version of pip.")
+ logger.debug("See below for error", exc_info=True)
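+
+
+# A hedged sketch (editor's addition, not upstream pip code) of the pure
+# decision logic above. An empty cache_dir means no state is read or written;
+# the version numbers are made up.
+if __name__ == "__main__":
+    prompt = _self_version_check_logic(
+        state=SelfCheckState(cache_dir=""),
+        current_time=datetime.datetime.utcnow(),
+        local_version=parse_version("22.0"),
+        get_remote_version=lambda: "23.0",
+    )
+    # Prints an UpgradePrompt only if this pip was itself installed by pip,
+    # otherwise None.
+    print(prompt)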
diff --git a/.venv/Lib/site-packages/pip/_internal/wheel_builder.py b/.venv/Lib/site-packages/pip/_internal/wheel_builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..60d75dd18effb6e35b216cdfa3e30b8cc5bd620b
--- /dev/null
+++ b/.venv/Lib/site-packages/pip/_internal/wheel_builder.py
@@ -0,0 +1,355 @@
+"""Orchestrator for building wheels from InstallRequirements.
+"""
+
+import logging
+import os.path
+import re
+import shutil
+from typing import Iterable, List, Optional, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
+from pip._vendor.packaging.version import InvalidVersion, Version
+
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel
+from pip._internal.metadata import FilesystemWheel, get_wheel_distribution
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.build.wheel import build_wheel_pep517
+from pip._internal.operations.build.wheel_editable import build_wheel_editable
+from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import ensure_dir, hash_file
+from pip._internal.utils.setuptools_build import make_setuptools_clean_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
+
+BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
+
+
+def _contains_egg_info(s: str) -> bool:
+ """Determine whether the string looks like an egg_info.
+
+ :param s: The string to parse. E.g. foo-2.1
+ """
+ return bool(_egg_info_re.search(s))
+
+
+def _should_build(
+ req: InstallRequirement,
+ need_wheel: bool,
+) -> bool:
+ """Return whether an InstallRequirement should be built into a wheel."""
+ if req.constraint:
+ # never build requirements that are merely constraints
+ return False
+ if req.is_wheel:
+ if need_wheel:
+ logger.info(
+                "Skipping %s, as it is already a wheel.",
+ req.name,
+ )
+ return False
+
+ if need_wheel:
+ # i.e. pip wheel, not pip install
+ return True
+
+ # From this point, this concerns the pip install command only
+ # (need_wheel=False).
+
+ if not req.source_dir:
+ return False
+
+ if req.editable:
+ # we only build PEP 660 editable requirements
+ return req.supports_pyproject_editable()
+
+ return True
+
+
+def should_build_for_wheel_command(
+ req: InstallRequirement,
+) -> bool:
+ return _should_build(req, need_wheel=True)
+
+
+def should_build_for_install_command(
+ req: InstallRequirement,
+) -> bool:
+ return _should_build(req, need_wheel=False)
+
+
+def _should_cache(
+ req: InstallRequirement,
+) -> Optional[bool]:
+ """
+ Return whether a built InstallRequirement can be stored in the persistent
+ wheel cache, assuming the wheel cache is available, and _should_build()
+ has determined a wheel needs to be built.
+ """
+ if req.editable or not req.source_dir:
+ # never cache editable requirements
+        # never cache editable requirements or those without a source dir
+
+ if req.link and req.link.is_vcs:
+ # VCS checkout. Do not cache
+ # unless it points to an immutable commit hash.
+ assert not req.editable
+ assert req.source_dir
+ vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
+ assert vcs_backend
+ if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
+ return True
+ return False
+
+ assert req.link
+ base, ext = req.link.splitext()
+ if _contains_egg_info(base):
+ return True
+
+ # Otherwise, do not cache.
+ return False
+
+
+def _get_cache_dir(
+ req: InstallRequirement,
+ wheel_cache: WheelCache,
+) -> str:
+ """Return the persistent or temporary cache directory where the built
+    wheel needs to be stored.
+ """
+ cache_available = bool(wheel_cache.cache_dir)
+ assert req.link
+ if cache_available and _should_cache(req):
+ cache_dir = wheel_cache.get_path_for_link(req.link)
+ else:
+ cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
+ return cache_dir
+
+
+def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
+ canonical_name = canonicalize_name(req.name or "")
+ w = Wheel(os.path.basename(wheel_path))
+ if canonicalize_name(w.name) != canonical_name:
+ raise InvalidWheelFilename(
+ "Wheel has unexpected file name: expected {!r}, "
+ "got {!r}".format(canonical_name, w.name),
+ )
+ dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
+ dist_verstr = str(dist.version)
+ if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
+ raise InvalidWheelFilename(
+ "Wheel has unexpected file name: expected {!r}, "
+ "got {!r}".format(dist_verstr, w.version),
+ )
+ metadata_version_value = dist.metadata_version
+ if metadata_version_value is None:
+ raise UnsupportedWheel("Missing Metadata-Version")
+ try:
+ metadata_version = Version(metadata_version_value)
+ except InvalidVersion:
+ msg = f"Invalid Metadata-Version: {metadata_version_value}"
+ raise UnsupportedWheel(msg)
+ if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
+ raise UnsupportedWheel(
+ "Metadata 1.2 mandates PEP 440 version, "
+ "but {!r} is not".format(dist_verstr)
+ )
+
+
+def _build_one(
+ req: InstallRequirement,
+ output_dir: str,
+ verify: bool,
+ build_options: List[str],
+ global_options: List[str],
+ editable: bool,
+) -> Optional[str]:
+ """Build one wheel.
+
+ :return: The filename of the built wheel, or None if the build failed.
+ """
+ artifact = "editable" if editable else "wheel"
+ try:
+ ensure_dir(output_dir)
+ except OSError as e:
+ logger.warning(
+ "Building %s for %s failed: %s",
+ artifact,
+ req.name,
+ e,
+ )
+ return None
+
+ # Install build deps into temporary directory (PEP 518)
+ with req.build_env:
+ wheel_path = _build_one_inside_env(
+ req, output_dir, build_options, global_options, editable
+ )
+ if wheel_path and verify:
+ try:
+ _verify_one(req, wheel_path)
+ except (InvalidWheelFilename, UnsupportedWheel) as e:
+ logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e)
+ return None
+ return wheel_path
+
+
+def _build_one_inside_env(
+ req: InstallRequirement,
+ output_dir: str,
+ build_options: List[str],
+ global_options: List[str],
+ editable: bool,
+) -> Optional[str]:
+ with TempDirectory(kind="wheel") as temp_dir:
+ assert req.name
+ if req.use_pep517:
+ assert req.metadata_directory
+ assert req.pep517_backend
+ if global_options:
+ logger.warning(
+ "Ignoring --global-option when building %s using PEP 517", req.name
+ )
+ if build_options:
+ logger.warning(
+ "Ignoring --build-option when building %s using PEP 517", req.name
+ )
+ if editable:
+ wheel_path = build_wheel_editable(
+ name=req.name,
+ backend=req.pep517_backend,
+ metadata_directory=req.metadata_directory,
+ tempd=temp_dir.path,
+ )
+ else:
+ wheel_path = build_wheel_pep517(
+ name=req.name,
+ backend=req.pep517_backend,
+ metadata_directory=req.metadata_directory,
+ tempd=temp_dir.path,
+ )
+ else:
+ wheel_path = build_wheel_legacy(
+ name=req.name,
+ setup_py_path=req.setup_py_path,
+ source_dir=req.unpacked_source_directory,
+ global_options=global_options,
+ build_options=build_options,
+ tempd=temp_dir.path,
+ )
+
+ if wheel_path is not None:
+ wheel_name = os.path.basename(wheel_path)
+ dest_path = os.path.join(output_dir, wheel_name)
+ try:
+ wheel_hash, length = hash_file(wheel_path)
+ shutil.move(wheel_path, dest_path)
+ logger.info(
+ "Created wheel for %s: filename=%s size=%d sha256=%s",
+ req.name,
+ wheel_name,
+ length,
+ wheel_hash.hexdigest(),
+ )
+ logger.info("Stored in directory: %s", output_dir)
+ return dest_path
+ except Exception as e:
+ logger.warning(
+ "Building wheel for %s failed: %s",
+ req.name,
+ e,
+ )
+ # Ignore return, we can't do anything else useful.
+ if not req.use_pep517:
+ _clean_one_legacy(req, global_options)
+ return None
+
+
+def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool:
+ clean_args = make_setuptools_clean_args(
+ req.setup_py_path,
+ global_options=global_options,
+ )
+
+ logger.info("Running setup.py clean for %s", req.name)
+ try:
+ call_subprocess(
+ clean_args, command_desc="python setup.py clean", cwd=req.source_dir
+ )
+ return True
+ except Exception:
+ logger.error("Failed cleaning build dir for %s", req.name)
+ return False
+
+
+def build(
+ requirements: Iterable[InstallRequirement],
+ wheel_cache: WheelCache,
+ verify: bool,
+ build_options: List[str],
+ global_options: List[str],
+) -> BuildResult:
+ """Build wheels.
+
+    :return: The list of InstallRequirements that built successfully and
+        the list of InstallRequirements that failed to build.
+ """
+ if not requirements:
+ return [], []
+
+ # Build the wheels.
+ logger.info(
+ "Building wheels for collected packages: %s",
+ ", ".join(req.name for req in requirements), # type: ignore
+ )
+
+ with indent_log():
+ build_successes, build_failures = [], []
+ for req in requirements:
+ assert req.name
+ cache_dir = _get_cache_dir(req, wheel_cache)
+ wheel_file = _build_one(
+ req,
+ cache_dir,
+ verify,
+ build_options,
+ global_options,
+ req.editable and req.permit_editable_wheels,
+ )
+ if wheel_file:
+ # Record the download origin in the cache
+ if req.download_info is not None:
+ # download_info is guaranteed to be set because when we build an
+ # InstallRequirement it has been through the preparer before, but
+ # let's be cautious.
+ wheel_cache.record_download_origin(cache_dir, req.download_info)
+ # Update the link for this.
+ req.link = Link(path_to_url(wheel_file))
+ req.local_file_path = req.link.file_path
+ assert req.link.is_wheel
+ build_successes.append(req)
+ else:
+ build_failures.append(req)
+
+ # notify success/failure
+ if build_successes:
+ logger.info(
+ "Successfully built %s",
+ " ".join([req.name for req in build_successes]), # type: ignore
+ )
+ if build_failures:
+ logger.info(
+ "Failed to build %s",
+ " ".join([req.name for req in build_failures]), # type: ignore
+ )
+    # Return both the successfully built and the failed requirements.
+ return build_successes, build_failures
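+
+
+# A hedged sanity check (editor's addition, not upstream pip code) of two
+# helpers used above; all names below are illustrative.
+if __name__ == "__main__":
+    # _should_cache() only caches links whose basename looks like name-version:
+    print(_contains_egg_info("pip-22.3.1"))  # True: a name-version pair
+    print(_contains_egg_info("master"))      # False: no version component
+    # _verify_one() compares these parsed fields against the built metadata:
+    w = Wheel("pip-22.3.1-py3-none-any.whl")
+    print(w.name, w.version)  # -> pip 22.3.1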
diff --git a/README.md b/README.md
index 7be5fc7f47d5db027d120b8024982df93db95b74..1f48c32e9df46945f0ac7142bfda001a1beb4747 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,3 @@
---
-license: mit
+license: cc-by-nc-4.0
---