content
stringlengths
1
103k
path
stringlengths
8
216
filename
stringlengths
2
179
language
stringclasses
15 values
size_bytes
int64
2
189k
quality_score
float64
0.5
0.95
complexity
float64
0
1
documentation_ratio
float64
0
1
repository
stringclasses
5 values
stars
int64
0
1k
created_date
stringdate
2023-07-10 19:21:08
2025-07-09 19:11:45
license
stringclasses
4 values
is_test
bool
2 classes
file_hash
stringlengths
32
32
\n\n
.venv\Lib\site-packages\pip\_internal\resolution\__pycache__\base.cpython-313.pyc
base.cpython-313.pyc
Other
1,249
0.8
0
0
node-utils
82
2023-09-18T18:12:24.691373
BSD-3-Clause
false
d98c6cf107aa2ed67aeb8f5e1e6c3c88
\n\n
.venv\Lib\site-packages\pip\_internal\resolution\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
199
0.7
0
0
node-utils
534
2023-10-29T12:47:34.203886
MIT
false
ddcc7586f05e3abb012dbb5424a8dbe1
"""\nThis code wraps the vendored appdirs module to so the return values are\ncompatible for the current pip code base.\n\nThe intention is to rewrite current usages gradually, keeping the tests pass,\nand eventually drop this after all usages are changed.\n"""\n\nimport os\nimport sys\nfrom typing import List\n\nfrom pip._vendor import platformdirs as _appdirs\n\n\ndef user_cache_dir(appname: str) -> str:\n return _appdirs.user_cache_dir(appname, appauthor=False)\n\n\ndef _macos_user_config_dir(appname: str, roaming: bool = True) -> str:\n # Use ~/Application Support/pip, if the directory exists.\n path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)\n if os.path.isdir(path):\n return path\n\n # Use a Linux-like ~/.config/pip, by default.\n linux_like_path = "~/.config/"\n if appname:\n linux_like_path = os.path.join(linux_like_path, appname)\n\n return os.path.expanduser(linux_like_path)\n\n\ndef user_config_dir(appname: str, roaming: bool = True) -> str:\n if sys.platform == "darwin":\n return _macos_user_config_dir(appname, roaming)\n\n return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)\n\n\n# for the discussion regarding site_config_dir locations\n# see <https://github.com/pypa/pip/issues/1733>\ndef site_config_dirs(appname: str) -> List[str]:\n if sys.platform == "darwin":\n dirval = _appdirs.site_data_dir(appname, appauthor=False, multipath=True)\n return dirval.split(os.pathsep)\n\n dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)\n if sys.platform == "win32":\n return [dirval]\n\n # Unix-y system. Look in /etc as well.\n return dirval.split(os.pathsep) + ["/etc"]\n
.venv\Lib\site-packages\pip\_internal\utils\appdirs.py
appdirs.py
Python
1,705
0.95
0.226415
0.135135
react-lib
813
2024-04-26T12:26:23.259825
GPL-3.0
false
cdb2f314ea65225cf3b01ada84f9c312
"""Stuff that differs in different Python versions and platform\ndistributions."""\n\nimport importlib.resources\nimport logging\nimport os\nimport sys\nfrom typing import IO\n\n__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef has_tls() -> bool:\n try:\n import _ssl # noqa: F401 # ignore unused\n\n return True\n except ImportError:\n pass\n\n from pip._vendor.urllib3.util import IS_PYOPENSSL\n\n return IS_PYOPENSSL\n\n\ndef get_path_uid(path: str) -> int:\n """\n Return path's uid.\n\n Does not follow symlinks:\n https://github.com/pypa/pip/pull/935#discussion_r5307003\n\n Placed this function in compat due to differences on AIX and\n Jython, that should eventually go away.\n\n :raises OSError: When path is a symlink or can't be read.\n """\n if hasattr(os, "O_NOFOLLOW"):\n fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)\n file_uid = os.fstat(fd).st_uid\n os.close(fd)\n else: # AIX and Jython\n # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW\n if not os.path.islink(path):\n # older versions of Jython don't have `os.fstat`\n file_uid = os.stat(path).st_uid\n else:\n # raise OSError for parity with os.O_NOFOLLOW above\n raise OSError(f"{path} is a symlink; Will not return uid for symlinks")\n return file_uid\n\n\n# The importlib.resources.open_text function was deprecated in 3.11 with suggested\n# replacement we use below.\nif sys.version_info < (3, 11):\n open_text_resource = importlib.resources.open_text\nelse:\n\n def open_text_resource(\n package: str, resource: str, encoding: str = "utf-8", errors: str = "strict"\n ) -> IO[str]:\n return (importlib.resources.files(package) / resource).open(\n "r", encoding=encoding, errors=errors\n )\n\n\n# packages in the stdlib that may have installation metadata, but should not be\n# considered 'installed'. 
this theoretically could be determined based on\n# dist.location (py27:`sysconfig.get_paths()['stdlib']`,\n# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may\n# make this ineffective, so hard-coding\nstdlib_pkgs = {"python", "wsgiref", "argparse"}\n\n\n# windows detection, covers cpython and ironpython\nWINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")\n
.venv\Lib\site-packages\pip\_internal\utils\compat.py
compat.py
Python
2,399
0.95
0.139241
0.189655
python-kit
738
2025-02-14T14:59:22.991006
MIT
false
412f6c16b2c8c0a5157f84af2cac05c7
"""Generate and work with PEP 425 Compatibility Tags."""\n\nimport re\nfrom typing import List, Optional, Tuple\n\nfrom pip._vendor.packaging.tags import (\n PythonVersion,\n Tag,\n android_platforms,\n compatible_tags,\n cpython_tags,\n generic_tags,\n interpreter_name,\n interpreter_version,\n ios_platforms,\n mac_platforms,\n)\n\n_apple_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")\n\n\ndef version_info_to_nodot(version_info: Tuple[int, ...]) -> str:\n # Only use up to the first two numbers.\n return "".join(map(str, version_info[:2]))\n\n\ndef _mac_platforms(arch: str) -> List[str]:\n match = _apple_arch_pat.match(arch)\n if match:\n name, major, minor, actual_arch = match.groups()\n mac_version = (int(major), int(minor))\n arches = [\n # Since we have always only checked that the platform starts\n # with "macosx", for backwards-compatibility we extract the\n # actual prefix provided by the user in case they provided\n # something like "macosxcustom_". It may be good to remove\n # this as undocumented or deprecate it in the future.\n "{}_{}".format(name, arch[len("macosx_") :])\n for arch in mac_platforms(mac_version, actual_arch)\n ]\n else:\n # arch pattern didn't match (?!)\n arches = [arch]\n return arches\n\n\ndef _ios_platforms(arch: str) -> List[str]:\n match = _apple_arch_pat.match(arch)\n if match:\n name, major, minor, actual_multiarch = match.groups()\n ios_version = (int(major), int(minor))\n arches = [\n # Since we have always only checked that the platform starts\n # with "ios", for backwards-compatibility we extract the\n # actual prefix provided by the user in case they provided\n # something like "ioscustom_". 
It may be good to remove\n # this as undocumented or deprecate it in the future.\n "{}_{}".format(name, arch[len("ios_") :])\n for arch in ios_platforms(ios_version, actual_multiarch)\n ]\n else:\n # arch pattern didn't match (?!)\n arches = [arch]\n return arches\n\n\ndef _android_platforms(arch: str) -> List[str]:\n match = re.fullmatch(r"android_(\d+)_(.+)", arch)\n if match:\n api_level, abi = match.groups()\n return list(android_platforms(int(api_level), abi))\n else:\n # arch pattern didn't match (?!)\n return [arch]\n\n\ndef _custom_manylinux_platforms(arch: str) -> List[str]:\n arches = [arch]\n arch_prefix, arch_sep, arch_suffix = arch.partition("_")\n if arch_prefix == "manylinux2014":\n # manylinux1/manylinux2010 wheels run on most manylinux2014 systems\n # with the exception of wheels depending on ncurses. PEP 599 states\n # manylinux1/manylinux2010 wheels should be considered\n # manylinux2014 wheels:\n # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels\n if arch_suffix in {"i686", "x86_64"}:\n arches.append("manylinux2010" + arch_sep + arch_suffix)\n arches.append("manylinux1" + arch_sep + arch_suffix)\n elif arch_prefix == "manylinux2010":\n # manylinux1 wheels run on most manylinux2010 systems with the\n # exception of wheels depending on ncurses. 
PEP 571 states\n # manylinux1 wheels should be considered manylinux2010 wheels:\n # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels\n arches.append("manylinux1" + arch_sep + arch_suffix)\n return arches\n\n\ndef _get_custom_platforms(arch: str) -> List[str]:\n arch_prefix, arch_sep, arch_suffix = arch.partition("_")\n if arch.startswith("macosx"):\n arches = _mac_platforms(arch)\n elif arch.startswith("ios"):\n arches = _ios_platforms(arch)\n elif arch_prefix == "android":\n arches = _android_platforms(arch)\n elif arch_prefix in ["manylinux2014", "manylinux2010"]:\n arches = _custom_manylinux_platforms(arch)\n else:\n arches = [arch]\n return arches\n\n\ndef _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:\n if not platforms:\n return None\n\n seen = set()\n result = []\n\n for p in platforms:\n if p in seen:\n continue\n additions = [c for c in _get_custom_platforms(p) if c not in seen]\n seen.update(additions)\n result.extend(additions)\n\n return result\n\n\ndef _get_python_version(version: str) -> PythonVersion:\n if len(version) > 1:\n return int(version[0]), int(version[1:])\n else:\n return (int(version[0]),)\n\n\ndef _get_custom_interpreter(\n implementation: Optional[str] = None, version: Optional[str] = None\n) -> str:\n if implementation is None:\n implementation = interpreter_name()\n if version is None:\n version = interpreter_version()\n return f"{implementation}{version}"\n\n\ndef get_supported(\n version: Optional[str] = None,\n platforms: Optional[List[str]] = None,\n impl: Optional[str] = None,\n abis: Optional[List[str]] = None,\n) -> List[Tag]:\n """Return a list of supported tags for each version specified in\n `versions`.\n\n :param version: a string version, of the form "33" or "32",\n or None. The version will be assumed to support our ABI.\n :param platform: specify a list of platforms you want valid\n tags for, or None. 
If None, use the local system platform.\n :param impl: specify the exact implementation you want valid\n tags for, or None. If None, use the local interpreter impl.\n :param abis: specify a list of abis you want valid\n tags for, or None. If None, use the local interpreter abi.\n """\n supported: List[Tag] = []\n\n python_version: Optional[PythonVersion] = None\n if version is not None:\n python_version = _get_python_version(version)\n\n interpreter = _get_custom_interpreter(impl, version)\n\n platforms = _expand_allowed_platforms(platforms)\n\n is_cpython = (impl or interpreter_name()) == "cp"\n if is_cpython:\n supported.extend(\n cpython_tags(\n python_version=python_version,\n abis=abis,\n platforms=platforms,\n )\n )\n else:\n supported.extend(\n generic_tags(\n interpreter=interpreter,\n abis=abis,\n platforms=platforms,\n )\n )\n supported.extend(\n compatible_tags(\n python_version=python_version,\n interpreter=interpreter,\n platforms=platforms,\n )\n )\n\n return supported\n
.venv\Lib\site-packages\pip\_internal\utils\compatibility_tags.py
compatibility_tags.py
Python
6,662
0.95
0.17
0.136905
python-kit
330
2024-04-07T15:45:42.588990
GPL-3.0
false
b3676d3412de626d67ed0b5ce011318d
"""For when pip wants to check the date or time."""\n\nimport datetime\n\n\ndef today_is_later_than(year: int, month: int, day: int) -> bool:\n today = datetime.date.today()\n given = datetime.date(year, month, day)\n\n return today > given\n
.venv\Lib\site-packages\pip\_internal\utils\datetime.py
datetime.py
Python
241
0.85
0.1
0
react-lib
448
2024-11-24T01:50:19.220468
GPL-3.0
false
06b725884db94eebe71362f5fdaa43c7
"""\nA module that implements tooling to enable easy warnings about deprecations.\n"""\n\nimport logging\nimport warnings\nfrom typing import Any, Optional, TextIO, Type, Union\n\nfrom pip._vendor.packaging.version import parse\n\nfrom pip import __version__ as current_version # NOTE: tests patch this name.\n\nDEPRECATION_MSG_PREFIX = "DEPRECATION: "\n\n\nclass PipDeprecationWarning(Warning):\n pass\n\n\n_original_showwarning: Any = None\n\n\n# Warnings <-> Logging Integration\ndef _showwarning(\n message: Union[Warning, str],\n category: Type[Warning],\n filename: str,\n lineno: int,\n file: Optional[TextIO] = None,\n line: Optional[str] = None,\n) -> None:\n if file is not None:\n if _original_showwarning is not None:\n _original_showwarning(message, category, filename, lineno, file, line)\n elif issubclass(category, PipDeprecationWarning):\n # We use a specially named logger which will handle all of the\n # deprecation messages for pip.\n logger = logging.getLogger("pip._internal.deprecations")\n logger.warning(message)\n else:\n _original_showwarning(message, category, filename, lineno, file, line)\n\n\ndef install_warning_logger() -> None:\n # Enable our Deprecation Warnings\n warnings.simplefilter("default", PipDeprecationWarning, append=True)\n\n global _original_showwarning\n\n if _original_showwarning is None:\n _original_showwarning = warnings.showwarning\n warnings.showwarning = _showwarning\n\n\ndef deprecated(\n *,\n reason: str,\n replacement: Optional[str],\n gone_in: Optional[str],\n feature_flag: Optional[str] = None,\n issue: Optional[int] = None,\n) -> None:\n """Helper to deprecate existing functionality.\n\n reason:\n Textual reason shown to the user about why this functionality has\n been deprecated. 
Should be a complete sentence.\n replacement:\n Textual suggestion shown to the user about what alternative\n functionality they can use.\n gone_in:\n The version of pip does this functionality should get removed in.\n Raises an error if pip's current version is greater than or equal to\n this.\n feature_flag:\n Command-line flag of the form --use-feature={feature_flag} for testing\n upcoming functionality.\n issue:\n Issue number on the tracker that would serve as a useful place for\n users to find related discussion and provide feedback.\n """\n\n # Determine whether or not the feature is already gone in this version.\n is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)\n\n message_parts = [\n (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),\n (\n gone_in,\n (\n "pip {} will enforce this behaviour change."\n if not is_gone\n else "Since pip {}, this is no longer supported."\n ),\n ),\n (\n replacement,\n "A possible replacement is {}.",\n ),\n (\n feature_flag,\n (\n "You can use the flag --use-feature={} to test the upcoming behaviour."\n if not is_gone\n else None\n ),\n ),\n (\n issue,\n "Discussion can be found at https://github.com/pypa/pip/issues/{}",\n ),\n ]\n\n message = " ".join(\n format_str.format(value)\n for value, format_str in message_parts\n if format_str is not None and value is not None\n )\n\n # Raise as an error if this behaviour is deprecated.\n if is_gone:\n raise PipDeprecationWarning(message)\n\n warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)\n
.venv\Lib\site-packages\pip\_internal\utils\deprecation.py
deprecation.py
Python
3,707
0.95
0.137097
0.068627
vue-tools
79
2024-05-31T06:01:57.018662
MIT
false
a2b8aef0ef4cf4839812e40bae5db807
from typing import Optional\n\nfrom pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo\nfrom pip._internal.models.link import Link\nfrom pip._internal.utils.urls import path_to_url\nfrom pip._internal.vcs import vcs\n\n\ndef direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:\n """Convert a DirectUrl to a pip requirement string."""\n direct_url.validate() # if invalid, this is a pip bug\n requirement = name + " @ "\n fragments = []\n if isinstance(direct_url.info, VcsInfo):\n requirement += (\n f"{direct_url.info.vcs}+{direct_url.url}@{direct_url.info.commit_id}"\n )\n elif isinstance(direct_url.info, ArchiveInfo):\n requirement += direct_url.url\n if direct_url.info.hash:\n fragments.append(direct_url.info.hash)\n else:\n assert isinstance(direct_url.info, DirInfo)\n requirement += direct_url.url\n if direct_url.subdirectory:\n fragments.append("subdirectory=" + direct_url.subdirectory)\n if fragments:\n requirement += "#" + "&".join(fragments)\n return requirement\n\n\ndef direct_url_for_editable(source_dir: str) -> DirectUrl:\n return DirectUrl(\n url=path_to_url(source_dir),\n info=DirInfo(editable=True),\n )\n\n\ndef direct_url_from_link(\n link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False\n) -> DirectUrl:\n if link.is_vcs:\n vcs_backend = vcs.get_backend_for_scheme(link.scheme)\n assert vcs_backend\n url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(\n link.url_without_fragment\n )\n # For VCS links, we need to find out and add commit_id.\n if link_is_in_wheel_cache:\n # If the requested VCS link corresponds to a cached\n # wheel, it means the requested revision was an\n # immutable commit hash, otherwise it would not have\n # been cached. 
In that case we don't have a source_dir\n # with the VCS checkout.\n assert requested_revision\n commit_id = requested_revision\n else:\n # If the wheel was not in cache, it means we have\n # had to checkout from VCS to build and we have a source_dir\n # which we can inspect to find out the commit id.\n assert source_dir\n commit_id = vcs_backend.get_revision(source_dir)\n return DirectUrl(\n url=url,\n info=VcsInfo(\n vcs=vcs_backend.name,\n commit_id=commit_id,\n requested_revision=requested_revision,\n ),\n subdirectory=link.subdirectory_fragment,\n )\n elif link.is_existing_dir():\n return DirectUrl(\n url=link.url_without_fragment,\n info=DirInfo(),\n subdirectory=link.subdirectory_fragment,\n )\n else:\n hash = None\n hash_name = link.hash_name\n if hash_name:\n hash = f"{hash_name}={link.hash}"\n return DirectUrl(\n url=link.url_without_fragment,\n info=ArchiveInfo(hash=hash),\n subdirectory=link.subdirectory_fragment,\n )\n
.venv\Lib\site-packages\pip\_internal\utils\direct_url_helpers.py
direct_url_helpers.py
Python
3,196
0.95
0.126437
0.1125
python-kit
677
2025-01-14T02:49:02.831805
Apache-2.0
false
1c72a57c06feb0a553e09137539e5263
import os\nimport re\nimport sys\nfrom typing import List, Optional\n\nfrom pip._internal.locations import site_packages, user_site\nfrom pip._internal.utils.virtualenv import (\n running_under_virtualenv,\n virtualenv_no_global,\n)\n\n__all__ = [\n "egg_link_path_from_sys_path",\n "egg_link_path_from_location",\n]\n\n\ndef _egg_link_names(raw_name: str) -> List[str]:\n """\n Convert a Name metadata value to a .egg-link name, by applying\n the same substitution as pkg_resources's safe_name function.\n Note: we cannot use canonicalize_name because it has a different logic.\n\n We also look for the raw name (without normalization) as setuptools 69 changed\n the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167).\n """\n return [\n re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link",\n f"{raw_name}.egg-link",\n ]\n\n\ndef egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:\n """\n Look for a .egg-link file for project name, by walking sys.path.\n """\n egg_link_names = _egg_link_names(raw_name)\n for path_item in sys.path:\n for egg_link_name in egg_link_names:\n egg_link = os.path.join(path_item, egg_link_name)\n if os.path.isfile(egg_link):\n return egg_link\n return None\n\n\ndef egg_link_path_from_location(raw_name: str) -> Optional[str]:\n """\n Return the path for the .egg-link file if it exists, otherwise, None.\n\n There's 3 scenarios:\n 1) not in a virtualenv\n try to find in site.USER_SITE, then site_packages\n 2) in a no-global virtualenv\n try to find in site_packages\n 3) in a yes-global virtualenv\n try to find in site_packages, then site.USER_SITE\n (don't look in global location)\n\n For #1 and #3, there could be odd cases, where there's an egg-link in 2\n locations.\n\n This method will just return the first one found.\n """\n sites: List[str] = []\n if running_under_virtualenv():\n sites.append(site_packages)\n if not virtualenv_no_global() and user_site:\n sites.append(user_site)\n else:\n if user_site:\n 
sites.append(user_site)\n sites.append(site_packages)\n\n egg_link_names = _egg_link_names(raw_name)\n for site in sites:\n for egg_link_name in egg_link_names:\n egglink = os.path.join(site, egg_link_name)\n if os.path.isfile(egglink):\n return egglink\n return None\n
.venv\Lib\site-packages\pip\_internal\utils\egg_link.py
egg_link.py
Python
2,463
0.95
0.2625
0
node-utils
909
2024-02-19T11:08:58.776764
Apache-2.0
false
779e46db7bbc718a77b123cf76078ce1
import itertools\nimport os\nimport shutil\nimport sys\nfrom typing import List, Optional\n\nfrom pip._internal.cli.main import main\nfrom pip._internal.utils.compat import WINDOWS\n\n_EXECUTABLE_NAMES = [\n "pip",\n f"pip{sys.version_info.major}",\n f"pip{sys.version_info.major}.{sys.version_info.minor}",\n]\nif WINDOWS:\n _allowed_extensions = {"", ".exe"}\n _EXECUTABLE_NAMES = [\n "".join(parts)\n for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)\n ]\n\n\ndef _wrapper(args: Optional[List[str]] = None) -> int:\n """Central wrapper for all old entrypoints.\n\n Historically pip has had several entrypoints defined. Because of issues\n arising from PATH, sys.path, multiple Pythons, their interactions, and most\n of them having a pip installed, users suffer every time an entrypoint gets\n moved.\n\n To alleviate this pain, and provide a mechanism for warning users and\n directing them to an appropriate place for help, we now define all of\n our old entrypoints as wrappers for the current one.\n """\n sys.stderr.write(\n "WARNING: pip is being invoked by an old script wrapper. 
This will "\n "fail in a future version of pip.\n"\n "Please see https://github.com/pypa/pip/issues/5599 for advice on "\n "fixing the underlying issue.\n"\n "To avoid this problem you can invoke Python with '-m pip' instead of "\n "running pip directly.\n"\n )\n return main(args)\n\n\ndef get_best_invocation_for_this_pip() -> str:\n """Try to figure out the best way to invoke pip in the current environment."""\n binary_directory = "Scripts" if WINDOWS else "bin"\n binary_prefix = os.path.join(sys.prefix, binary_directory)\n\n # Try to use pip[X[.Y]] names, if those executables for this environment are\n # the first on PATH with that name.\n path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)\n exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts\n if exe_are_in_PATH:\n for exe_name in _EXECUTABLE_NAMES:\n found_executable = shutil.which(exe_name)\n binary_executable = os.path.join(binary_prefix, exe_name)\n if (\n found_executable\n and os.path.exists(binary_executable)\n and os.path.samefile(\n found_executable,\n binary_executable,\n )\n ):\n return exe_name\n\n # Use the `-m` invocation, if there's no "nice" invocation.\n return f"{get_best_invocation_for_this_python()} -m pip"\n\n\ndef get_best_invocation_for_this_python() -> str:\n """Try to figure out the best way to invoke the current Python."""\n exe = sys.executable\n exe_name = os.path.basename(exe)\n\n # Try to use the basename, if it's the first executable.\n found_executable = shutil.which(exe_name)\n # Virtual environments often symlink to their parent Python binaries, but we don't\n # want to treat the Python binaries as equivalent when the environment's Python is\n # not on PATH (not activated). Thus, we don't follow symlinks.\n if found_executable and os.path.samestat(os.lstat(found_executable), os.lstat(exe)):\n return exe_name\n\n # Use the full executable name, because we couldn't find something simpler.\n return exe\n
.venv\Lib\site-packages\pip\_internal\utils\entrypoints.py
entrypoints.py
Python
3,325
0.95
0.218391
0.109589
awesome-app
471
2023-11-21T14:18:26.347940
Apache-2.0
false
085e306d029457f2a595c7fc2ccefd02
import fnmatch\nimport os\nimport os.path\nimport random\nimport sys\nfrom contextlib import contextmanager\nfrom tempfile import NamedTemporaryFile\nfrom typing import Any, BinaryIO, Generator, List, Union, cast\n\nfrom pip._internal.utils.compat import get_path_uid\nfrom pip._internal.utils.misc import format_size\nfrom pip._internal.utils.retry import retry\n\n\ndef check_path_owner(path: str) -> bool:\n # If we don't have a way to check the effective uid of this process, then\n # we'll just assume that we own the directory.\n if sys.platform == "win32" or not hasattr(os, "geteuid"):\n return True\n\n assert os.path.isabs(path)\n\n previous = None\n while path != previous:\n if os.path.lexists(path):\n # Check if path is writable by current user.\n if os.geteuid() == 0:\n # Special handling for root user in order to handle properly\n # cases where users use sudo without -H flag.\n try:\n path_uid = get_path_uid(path)\n except OSError:\n return False\n return path_uid == 0\n else:\n return os.access(path, os.W_OK)\n else:\n previous, path = path, os.path.dirname(path)\n return False # assume we don't own the path\n\n\n@contextmanager\ndef adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:\n """Return a file-like object pointing to a tmp file next to path.\n\n The file is created securely and is ensured to be written to disk\n after the context reaches its end.\n\n kwargs will be passed to tempfile.NamedTemporaryFile to control\n the way the temporary file will be opened.\n """\n with NamedTemporaryFile(\n delete=False,\n dir=os.path.dirname(path),\n prefix=os.path.basename(path),\n suffix=".tmp",\n **kwargs,\n ) as f:\n result = cast(BinaryIO, f)\n try:\n yield result\n finally:\n result.flush()\n os.fsync(result.fileno())\n\n\nreplace = retry(stop_after_delay=1, wait=0.25)(os.replace)\n\n\n# test_writable_dir and _test_writable_dir_win are copied from Flit,\n# with the author's agreement to also place them under pip's license.\ndef 
test_writable_dir(path: str) -> bool:\n """Check if a directory is writable.\n\n Uses os.access() on POSIX, tries creating files on Windows.\n """\n # If the directory doesn't exist, find the closest parent that does.\n while not os.path.isdir(path):\n parent = os.path.dirname(path)\n if parent == path:\n break # Should never get here, but infinite loops are bad\n path = parent\n\n if os.name == "posix":\n return os.access(path, os.W_OK)\n\n return _test_writable_dir_win(path)\n\n\ndef _test_writable_dir_win(path: str) -> bool:\n # os.access doesn't work on Windows: http://bugs.python.org/issue2528\n # and we can't use tempfile: http://bugs.python.org/issue22107\n basename = "accesstest_deleteme_fishfingers_custard_"\n alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"\n for _ in range(10):\n name = basename + "".join(random.choice(alphabet) for _ in range(6))\n file = os.path.join(path, name)\n try:\n fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)\n except FileExistsError:\n pass\n except PermissionError:\n # This could be because there's a directory with the same name.\n # But it's highly unlikely there's a directory called that,\n # so we'll assume it's because the parent dir is not writable.\n # This could as well be because the parent dir is not readable,\n # due to non-privileged user access.\n return False\n else:\n os.close(fd)\n os.unlink(file)\n return True\n\n # This should never be reached\n raise OSError("Unexpected condition testing for writable directory")\n\n\ndef find_files(path: str, pattern: str) -> List[str]:\n """Returns a list of absolute paths of files beneath path, recursively,\n with filenames which match the UNIX-style shell glob pattern."""\n result: List[str] = []\n for root, _, files in os.walk(path):\n matches = fnmatch.filter(files, pattern)\n result.extend(os.path.join(root, f) for f in matches)\n return result\n\n\ndef file_size(path: str) -> Union[int, float]:\n # If it's a symlink, return 0.\n if os.path.islink(path):\n 
return 0\n return os.path.getsize(path)\n\n\ndef format_file_size(path: str) -> str:\n return format_size(file_size(path))\n\n\ndef directory_size(path: str) -> Union[int, float]:\n size = 0.0\n for root, _dirs, files in os.walk(path):\n for filename in files:\n file_path = os.path.join(root, filename)\n size += file_size(file_path)\n return size\n\n\ndef format_directory_size(path: str) -> str:\n return format_size(directory_size(path))\n
.venv\Lib\site-packages\pip\_internal\utils\filesystem.py
filesystem.py
Python
4,950
0.95
0.201342
0.15
awesome-app
74
2024-09-07T23:17:47.505331
MIT
false
7c9ac0d7af57aab6094386b1309e331e
"""Filetype information."""\n\nfrom typing import Tuple\n\nfrom pip._internal.utils.misc import splitext\n\nWHEEL_EXTENSION = ".whl"\nBZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")\nXZ_EXTENSIONS: Tuple[str, ...] = (\n ".tar.xz",\n ".txz",\n ".tlz",\n ".tar.lz",\n ".tar.lzma",\n)\nZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)\nTAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")\nARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS\n\n\ndef is_archive_file(name: str) -> bool:\n """Return True if `name` is a considered as an archive file."""\n ext = splitext(name)[1].lower()\n if ext in ARCHIVE_EXTENSIONS:\n return True\n return False\n
.venv\Lib\site-packages\pip\_internal\utils\filetypes.py
filetypes.py
Python
715
0.85
0.115385
0
python-kit
790
2023-09-04T14:23:15.483188
GPL-3.0
false
b259066d0c953db243ee580f8c3a3d32
import os\nimport sys\nfrom typing import Optional, Tuple\n\n\ndef glibc_version_string() -> Optional[str]:\n "Returns glibc version string, or None if not using glibc."\n return glibc_version_string_confstr() or glibc_version_string_ctypes()\n\n\ndef glibc_version_string_confstr() -> Optional[str]:\n "Primary implementation of glibc_version_string using os.confstr."\n # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely\n # to be broken or missing. This strategy is used in the standard library\n # platform module:\n # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183\n if sys.platform == "win32":\n return None\n try:\n gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")\n if gnu_libc_version is None:\n return None\n # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":\n _, version = gnu_libc_version.split()\n except (AttributeError, OSError, ValueError):\n # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...\n return None\n return version\n\n\ndef glibc_version_string_ctypes() -> Optional[str]:\n "Fallback implementation of glibc_version_string using ctypes."\n\n try:\n import ctypes\n except ImportError:\n return None\n\n # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen\n # manpage says, "If filename is NULL, then the returned handle is for the\n # main program". This way we can let the linker do the work to figure out\n # which libc our process is actually using.\n #\n # We must also handle the special case where the executable is not a\n # dynamically linked executable. This can occur when using musl libc,\n # for example. In this situation, dlopen() will error, leading to an\n # OSError. Interestingly, at least in the case of musl, there is no\n # errno set on the OSError. The single string argument used to construct\n # OSError comes from libc itself and is therefore not portable to\n # hard code here. 
In any case, failure to call dlopen() means we\n # can't proceed, so we bail on our attempt.\n try:\n process_namespace = ctypes.CDLL(None)\n except OSError:\n return None\n\n try:\n gnu_get_libc_version = process_namespace.gnu_get_libc_version\n except AttributeError:\n # Symbol doesn't exist -> therefore, we are not linked to\n # glibc.\n return None\n\n # Call gnu_get_libc_version, which returns a string like "2.5"\n gnu_get_libc_version.restype = ctypes.c_char_p\n version_str: str = gnu_get_libc_version()\n # py2 / py3 compatibility:\n if not isinstance(version_str, str):\n version_str = version_str.decode("ascii")\n\n return version_str\n\n\n# platform.libc_ver regularly returns completely nonsensical glibc\n# versions. E.g. on my computer, platform says:\n#\n# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'\n# ('glibc', '2.7')\n# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'\n# ('glibc', '2.9')\n#\n# But the truth is:\n#\n# ~$ ldd --version\n# ldd (Debian GLIBC 2.22-11) 2.22\n#\n# This is unfortunate, because it means that the linehaul data on libc\n# versions that was generated by pip 8.1.2 and earlier is useless and\n# misleading. Solution: instead of using platform, use our code that actually\n# works.\ndef libc_ver() -> Tuple[str, str]:\n """Try to determine the glibc version\n\n Returns a tuple of strings (lib, version) which default to empty strings\n in case the lookup fails.\n """\n glibc_version = glibc_version_string()\n if glibc_version is None:\n return ("", "")\n else:\n return ("glibc", glibc_version)\n
.venv\Lib\site-packages\pip\_internal\utils\glibc.py
glibc.py
Python
3,734
0.95
0.148515
0.45977
awesome-app
222
2024-11-08T23:15:33.052985
BSD-3-Clause
false
571da2de36eea6aa5e414ad453a5360c
import hashlib
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, NoReturn, Optional

from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
from pip._internal.utils.misc import read_chunks

if TYPE_CHECKING:
    from hashlib import _Hash


# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = "sha256"


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ["sha256", "sha384", "sha512"]


class Hashes:
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """

    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        allowed: Dict[str, List[str]] = {}
        for alg, digests in (hashes or {}).items():
            # Sort first so equality checks don't depend on input order.
            allowed[alg] = [digest.lower() for digest in sorted(digests)]
        self._allowed = allowed

    def __and__(self, other: "Hashes") -> "Hashes":
        if not isinstance(other, Hashes):
            return NotImplemented

        # An entirely empty Hashes restricts nothing, so the other side's
        # hashes win outright.
        if not other:
            return self
        if not self:
            return other

        # Otherwise keep only the digests present on both sides.
        intersection = {
            alg: [d for d in digests if d in self._allowed[alg]]
            for alg, digests in other._allowed.items()
            if alg in self._allowed
        }
        return Hashes(intersection)

    @property
    def digest_count(self) -> int:
        return sum(map(len, self._allowed.values()))

    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
        """Return whether the given hex digest is allowed."""
        return hex_digest in self._allowed.get(hash_name, [])

    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        live: Dict[str, "_Hash"] = {}
        for hash_name in self._allowed:
            try:
                live[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError(f"Unknown hash name: {hash_name}")

        for chunk in chunks:
            for hasher in live.values():
                hasher.update(chunk)

        # Any single matching digest is enough.
        for hash_name, hasher in live.items():
            if hasher.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(live)

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file: BinaryIO) -> None:
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path: str) -> None:
        with open(path, "rb") as file:
            return self.check_against_file(file)

    def has_one_of(self, hashes: Dict[str, str]) -> bool:
        """Return whether any of the given hashes are allowed."""
        return any(
            self.is_hash_allowed(name, digest) for name, digest in hashes.items()
        )

    def __bool__(self) -> bool:
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Hashes):
            return NotImplemented
        return self._allowed == other._allowed

    def __hash__(self) -> int:
        flattened = sorted(
            f"{alg}:{digest}"
            for alg, digests in self._allowed.items()
            for digest in digests
        )
        return hash(",".join(flattened))


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """

    def __init__(self) -> None:
        """Don't offer the ``hashes`` kwarg."""
        # Seed our favorite hash with an empty digest list: it can never
        # match, so checking always ends in _raise() with the "gotten" hash.
        super().__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
.venv\Lib\site-packages\pip\_internal\utils\hashes.py
hashes.py
Python
4,972
0.95
0.292517
0.089286
python-kit
928
2025-01-24T00:25:02.283890
Apache-2.0
false
da7e18da884550c21aee0fcf1e1897ff
import contextlib
import errno
import logging
import logging.config
import logging.handlers
import os
import sys
import threading
from dataclasses import dataclass
from io import TextIOWrapper
from logging import Filter
from typing import Any, ClassVar, Generator, List, Optional, Type

from pip._vendor.rich.console import (
    Console,
    ConsoleOptions,
    ConsoleRenderable,
    RenderableType,
    RenderResult,
    RichCast,
)
from pip._vendor.rich.highlighter import NullHighlighter
from pip._vendor.rich.logging import RichHandler
from pip._vendor.rich.segment import Segment
from pip._vendor.rich.style import Style

from pip._internal.utils._log import VERBOSE, getLogger
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
from pip._internal.utils.misc import ensure_dir

# Per-thread storage for the current log indentation level.
_log_state = threading.local()
# Process-wide rich consoles; populated by setup_logging().
_stdout_console: Optional[Console] = None
_stderr_console: Optional[Console] = None
subprocess_logger = getLogger("pip.subprocessor")


class BrokenStdoutLoggingError(Exception):
    """
    Raised if BrokenPipeError occurs for the stdout stream while logging.
    """


def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
    """Return True if the exception indicates a broken pipe."""
    if exc_class is BrokenPipeError:
        return True

    # On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
    # https://bugs.python.org/issue19612
    # https://bugs.python.org/issue30418
    if not WINDOWS:
        return False

    return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)


@contextlib.contextmanager
def indent_log(num: int = 2) -> Generator[None, None, None]:
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    # For thread-safety
    _log_state.indentation = get_indentation()
    _log_state.indentation += num
    try:
        yield
    finally:
        _log_state.indentation -= num


def get_indentation() -> int:
    """Return the current indentation level (0 outside indent_log())."""
    return getattr(_log_state, "indentation", 0)


class IndentingFormatter(logging.Formatter):
    default_time_format = "%Y-%m-%dT%H:%M:%S"

    def __init__(
        self,
        *args: Any,
        add_timestamp: bool = False,
        **kwargs: Any,
    ) -> None:
        """
        A logging.Formatter that obeys the indent_log() context manager.

        :param add_timestamp: A bool indicating output lines should be prefixed
            with their record's timestamp.
        """
        self.add_timestamp = add_timestamp
        super().__init__(*args, **kwargs)

    def get_message_start(self, formatted: str, levelno: int) -> str:
        """
        Return the start of the formatted log message (not counting the
        prefix to add to each line).
        """
        if levelno < logging.WARNING:
            return ""
        if formatted.startswith(DEPRECATION_MSG_PREFIX):
            # Then the message already has a prefix. We don't want it to
            # look like "WARNING: DEPRECATION: ...."
            return ""
        if levelno < logging.ERROR:
            return "WARNING: "

        return "ERROR: "

    def format(self, record: logging.LogRecord) -> str:
        """
        Calls the standard formatter, but will indent all of the log message
        lines by our current indentation level.
        """
        formatted = super().format(record)
        message_start = self.get_message_start(formatted, record.levelno)
        formatted = message_start + formatted

        prefix = ""
        if self.add_timestamp:
            prefix = f"{self.formatTime(record)} "
        prefix += " " * get_indentation()
        formatted = "".join([prefix + line for line in formatted.splitlines(True)])
        return formatted


@dataclass
class IndentedRenderable:
    """Wraps a rich renderable so every rendered line is indented."""

    renderable: RenderableType
    indent: int

    def __rich_console__(
        self, console: Console, options: ConsoleOptions
    ) -> RenderResult:
        segments = console.render(self.renderable, options)
        lines = Segment.split_lines(segments)
        for line in lines:
            yield Segment(" " * self.indent)
            yield from line
            yield Segment("\n")


class PipConsole(Console):
    def on_broken_pipe(self) -> None:
        # Reraise the original exception, rich 13.8.0+ exits by default
        # instead, preventing our handler from firing.
        raise BrokenPipeError() from None


def get_console(*, stderr: bool = False) -> Console:
    """Return the process-wide console created by setup_logging()."""
    if stderr:
        assert _stderr_console is not None, "stderr rich console is missing!"
        return _stderr_console
    else:
        assert _stdout_console is not None, "stdout rich console is missing!"
        return _stdout_console


class RichPipStreamHandler(RichHandler):
    KEYWORDS: ClassVar[Optional[List[str]]] = []

    def __init__(self, console: Console) -> None:
        super().__init__(
            console=console,
            show_time=False,
            show_level=False,
            show_path=False,
            highlighter=NullHighlighter(),
        )

    # Our custom override on Rich's logger, to make things work as we need them to.
    def emit(self, record: logging.LogRecord) -> None:
        style: Optional[Style] = None

        # If we are given a diagnostic error to present, present it with indentation.
        if getattr(record, "rich", False):
            assert isinstance(record.args, tuple)
            (rich_renderable,) = record.args
            assert isinstance(
                rich_renderable, (ConsoleRenderable, RichCast, str)
            ), f"{rich_renderable} is not rich-console-renderable"

            renderable: RenderableType = IndentedRenderable(
                rich_renderable, indent=get_indentation()
            )
        else:
            message = self.format(record)
            renderable = self.render_message(record, message)
            if record.levelno is not None:
                if record.levelno >= logging.ERROR:
                    style = Style(color="red")
                elif record.levelno >= logging.WARNING:
                    style = Style(color="yellow")

        try:
            self.console.print(renderable, overflow="ignore", crop=False, style=style)
        except Exception:
            self.handleError(record)

    def handleError(self, record: logging.LogRecord) -> None:
        """Called when logging is unable to log some output."""

        exc_class, exc = sys.exc_info()[:2]
        # If a broken pipe occurred while calling write() or flush() on the
        # stdout stream in logging's Handler.emit(), then raise our special
        # exception so we can handle it in main() instead of logging the
        # broken pipe error and continuing.
        if (
            exc_class
            and exc
            and self.console.file is sys.stdout
            and _is_broken_pipe_error(exc_class, exc)
        ):
            raise BrokenStdoutLoggingError()

        return super().handleError(record)


class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log directory on demand."""

    def _open(self) -> TextIOWrapper:
        ensure_dir(os.path.dirname(self.baseFilename))
        return super()._open()


class MaxLevelFilter(Filter):
    """Allow only records strictly below a given level."""

    def __init__(self, level: int) -> None:
        self.level = level

    def filter(self, record: logging.LogRecord) -> bool:
        return record.levelno < self.level


class ExcludeLoggerFilter(Filter):
    """
    A logging Filter that excludes records from a logger (or its children).
    """

    def filter(self, record: logging.LogRecord) -> bool:
        # The base Filter class allows only records from a logger (or its
        # children).
        return not super().filter(record)


def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
    """Configures and sets up all of the logging

    Returns the requested logging level, as its integer value.
    """

    # Determine the level to be logging at.
    if verbosity >= 2:
        level_number = logging.DEBUG
    elif verbosity == 1:
        level_number = VERBOSE
    elif verbosity == -1:
        level_number = logging.WARNING
    elif verbosity == -2:
        level_number = logging.ERROR
    elif verbosity <= -3:
        level_number = logging.CRITICAL
    else:
        level_number = logging.INFO

    level = logging.getLevelName(level_number)

    # The "root" logger should match the "console" level *unless* we also need
    # to log to a user log file.
    include_user_log = user_log_file is not None
    if include_user_log:
        additional_log_file = user_log_file
        root_level = "DEBUG"
    else:
        additional_log_file = "/dev/null"
        root_level = level

    # Disable any logging besides WARNING unless we have DEBUG level logging
    # enabled for vendored libraries.
    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"

    # Shorthands for clarity
    handler_classes = {
        "stream": "pip._internal.utils.logging.RichPipStreamHandler",
        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
    }
    handlers = ["console", "console_errors", "console_subprocess"] + (
        ["user_log"] if include_user_log else []
    )
    # BUG FIX: the stderr name previously lacked its leading underscore
    # ("stderr_console"), so the module-level _stderr_console was never set
    # and get_console(stderr=True) would hit its assertion.
    global _stdout_console, _stderr_console
    _stdout_console = PipConsole(file=sys.stdout, no_color=no_color, soft_wrap=True)
    _stderr_console = PipConsole(file=sys.stderr, no_color=no_color, soft_wrap=True)

    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                "exclude_warnings": {
                    "()": "pip._internal.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
                "restrict_to_subprocess": {
                    "()": "logging.Filter",
                    "name": subprocess_logger.name,
                },
                "exclude_subprocess": {
                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
                    "name": subprocess_logger.name,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
                "indent_with_timestamp": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                    "add_timestamp": True,
                },
            },
            "handlers": {
                "console": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "console": _stdout_console,
                    "filters": ["exclude_subprocess", "exclude_warnings"],
                    "formatter": "indent",
                },
                "console_errors": {
                    "level": "WARNING",
                    "class": handler_classes["stream"],
                    "console": _stderr_console,
                    "filters": ["exclude_subprocess"],
                    "formatter": "indent",
                },
                # A handler responsible for logging to the console messages
                # from the "subprocessor" logger.
                "console_subprocess": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "console": _stderr_console,
                    "filters": ["restrict_to_subprocess"],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": handler_classes["file"],
                    "filename": additional_log_file,
                    "encoding": "utf-8",
                    "delay": True,
                    "formatter": "indent_with_timestamp",
                },
            },
            "root": {
                "level": root_level,
                "handlers": handlers,
            },
            "loggers": {"pip._vendor": {"level": vendored_log_level}},
        }
    )

    return level_number
.venv\Lib\site-packages\pip\_internal\utils\logging.py
logging.py
Python
12,076
0.95
0.157895
0.085246
vue-tools
358
2025-02-23T20:50:09.996460
GPL-3.0
false
b9e9238c699be8a0403d704db7b61719
"""Miscellaneous shared utilities for pip's internals.

Grab-bag of helpers used across pip: filesystem operations (robust rmtree,
directory creation, path normalization), console prompting, URL/netloc
credential handling and redaction, secret-hiding wrappers, and a
``BuildBackendHookCaller`` subclass that injects pip's config settings.
"""

import errno
import getpass
import hashlib
import logging
import os
import posixpath
import shutil
import stat
import sys
import sysconfig
import urllib.parse
from dataclasses import dataclass
from functools import partial
from io import StringIO
from itertools import filterfalse, tee, zip_longest
from pathlib import Path
from types import FunctionType, TracebackType
from typing import (
    Any,
    BinaryIO,
    Callable,
    Generator,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    TextIO,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
)

from pip._vendor.packaging.requirements import Requirement
from pip._vendor.pyproject_hooks import BuildBackendHookCaller

from pip import __version__
from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
from pip._internal.locations import get_major_minor_version
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.retry import retry
from pip._internal.utils.virtualenv import running_under_virtualenv

__all__ = [
    "rmtree",
    "display_path",
    "backup_dir",
    "ask",
    "splitext",
    "format_size",
    "is_installable_dir",
    "normalize_path",
    "renames",
    "get_prog",
    "ensure_dir",
    "remove_auth_from_url",
    "check_externally_managed",
    "ConfiguredBuildBackendHookCaller",
]

logger = logging.getLogger(__name__)

T = TypeVar("T")
# (exc_type, exc_value, traceback) triple as produced by sys.exc_info().
ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
VersionInfo = Tuple[int, int, int]
# (netloc_without_auth, (username, password))
NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
# Error callbacks for rmtree: new-style (exception) and old-style (exc_info).
OnExc = Callable[[FunctionType, Path, BaseException], Any]
OnErr = Callable[[FunctionType, Path, ExcInfo], Any]

FILE_CHUNK_SIZE = 1024 * 1024


def get_pip_version() -> str:
    """Return a human-readable pip version banner (used by ``pip --version``)."""
    pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
    pip_pkg_dir = os.path.abspath(pip_pkg_dir)

    return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"


def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
    """
    Convert a tuple of ints representing a Python version to one of length
    three, zero-padding short tuples and truncating long ones.

    :param py_version_info: a tuple of ints representing a Python version.
        The tuple can have any length.

    :return: a tuple of exactly three ints (major, minor, micro).
    """
    if len(py_version_info) < 3:
        py_version_info += (3 - len(py_version_info)) * (0,)
    elif len(py_version_info) > 3:
        py_version_info = py_version_info[:3]

    return cast("VersionInfo", py_version_info)


def ensure_dir(path: str) -> None:
    """Create a directory (and parents), ignoring "already exists" errors.

    Like ``os.makedirs`` but EEXIST is swallowed, as is ENOTEMPTY (which
    Windows can raise spuriously, see the linked issue below).
    """
    try:
        os.makedirs(path)
    except OSError as e:
        # Windows can raise spurious ENOTEMPTY errors. See #6426.
        if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
            raise


def get_prog() -> str:
    """Return the program name to show in usage/help text.

    Falls back to "pip" if sys.argv is unavailable or malformed.
    """
    try:
        prog = os.path.basename(sys.argv[0])
        if prog in ("__main__.py", "-c"):
            # Invoked via "python -m pip" or "python -c ..." — show a
            # command the user can actually re-run.
            return f"{sys.executable} -m pip"
        else:
            return prog
    except (AttributeError, TypeError, IndexError):
        pass
    return "pip"


# Retry every half second for up to 3 seconds
@retry(stop_after_delay=3, wait=0.5)
def rmtree(
    dir: str, ignore_errors: bool = False, onexc: Optional[OnExc] = None
) -> None:
    """Remove a directory tree, retrying briefly on transient failures.

    Wraps ``shutil.rmtree`` with a readonly-clearing error handler; the
    ``retry`` decorator re-invokes the whole removal on failure.
    """
    if ignore_errors:
        onexc = _onerror_ignore
    if onexc is None:
        onexc = _onerror_reraise
    handler: OnErr = partial(rmtree_errorhandler, onexc=onexc)
    if sys.version_info >= (3, 12):
        # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
        shutil.rmtree(dir, onexc=handler)  # type: ignore
    else:
        shutil.rmtree(dir, onerror=handler)  # type: ignore


def _onerror_ignore(*_args: Any) -> None:
    """Error callback that swallows the failure entirely."""
    pass


def _onerror_reraise(*_args: Any) -> None:
    """Error callback that re-raises the exception currently being handled."""
    raise  # noqa: PLE0704 - Bare exception used to reraise existing exception


def rmtree_errorhandler(
    func: FunctionType,
    path: Path,
    exc_info: Union[ExcInfo, BaseException],
    *,
    onexc: OnExc = _onerror_reraise,
) -> None:
    """
    `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).

    * If a file is readonly then its write flag is set and the operation is
      retried.

    * `onexc` is the original callback passed to `rmtree` that is chained
      at the end if the "rm -f" still fails.
    """
    try:
        st_mode = os.stat(path).st_mode
    except OSError:
        # it's equivalent to os.path.exists
        return

    if not st_mode & stat.S_IWRITE:
        # convert to read/write
        try:
            os.chmod(path, st_mode | stat.S_IWRITE)
        except OSError:
            pass
        else:
            # use the original function to repeat the operation
            try:
                func(path)
                return
            except OSError:
                pass

    # Normalize old-style (type, value, tb) triples to a bare exception
    # before handing off to the chained callback.
    if not isinstance(exc_info, BaseException):
        _, exc_info, _ = exc_info
    onexc(func, path, exc_info)


def display_path(path: str) -> str:
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if path.startswith(os.getcwd() + os.path.sep):
        path = "." + path[len(os.getcwd()) :]
    return path


def backup_dir(dir: str, ext: str = ".bak") -> str:
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    extension = ext
    while os.path.exists(dir + extension):
        n += 1
        extension = ext + str(n)
    return dir + extension


def ask_path_exists(message: str, options: Iterable[str]) -> str:
    """Like :func:`ask`, but honor a pre-set answer from $PIP_EXISTS_ACTION."""
    for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
        if action in options:
            return action
    return ask(message, options)


def _check_no_input(message: str) -> None:
    """Raise an error if no input is allowed."""
    if os.environ.get("PIP_NO_INPUT"):
        raise Exception(
            f"No input was expected ($PIP_NO_INPUT set); question: {message}"
        )


def ask(message: str, options: Iterable[str]) -> str:
    """Ask the message interactively, with the given possible responses"""
    while 1:
        _check_no_input(message)
        response = input(message)
        response = response.strip().lower()
        if response not in options:
            print(
                "Your response ({!r}) was not one of the expected responses: "
                "{}".format(response, ", ".join(options))
            )
        else:
            return response


def ask_input(message: str) -> str:
    """Ask for input interactively."""
    _check_no_input(message)
    return input(message)


def ask_password(message: str) -> str:
    """Ask for a password interactively."""
    _check_no_input(message)
    return getpass.getpass(message)


def strtobool(val: str) -> int:
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.
    """
    val = val.lower()
    if val in ("y", "yes", "t", "true", "on", "1"):
        return 1
    elif val in ("n", "no", "f", "false", "off", "0"):
        return 0
    else:
        raise ValueError(f"invalid truth value {val!r}")


def format_size(bytes: float) -> str:
    """Render a byte count as a short human-readable size (decimal units)."""
    if bytes > 1000 * 1000:
        return f"{bytes / 1000.0 / 1000:.1f} MB"
    elif bytes > 10 * 1000:
        return f"{int(bytes / 1000)} kB"
    elif bytes > 1000:
        return f"{bytes / 1000.0:.1f} kB"
    else:
        return f"{int(bytes)} bytes"


def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
    """Return a list of formatted rows and a list of column sizes.

    For example::

    >>> tabulate([['foobar', 2000], [0xdeadbeef]])
    (['foobar     2000', '3735928559'], [10, 4])
    """
    rows = [tuple(map(str, row)) for row in rows]
    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
    table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
    return table, sizes


def is_installable_dir(path: str) -> bool:
    """Is path is a directory containing pyproject.toml or setup.py?

    If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
    a legacy setuptools layout by identifying setup.py. We don't check for the
    setup.cfg because using it without setup.py is only available for PEP 517
    projects, which are already covered by the pyproject.toml check.
    """
    if not os.path.isdir(path):
        return False
    if os.path.isfile(os.path.join(path, "pyproject.toml")):
        return True
    if os.path.isfile(os.path.join(path, "setup.py")):
        return True
    return False


def read_chunks(
    file: BinaryIO, size: int = FILE_CHUNK_SIZE
) -> Generator[bytes, None, None]:
    """Yield pieces of data from a file-like object until EOF."""
    while True:
        chunk = file.read(size)
        if not chunk:
            break
        yield chunk


def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
    """
    Convert a path to its canonical, case-normalized, absolute version.
    """
    path = os.path.expanduser(path)
    if resolve_symlinks:
        path = os.path.realpath(path)
    else:
        path = os.path.abspath(path)
    return os.path.normcase(path)


def splitext(path: str) -> Tuple[str, str]:
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith(".tar"):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext


def renames(old: str, new: str) -> None:
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    head, tail = os.path.split(new)
    if head and tail and not os.path.exists(head):
        os.makedirs(head)

    shutil.move(old, new)

    head, tail = os.path.split(old)
    if head and tail:
        try:
            os.removedirs(head)
        except OSError:
            pass


def is_local(path: str) -> bool:
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    Caution: this function assumes the head of path has been normalized
    with normalize_path.
    """
    if not running_under_virtualenv():
        return True
    # NOTE(review): plain prefix test; assumes callers never pass a sibling
    # path like "<prefix>2" that merely shares a string prefix — confirm.
    return path.startswith(normalize_path(sys.prefix))


def write_output(msg: Any, *args: Any) -> None:
    """Log user-facing output at INFO level (pip's "print" equivalent)."""
    logger.info(msg, *args)


class StreamWrapper(StringIO):
    """An in-memory stream that remembers which real stream it stands in for."""

    orig_stream: TextIO

    @classmethod
    def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
        ret = cls()
        ret.orig_stream = orig_stream
        return ret

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    # type ignore is because TextIOBase.encoding is writeable
    @property
    def encoding(self) -> str:  # type: ignore
        return self.orig_stream.encoding


# Simulates an enum
def enum(*sequential: Any, **named: Any) -> Type[Any]:
    """Build an ad-hoc enum-like class from positional and keyword names."""
    enums = dict(zip(sequential, range(len(sequential))), **named)
    reverse = {value: key for key, value in enums.items()}
    enums["reverse_mapping"] = reverse
    return type("Enum", (), enums)


def build_netloc(host: str, port: Optional[int]) -> str:
    """
    Build a netloc from a host-port pair
    """
    if port is None:
        return host
    if ":" in host:
        # Only wrap host with square brackets when it is IPv6
        host = f"[{host}]"
    return f"{host}:{port}"


def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
    """
    Build a full URL from a netloc.
    """
    if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
        # It must be a bare IPv6 address, so wrap it with brackets.
        netloc = f"[{netloc}]"
    return f"{scheme}://{netloc}"


def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
    """
    Return the host-port pair from a netloc.
    """
    url = build_url_from_netloc(netloc)
    parsed = urllib.parse.urlparse(url)
    return parsed.hostname, parsed.port


def split_auth_from_netloc(netloc: str) -> NetlocTuple:
    """
    Parse out and remove the auth information from a netloc.

    Returns: (netloc, (username, password)).
    """
    if "@" not in netloc:
        return netloc, (None, None)

    # Split from the right because that's how urllib.parse.urlsplit()
    # behaves if more than one @ is present (which can be checked using
    # the password attribute of urlsplit()'s return value).
    auth, netloc = netloc.rsplit("@", 1)
    pw: Optional[str] = None
    if ":" in auth:
        # Split from the left because that's how urllib.parse.urlsplit()
        # behaves if more than one : is present (which again can be checked
        # using the password attribute of the return value)
        user, pw = auth.split(":", 1)
    else:
        user, pw = auth, None

    user = urllib.parse.unquote(user)
    if pw is not None:
        pw = urllib.parse.unquote(pw)

    return netloc, (user, pw)


def redact_netloc(netloc: str) -> str:
    """
    Replace the sensitive data in a netloc with "****", if it exists.

    For example:
        - "user:pass@example.com" returns "user:****@example.com"
        - "accesstoken@example.com" returns "****@example.com"
    """
    netloc, (user, password) = split_auth_from_netloc(netloc)
    if user is None:
        return netloc
    if password is None:
        user = "****"
        password = ""
    else:
        user = urllib.parse.quote(user)
        password = ":****"
    return f"{user}{password}@{netloc}"


def _transform_url(
    url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
) -> Tuple[str, NetlocTuple]:
    """Transform and replace netloc in a url.

    transform_netloc is a function taking the netloc and returning a
    tuple. The first element of this tuple is the new netloc. The
    entire tuple is returned.

    Returns a tuple containing the transformed url as item 0 and the
    original tuple returned by transform_netloc as item 1.
    """
    purl = urllib.parse.urlsplit(url)
    netloc_tuple = transform_netloc(purl.netloc)
    # stripped url
    url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
    surl = urllib.parse.urlunsplit(url_pieces)
    return surl, cast("NetlocTuple", netloc_tuple)


def _get_netloc(netloc: str) -> NetlocTuple:
    """`_transform_url` adapter that strips auth from the netloc."""
    return split_auth_from_netloc(netloc)


def _redact_netloc(netloc: str) -> Tuple[str]:
    """`_transform_url` adapter that redacts auth in the netloc."""
    return (redact_netloc(netloc),)


def split_auth_netloc_from_url(
    url: str,
) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
    """
    Parse a url into separate netloc, auth, and url with no auth.

    Returns: (url_without_auth, netloc, (username, password))
    """
    url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
    return url_without_auth, netloc, auth


def remove_auth_from_url(url: str) -> str:
    """Return a copy of url with 'username:password@' removed."""
    # username/pass params are passed to subversion through flags
    # and are not recognized in the url.
    return _transform_url(url, _get_netloc)[0]


def redact_auth_from_url(url: str) -> str:
    """Replace the password in a given url with ****."""
    return _transform_url(url, _redact_netloc)[0]


def redact_auth_from_requirement(req: Requirement) -> str:
    """Replace the password in a given requirement url with ****."""
    if not req.url:
        return str(req)
    return str(req).replace(req.url, redact_auth_from_url(req.url))


@dataclass(frozen=True)
class HiddenText:
    """A string paired with its redacted form; str()/repr() show only the
    redacted form, so secrets don't leak into logs accidentally."""

    secret: str
    redacted: str

    def __repr__(self) -> str:
        return f"<HiddenText {str(self)!r}>"

    def __str__(self) -> str:
        return self.redacted

    # This is useful for testing.
    def __eq__(self, other: Any) -> bool:
        if type(self) is not type(other):
            return False

        # The string being used for redaction doesn't also have to match,
        # just the raw, original string.
        return self.secret == other.secret


def hide_value(value: str) -> HiddenText:
    """Wrap a secret value so it renders as "****"."""
    return HiddenText(value, redacted="****")


def hide_url(url: str) -> HiddenText:
    """Wrap a URL so it renders with its password redacted."""
    redacted = redact_auth_from_url(url)
    return HiddenText(url, redacted=redacted)


def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
    """Protection of pip.exe from modification on Windows

    On Windows, any operation modifying pip should be run as:
        python -m pip ...
    """
    pip_names = [
        "pip",
        f"pip{sys.version_info.major}",
        f"pip{sys.version_info.major}.{sys.version_info.minor}",
    ]

    # See https://github.com/pypa/pip/issues/1299 for more discussion
    should_show_use_python_msg = (
        modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
    )

    if should_show_use_python_msg:
        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
        raise CommandError(
            "To modify pip, please run the following command:\n{}".format(
                " ".join(new_command)
            )
        )


def check_externally_managed() -> None:
    """Check whether the current environment is externally managed.

    If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
    is considered externally managed, and an ExternallyManagedEnvironment is
    raised.
    """
    if running_under_virtualenv():
        return
    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
    if not os.path.isfile(marker):
        return
    raise ExternallyManagedEnvironment.from_config(marker)


def is_console_interactive() -> bool:
    """Is this console interactive?"""
    return sys.stdin is not None and sys.stdin.isatty()


def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
    """Return (hash, length) for path using hashlib.sha256()"""

    h = hashlib.sha256()
    length = 0
    with open(path, "rb") as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    return h, length


def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
    """
    Return paired elements.

    For example:
        s -> (s0, s1), (s2, s3), (s4, s5), ...
    """
    iterable = iter(iterable)
    return zip_longest(iterable, iterable)


def partition(
    pred: Callable[[T], bool], iterable: Iterable[T]
) -> Tuple[Iterable[T], Iterable[T]]:
    """
    Use a predicate to partition entries into false entries and true entries,
    like

        partition(is_odd, range(10)) --> 0 2 4 6 8   and  1 3 5 7 9
    """
    t1, t2 = tee(iterable)
    return filterfalse(pred, t1), filter(pred, t2)


class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
    """BuildBackendHookCaller that injects ``config_holder.config_settings``
    into every build-backend hook invocation."""

    def __init__(
        self,
        config_holder: Any,
        source_dir: str,
        build_backend: str,
        backend_path: Optional[str] = None,
        runner: Optional[Callable[..., None]] = None,
        python_executable: Optional[str] = None,
    ):
        super().__init__(
            source_dir, build_backend, backend_path, runner, python_executable
        )
        self.config_holder = config_holder

    def build_wheel(
        self,
        wheel_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        metadata_directory: Optional[str] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_wheel(
            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
        )

    def build_sdist(
        self,
        sdist_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_sdist(sdist_directory, config_settings=cs)

    def build_editable(
        self,
        wheel_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        metadata_directory: Optional[str] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_editable(
            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
        )

    def get_requires_for_build_wheel(
        self, config_settings: Optional[Mapping[str, Any]] = None
    ) -> Sequence[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_wheel(config_settings=cs)

    def get_requires_for_build_sdist(
        self, config_settings: Optional[Mapping[str, Any]] = None
    ) -> Sequence[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_sdist(config_settings=cs)

    def get_requires_for_build_editable(
        self, config_settings: Optional[Mapping[str, Any]] = None
    ) -> Sequence[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_editable(config_settings=cs)

    def prepare_metadata_for_build_wheel(
        self,
        metadata_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        _allow_fallback: bool = True,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().prepare_metadata_for_build_wheel(
            metadata_directory=metadata_directory,
            config_settings=cs,
            _allow_fallback=_allow_fallback,
        )

    def prepare_metadata_for_build_editable(
        self,
        metadata_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        _allow_fallback: bool = True,
    ) -> Optional[str]:
        cs = self.config_holder.config_settings
        return super().prepare_metadata_for_build_editable(
            metadata_directory=metadata_directory,
            config_settings=cs,
            _allow_fallback=_allow_fallback,
        )


def warn_if_run_as_root() -> None:
    """Output a warning for sudo users on Unix.

    In a virtual environment, sudo pip still writes to virtualenv.
    On Windows, users may run pip as Administrator without issues.
    This warning only applies to Unix root users outside of virtualenv.
    """
    if running_under_virtualenv():
        return
    if not hasattr(os, "getuid"):
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform == "win32" or sys.platform == "cygwin":
        return

    if os.getuid() != 0:
        return

    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager, possibly "
        "rendering your system unusable. "
        "It is recommended to use a virtual environment instead: "
        "https://pip.pypa.io/warnings/venv. "
        "Use the --root-user-action option if you know what you are doing and "
        "want to suppress this warning."
    )
.venv\Lib\site-packages\pip\_internal\utils\misc.py
misc.py
Python
23,450
0.95
0.187581
0.053398
node-utils
199
2024-10-11T21:57:11.780936
MIT
false
0d7890e7abbfb124d9141358b58d9b8f
"""Helpers for interpreting packaging metadata (Requires-Python, requirements)."""

import functools
import logging
from typing import Optional, Tuple

from pip._vendor.packaging import specifiers, version
from pip._vendor.packaging.requirements import Requirement

logger = logging.getLogger(__name__)


@functools.lru_cache(maxsize=32)
def check_requires_python(
    requires_python: Optional[str], version_info: Tuple[int, ...]
) -> bool:
    """
    Return whether a Python version satisfies a "Requires-Python" specifier.

    :param requires_python: the "Requires-Python" specifier string, or None
        when the package provides no such metadata.
    :param version_info: A 3-tuple of ints representing a Python
        major-minor-micro version to check (e.g. `sys.version_info[:3]`).

    :return: `True` if the given Python version satisfies the requirement.
        Otherwise, return `False`.

    :raises InvalidSpecifier: If `requires_python` has an invalid format.
    """
    # Absent metadata means any interpreter is acceptable.
    if requires_python is None:
        return True

    spec_set = specifiers.SpecifierSet(requires_python)
    candidate = version.parse(".".join(str(part) for part in version_info))
    return candidate in spec_set


@functools.lru_cache(maxsize=10000)
def get_requirement(req_string: str) -> Requirement:
    """Construct a ``Requirement`` from ``req_string``, with caching.

    Requirement parsing is comparatively expensive and the same strings are
    parsed repeatedly across a pip run, so an LRU cache keeps repeated
    construction of equivalent Requirement objects cheap.
    """
    return Requirement(req_string)
.venv\Lib\site-packages\pip\_internal\utils\packaging.py
packaging.py
Python
1,603
0.95
0.116279
0.181818
node-utils
234
2023-10-11T21:01:23.162074
MIT
false
e04464c9616e2d588f5a7b555342bd29
import functools
from time import perf_counter, sleep
from typing import Callable, TypeVar

# Prefer pip's vendored typing_extensions, but fall back to the stdlib
# implementation (available since Python 3.10) so this module remains
# importable in debundled/repackaged environments where ``pip._vendor``
# has been stripped.
try:
    from pip._vendor.typing_extensions import ParamSpec
except ImportError:  # pragma: no cover - only hit when pip is debundled
    from typing import ParamSpec  # type: ignore[assignment]

T = TypeVar("T")
P = ParamSpec("P")


def retry(
    wait: float, stop_after_delay: float
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """Decorator to automatically retry a function on error.

    If the function raises, the function is recalled with the same arguments
    until it returns or the time limit is reached. When the time limit is
    surpassed, the last exception raised is reraised.

    :param wait: The time to wait after an error before retrying, in seconds.
    :param stop_after_delay: The time limit after which retries will cease,
        in seconds.
    """

    def wrapper(func: Callable[P, T]) -> Callable[P, T]:

        @functools.wraps(func)
        def retry_wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
            # The performance counter is monotonic on all platforms we care
            # about and has much better resolution than time.monotonic().
            start_time = perf_counter()
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    # Once the time budget is exhausted, reraise the most
                    # recent failure; otherwise back off and try again.
                    if perf_counter() - start_time > stop_after_delay:
                        raise
                    sleep(wait)

        return retry_wrapped

    return wrapper
.venv\Lib\site-packages\pip\_internal\utils\retry.py
retry.py
Python
1,392
0.95
0.214286
0.0625
python-kit
838
2024-01-02T23:45:21.428133
GPL-3.0
false
aa5bada61863c523f5d44e2393ce9a60
"""Helpers that build ``setup.py`` command lines wrapped in pip's shim."""

import sys
import textwrap
from typing import List, Optional, Sequence

# Shim to wrap setup.py invocation with setuptools
# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
_SETUPTOOLS_SHIM = textwrap.dedent(
    """
    exec(compile('''
    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
    #
    # - It imports setuptools before invoking setup.py, to enable projects that directly
    #   import from `distutils.core` to work with newer packaging standards.
    # - It provides a clear error message when setuptools is not installed.
    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
    #     manifest_maker: standard file '-c' not found".
    # - It generates a shim setup.py, for handling setup.cfg-only projects.
    import os, sys, tokenize, traceback

    try:
        import setuptools
    except ImportError:
        print(
            "ERROR: Can not execute `setup.py` since setuptools failed to import in "
            "the build environment with exception:",
            file=sys.stderr,
        )
        traceback.print_exc()
        sys.exit(1)

    __file__ = %r
    sys.argv[0] = __file__

    if os.path.exists(__file__):
        filename = __file__
        with tokenize.open(__file__) as f:
            setup_py_code = f.read()
    else:
        filename = "<auto-generated setuptools caller>"
        setup_py_code = "from setuptools import setup; setup()"

    exec(compile(setup_py_code, filename, "exec"))
    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
    """
).rstrip()


def make_setuptools_shim_args(
    setup_py_path: str,
    global_options: Optional[Sequence[str]] = None,
    no_user_config: bool = False,
    unbuffered_output: bool = False,
) -> List[str]:
    """
    Build the interpreter command line that runs setup.py through pip's shim.

    :param setup_py_path: The path to setup.py to be wrapped.
    :param global_options: Additional global options.
    :param no_user_config: If True, disables personal user configuration.
    :param unbuffered_output: If True, adds the unbuffered switch to the
        argument list.
    """
    shim_args: List[str] = [sys.executable]
    if unbuffered_output:
        shim_args.append("-u")
    shim_args.append("-c")
    shim_args.append(_SETUPTOOLS_SHIM.format(setup_py_path))
    if global_options:
        shim_args.extend(global_options)
    if no_user_config:
        shim_args.append("--no-user-cfg")
    return shim_args


def make_setuptools_bdist_wheel_args(
    setup_py_path: str,
    global_options: Sequence[str],
    build_options: Sequence[str],
    destination_dir: str,
) -> List[str]:
    """Command line for ``setup.py bdist_wheel`` into ``destination_dir``."""
    # NOTE: Eventually, we'd want to also -S to the flags here, when we're
    # isolating. Currently, it breaks Python in virtualenvs, because it
    # relies on site.py to find parts of the standard library outside the
    # virtualenv.
    base = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    return [*base, "bdist_wheel", "-d", destination_dir, *build_options]


def make_setuptools_clean_args(
    setup_py_path: str,
    global_options: Sequence[str],
) -> List[str]:
    """Command line for ``setup.py clean --all``."""
    base = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    return [*base, "clean", "--all"]


def make_setuptools_develop_args(
    setup_py_path: str,
    *,
    global_options: Sequence[str],
    no_user_config: bool,
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
) -> List[str]:
    """Command line for ``setup.py develop`` (legacy editable install)."""
    # A user-site install and an explicit --prefix are mutually exclusive.
    assert not (use_user_site and prefix)

    develop_args = make_setuptools_shim_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=no_user_config,
    )
    develop_args.extend(["develop", "--no-deps"])
    if prefix:
        develop_args.extend(["--prefix", prefix])
    if home is not None:
        develop_args.extend(["--install-dir", home])
    if use_user_site:
        develop_args.extend(["--user", "--prefix="])
    return develop_args


def make_setuptools_egg_info_args(
    setup_py_path: str,
    egg_info_dir: Optional[str],
    no_user_config: bool,
) -> List[str]:
    """Command line for ``setup.py egg_info``, optionally into ``egg_info_dir``."""
    egg_args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
    egg_args.append("egg_info")
    if egg_info_dir:
        egg_args.extend(["--egg-base", egg_info_dir])
    return egg_args
.venv\Lib\site-packages\pip\_internal\utils\setuptools_build.py
setuptools_build.py
Python
4,482
0.95
0.108844
0.138211
awesome-app
378
2023-09-12T10:49:43.129487
BSD-3-Clause
false
e7b71eea3bb87fd42497b74497eeadc4
import logging\nimport os\nimport shlex\nimport subprocess\nfrom typing import Any, Callable, Iterable, List, Literal, Mapping, Optional, Union\n\nfrom pip._vendor.rich.markup import escape\n\nfrom pip._internal.cli.spinners import SpinnerInterface, open_spinner\nfrom pip._internal.exceptions import InstallationSubprocessError\nfrom pip._internal.utils.logging import VERBOSE, subprocess_logger\nfrom pip._internal.utils.misc import HiddenText\n\nCommandArgs = List[Union[str, HiddenText]]\n\n\ndef make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:\n """\n Create a CommandArgs object.\n """\n command_args: CommandArgs = []\n for arg in args:\n # Check for list instead of CommandArgs since CommandArgs is\n # only known during type-checking.\n if isinstance(arg, list):\n command_args.extend(arg)\n else:\n # Otherwise, arg is str or HiddenText.\n command_args.append(arg)\n\n return command_args\n\n\ndef format_command_args(args: Union[List[str], CommandArgs]) -> str:\n """\n Format command arguments for display.\n """\n # For HiddenText arguments, display the redacted form by calling str().\n # Also, we don't apply str() to arguments that aren't HiddenText since\n # this can trigger a UnicodeDecodeError in Python 2 if the argument\n # has type unicode and includes a non-ascii character. 
(The type\n # checker doesn't ensure the annotations are correct in all cases.)\n return " ".join(\n shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)\n for arg in args\n )\n\n\ndef reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:\n """\n Return the arguments in their raw, unredacted form.\n """\n return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]\n\n\ndef call_subprocess(\n cmd: Union[List[str], CommandArgs],\n show_stdout: bool = False,\n cwd: Optional[str] = None,\n on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",\n extra_ok_returncodes: Optional[Iterable[int]] = None,\n extra_environ: Optional[Mapping[str, Any]] = None,\n unset_environ: Optional[Iterable[str]] = None,\n spinner: Optional[SpinnerInterface] = None,\n log_failed_cmd: Optional[bool] = True,\n stdout_only: Optional[bool] = False,\n *,\n command_desc: str,\n) -> str:\n """\n Args:\n show_stdout: if true, use INFO to log the subprocess's stderr and\n stdout streams. Otherwise, use DEBUG. Defaults to False.\n extra_ok_returncodes: an iterable of integer return codes that are\n acceptable, in addition to 0. Defaults to None, which means [].\n unset_environ: an iterable of environment variable names to unset\n prior to calling subprocess.Popen().\n log_failed_cmd: if false, failed commands are not logged, only raised.\n stdout_only: if true, return only stdout, else return both. When true,\n logging of both stdout and stderr occurs when the subprocess has\n terminated, else logging occurs as subprocess output is produced.\n """\n if extra_ok_returncodes is None:\n extra_ok_returncodes = []\n if unset_environ is None:\n unset_environ = []\n # Most places in pip use show_stdout=False. What this means is--\n #\n # - We connect the child's output (combined stderr and stdout) to a\n # single pipe, which we read.\n # - We log this output to stderr at DEBUG level as it is received.\n # - If DEBUG logging isn't enabled (e.g. 
if --verbose logging wasn't\n # requested), then we show a spinner so the user can still see the\n # subprocess is in progress.\n # - If the subprocess exits with an error, we log the output to stderr\n # at ERROR level if it hasn't already been displayed to the console\n # (e.g. if --verbose logging wasn't enabled). This way we don't log\n # the output to the console twice.\n #\n # If show_stdout=True, then the above is still done, but with DEBUG\n # replaced by INFO.\n if show_stdout:\n # Then log the subprocess output at INFO level.\n log_subprocess: Callable[..., None] = subprocess_logger.info\n used_level = logging.INFO\n else:\n # Then log the subprocess output using VERBOSE. This also ensures\n # it will be logged to the log file (aka user_log), if enabled.\n log_subprocess = subprocess_logger.verbose\n used_level = VERBOSE\n\n # Whether the subprocess will be visible in the console.\n showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level\n\n # Only use the spinner if we're not showing the subprocess output\n # and we have a spinner.\n use_spinner = not showing_subprocess and spinner is not None\n\n log_subprocess("Running command %s", command_desc)\n env = os.environ.copy()\n if extra_environ:\n env.update(extra_environ)\n for name in unset_environ:\n env.pop(name, None)\n try:\n proc = subprocess.Popen(\n # Convert HiddenText objects to the underlying str.\n reveal_command_args(cmd),\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,\n cwd=cwd,\n env=env,\n errors="backslashreplace",\n )\n except Exception as exc:\n if log_failed_cmd:\n subprocess_logger.critical(\n "Error %s while executing command %s",\n exc,\n command_desc,\n )\n raise\n all_output = []\n if not stdout_only:\n assert proc.stdout\n assert proc.stdin\n proc.stdin.close()\n # In this mode, stdout and stderr are in the same pipe.\n while True:\n line: str = proc.stdout.readline()\n if not line:\n break\n 
line = line.rstrip()\n all_output.append(line + "\n")\n\n # Show the line immediately.\n log_subprocess(line)\n # Update the spinner.\n if use_spinner:\n assert spinner\n spinner.spin()\n try:\n proc.wait()\n finally:\n if proc.stdout:\n proc.stdout.close()\n output = "".join(all_output)\n else:\n # In this mode, stdout and stderr are in different pipes.\n # We must use communicate() which is the only safe way to read both.\n out, err = proc.communicate()\n # log line by line to preserve pip log indenting\n for out_line in out.splitlines():\n log_subprocess(out_line)\n all_output.append(out)\n for err_line in err.splitlines():\n log_subprocess(err_line)\n all_output.append(err)\n output = out\n\n proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes\n if use_spinner:\n assert spinner\n if proc_had_error:\n spinner.finish("error")\n else:\n spinner.finish("done")\n if proc_had_error:\n if on_returncode == "raise":\n error = InstallationSubprocessError(\n command_description=command_desc,\n exit_code=proc.returncode,\n output_lines=all_output if not showing_subprocess else None,\n )\n if log_failed_cmd:\n subprocess_logger.error("%s", error, extra={"rich": True})\n subprocess_logger.verbose(\n "[bold magenta]full command[/]: [blue]%s[/]",\n escape(format_command_args(cmd)),\n extra={"markup": True},\n )\n subprocess_logger.verbose(\n "[bold magenta]cwd[/]: %s",\n escape(cwd or "[inherit]"),\n extra={"markup": True},\n )\n\n raise error\n elif on_returncode == "warn":\n subprocess_logger.warning(\n 'Command "%s" had error code %s in %s',\n command_desc,\n proc.returncode,\n cwd,\n )\n elif on_returncode == "ignore":\n pass\n else:\n raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")\n return output\n\n\ndef runner_with_spinner_message(message: str) -> Callable[..., None]:\n """Provide a subprocess_runner that shows a spinner message.\n\n Intended for use with for BuildBackendHookCaller. 
Thus, the runner has\n an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.\n """\n\n def runner(\n cmd: List[str],\n cwd: Optional[str] = None,\n extra_environ: Optional[Mapping[str, Any]] = None,\n ) -> None:\n with open_spinner(message) as spinner:\n call_subprocess(\n cmd,\n command_desc=message,\n cwd=cwd,\n extra_environ=extra_environ,\n spinner=spinner,\n )\n\n return runner\n
.venv\Lib\site-packages\pip\_internal\utils\subprocess.py
subprocess.py
Python
8,988
0.95
0.195918
0.166667
vue-tools
228
2023-07-18T20:33:28.848252
BSD-3-Clause
false
43c53532521743b369079178ccb6bc3e
import errno\nimport itertools\nimport logging\nimport os.path\nimport tempfile\nimport traceback\nfrom contextlib import ExitStack, contextmanager\nfrom pathlib import Path\nfrom typing import (\n Any,\n Callable,\n Dict,\n Generator,\n List,\n Optional,\n TypeVar,\n Union,\n)\n\nfrom pip._internal.utils.misc import enum, rmtree\n\nlogger = logging.getLogger(__name__)\n\n_T = TypeVar("_T", bound="TempDirectory")\n\n\n# Kinds of temporary directories. Only needed for ones that are\n# globally-managed.\ntempdir_kinds = enum(\n BUILD_ENV="build-env",\n EPHEM_WHEEL_CACHE="ephem-wheel-cache",\n REQ_BUILD="req-build",\n)\n\n\n_tempdir_manager: Optional[ExitStack] = None\n\n\n@contextmanager\ndef global_tempdir_manager() -> Generator[None, None, None]:\n global _tempdir_manager\n with ExitStack() as stack:\n old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack\n try:\n yield\n finally:\n _tempdir_manager = old_tempdir_manager\n\n\nclass TempDirectoryTypeRegistry:\n """Manages temp directory behavior"""\n\n def __init__(self) -> None:\n self._should_delete: Dict[str, bool] = {}\n\n def set_delete(self, kind: str, value: bool) -> None:\n """Indicate whether a TempDirectory of the given kind should be\n auto-deleted.\n """\n self._should_delete[kind] = value\n\n def get_delete(self, kind: str) -> bool:\n """Get configured auto-delete flag for a given TempDirectory type,\n default True.\n """\n return self._should_delete.get(kind, True)\n\n\n_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None\n\n\n@contextmanager\ndef tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:\n """Provides a scoped global tempdir registry that can be used to dictate\n whether directories should be deleted.\n """\n global _tempdir_registry\n old_tempdir_registry = _tempdir_registry\n _tempdir_registry = TempDirectoryTypeRegistry()\n try:\n yield _tempdir_registry\n finally:\n _tempdir_registry = old_tempdir_registry\n\n\nclass _Default:\n pass\n\n\n_default = 
_Default()\n\n\nclass TempDirectory:\n """Helper class that owns and cleans up a temporary directory.\n\n This class can be used as a context manager or as an OO representation of a\n temporary directory.\n\n Attributes:\n path\n Location to the created temporary directory\n delete\n Whether the directory should be deleted when exiting\n (when used as a contextmanager)\n\n Methods:\n cleanup()\n Deletes the temporary directory\n\n When used as a context manager, if the delete attribute is True, on\n exiting the context the temporary directory is deleted.\n """\n\n def __init__(\n self,\n path: Optional[str] = None,\n delete: Union[bool, None, _Default] = _default,\n kind: str = "temp",\n globally_managed: bool = False,\n ignore_cleanup_errors: bool = True,\n ):\n super().__init__()\n\n if delete is _default:\n if path is not None:\n # If we were given an explicit directory, resolve delete option\n # now.\n delete = False\n else:\n # Otherwise, we wait until cleanup and see what\n # tempdir_registry says.\n delete = None\n\n # The only time we specify path is in for editables where it\n # is the value of the --src option.\n if path is None:\n path = self._create(kind)\n\n self._path = path\n self._deleted = False\n self.delete = delete\n self.kind = kind\n self.ignore_cleanup_errors = ignore_cleanup_errors\n\n if globally_managed:\n assert _tempdir_manager is not None\n _tempdir_manager.enter_context(self)\n\n @property\n def path(self) -> str:\n assert not self._deleted, f"Attempted to access deleted path: {self._path}"\n return self._path\n\n def __repr__(self) -> str:\n return f"<{self.__class__.__name__} {self.path!r}>"\n\n def __enter__(self: _T) -> _T:\n return self\n\n def __exit__(self, exc: Any, value: Any, tb: Any) -> None:\n if self.delete is not None:\n delete = self.delete\n elif _tempdir_registry:\n delete = _tempdir_registry.get_delete(self.kind)\n else:\n delete = True\n\n if delete:\n self.cleanup()\n\n def _create(self, kind: str) -> str:\n 
"""Create a temporary directory and store its path in self.path"""\n # We realpath here because some systems have their default tmpdir\n # symlinked to another directory. This tends to confuse build\n # scripts, so we canonicalize the path by traversing potential\n # symlinks here.\n path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))\n logger.debug("Created temporary directory: %s", path)\n return path\n\n def cleanup(self) -> None:\n """Remove the temporary directory created and reset state"""\n self._deleted = True\n if not os.path.exists(self._path):\n return\n\n errors: List[BaseException] = []\n\n def onerror(\n func: Callable[..., Any],\n path: Path,\n exc_val: BaseException,\n ) -> None:\n """Log a warning for a `rmtree` error and continue"""\n formatted_exc = "\n".join(\n traceback.format_exception_only(type(exc_val), exc_val)\n )\n formatted_exc = formatted_exc.rstrip() # remove trailing new line\n if func in (os.unlink, os.remove, os.rmdir):\n logger.debug(\n "Failed to remove a temporary file '%s' due to %s.\n",\n path,\n formatted_exc,\n )\n else:\n logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)\n errors.append(exc_val)\n\n if self.ignore_cleanup_errors:\n try:\n # first try with @retry; retrying to handle ephemeral errors\n rmtree(self._path, ignore_errors=False)\n except OSError:\n # last pass ignore/log all errors\n rmtree(self._path, onexc=onerror)\n if errors:\n logger.warning(\n "Failed to remove contents in a temporary directory '%s'.\n"\n "You can safely remove it manually.",\n self._path,\n )\n else:\n rmtree(self._path)\n\n\nclass AdjacentTempDirectory(TempDirectory):\n """Helper class that creates a temporary directory adjacent to a real one.\n\n Attributes:\n original\n The original directory to create a temp directory for.\n path\n After calling create() or entering, contains the full\n path to the temporary directory.\n delete\n Whether the directory should be deleted when exiting\n (when used as a 
contextmanager)\n\n """\n\n # The characters that may be used to name the temp directory\n # We always prepend a ~ and then rotate through these until\n # a usable name is found.\n # pkg_resources raises a different error for .dist-info folder\n # with leading '-' and invalid metadata\n LEADING_CHARS = "-~.=%0123456789"\n\n def __init__(self, original: str, delete: Optional[bool] = None) -> None:\n self.original = original.rstrip("/\\")\n super().__init__(delete=delete)\n\n @classmethod\n def _generate_names(cls, name: str) -> Generator[str, None, None]:\n """Generates a series of temporary names.\n\n The algorithm replaces the leading characters in the name\n with ones that are valid filesystem characters, but are not\n valid package names (for both Python and pip definitions of\n package).\n """\n for i in range(1, len(name)):\n for candidate in itertools.combinations_with_replacement(\n cls.LEADING_CHARS, i - 1\n ):\n new_name = "~" + "".join(candidate) + name[i:]\n if new_name != name:\n yield new_name\n\n # If we make it this far, we will have to make a longer name\n for i in range(len(cls.LEADING_CHARS)):\n for candidate in itertools.combinations_with_replacement(\n cls.LEADING_CHARS, i\n ):\n new_name = "~" + "".join(candidate) + name\n if new_name != name:\n yield new_name\n\n def _create(self, kind: str) -> str:\n root, name = os.path.split(self.original)\n for candidate in self._generate_names(name):\n path = os.path.join(root, candidate)\n try:\n os.mkdir(path)\n except OSError as ex:\n # Continue if the name exists already\n if ex.errno != errno.EEXIST:\n raise\n else:\n path = os.path.realpath(path)\n break\n else:\n # Final fallback on the default behavior.\n path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))\n\n logger.debug("Created temporary directory: %s", path)\n return path\n
.venv\Lib\site-packages\pip\_internal\utils\temp_dir.py
temp_dir.py
Python
9,310
0.95
0.185811
0.090909
awesome-app
736
2024-11-16T19:12:00.562362
Apache-2.0
false
53843a3d977e27c9dfdc97122906c4ee
"""Utilities related archives."""\n\nimport logging\nimport os\nimport shutil\nimport stat\nimport sys\nimport tarfile\nimport zipfile\nfrom typing import Iterable, List, Optional\nfrom zipfile import ZipInfo\n\nfrom pip._internal.exceptions import InstallationError\nfrom pip._internal.utils.filetypes import (\n BZ2_EXTENSIONS,\n TAR_EXTENSIONS,\n XZ_EXTENSIONS,\n ZIP_EXTENSIONS,\n)\nfrom pip._internal.utils.misc import ensure_dir\n\nlogger = logging.getLogger(__name__)\n\n\nSUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS\n\ntry:\n import bz2 # noqa\n\n SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS\nexcept ImportError:\n logger.debug("bz2 module is not available")\n\ntry:\n # Only for Python 3.3+\n import lzma # noqa\n\n SUPPORTED_EXTENSIONS += XZ_EXTENSIONS\nexcept ImportError:\n logger.debug("lzma module is not available")\n\n\ndef current_umask() -> int:\n """Get the current umask which involves having to set it temporarily."""\n mask = os.umask(0)\n os.umask(mask)\n return mask\n\n\ndef split_leading_dir(path: str) -> List[str]:\n path = path.lstrip("/").lstrip("\\")\n if "/" in path and (\n ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path\n ):\n return path.split("/", 1)\n elif "\\" in path:\n return path.split("\\", 1)\n else:\n return [path, ""]\n\n\ndef has_leading_dir(paths: Iterable[str]) -> bool:\n """Returns true if all the paths have the same leading path name\n (i.e., everything is in one subdirectory in an archive)"""\n common_prefix = None\n for path in paths:\n prefix, rest = split_leading_dir(path)\n if not prefix:\n return False\n elif common_prefix is None:\n common_prefix = prefix\n elif prefix != common_prefix:\n return False\n return True\n\n\ndef is_within_directory(directory: str, target: str) -> bool:\n """\n Return true if the absolute path of target is within the directory\n """\n abs_directory = os.path.abspath(directory)\n abs_target = os.path.abspath(target)\n\n prefix = os.path.commonprefix([abs_directory, 
abs_target])\n return prefix == abs_directory\n\n\ndef _get_default_mode_plus_executable() -> int:\n return 0o777 & ~current_umask() | 0o111\n\n\ndef set_extracted_file_to_default_mode_plus_executable(path: str) -> None:\n """\n Make file present at path have execute for user/group/world\n (chmod +x) is no-op on windows per python docs\n """\n os.chmod(path, _get_default_mode_plus_executable())\n\n\ndef zip_item_is_executable(info: ZipInfo) -> bool:\n mode = info.external_attr >> 16\n # if mode and regular file and any execute permissions for\n # user/group/world?\n return bool(mode and stat.S_ISREG(mode) and mode & 0o111)\n\n\ndef unzip_file(filename: str, location: str, flatten: bool = True) -> None:\n """\n Unzip the file (with path `filename`) to the destination `location`. All\n files are written based on system defaults and umask (i.e. permissions are\n not preserved), except that regular file members with any execute\n permissions (user, group, or world) have "chmod +x" applied after being\n written. 
Note that for windows, any execute changes using os.chmod are\n no-ops per the python docs.\n """\n ensure_dir(location)\n zipfp = open(filename, "rb")\n try:\n zip = zipfile.ZipFile(zipfp, allowZip64=True)\n leading = has_leading_dir(zip.namelist()) and flatten\n for info in zip.infolist():\n name = info.filename\n fn = name\n if leading:\n fn = split_leading_dir(name)[1]\n fn = os.path.join(location, fn)\n dir = os.path.dirname(fn)\n if not is_within_directory(location, fn):\n message = (\n "The zip file ({}) has a file ({}) trying to install "\n "outside target directory ({})"\n )\n raise InstallationError(message.format(filename, fn, location))\n if fn.endswith("/") or fn.endswith("\\"):\n # A directory\n ensure_dir(fn)\n else:\n ensure_dir(dir)\n # Don't use read() to avoid allocating an arbitrarily large\n # chunk of memory for the file's content\n fp = zip.open(name)\n try:\n with open(fn, "wb") as destfp:\n shutil.copyfileobj(fp, destfp)\n finally:\n fp.close()\n if zip_item_is_executable(info):\n set_extracted_file_to_default_mode_plus_executable(fn)\n finally:\n zipfp.close()\n\n\ndef untar_file(filename: str, location: str) -> None:\n """\n Untar the file (with path `filename`) to the destination `location`.\n All files are written based on system defaults and umask (i.e. permissions\n are not preserved), except that regular file members with any execute\n permissions (user, group, or world) have "chmod +x" applied on top of the\n default. 
Note that for windows, any execute changes using os.chmod are\n no-ops per the python docs.\n """\n ensure_dir(location)\n if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):\n mode = "r:gz"\n elif filename.lower().endswith(BZ2_EXTENSIONS):\n mode = "r:bz2"\n elif filename.lower().endswith(XZ_EXTENSIONS):\n mode = "r:xz"\n elif filename.lower().endswith(".tar"):\n mode = "r"\n else:\n logger.warning(\n "Cannot determine compression type for file %s",\n filename,\n )\n mode = "r:*"\n\n tar = tarfile.open(filename, mode, encoding="utf-8") # type: ignore\n try:\n leading = has_leading_dir([member.name for member in tar.getmembers()])\n\n # PEP 706 added `tarfile.data_filter`, and made some other changes to\n # Python's tarfile module (see below). The features were backported to\n # security releases.\n try:\n data_filter = tarfile.data_filter\n except AttributeError:\n _untar_without_filter(filename, location, tar, leading)\n else:\n default_mode_plus_executable = _get_default_mode_plus_executable()\n\n if leading:\n # Strip the leading directory from all files in the archive,\n # including hardlink targets (which are relative to the\n # unpack location).\n for member in tar.getmembers():\n name_lead, name_rest = split_leading_dir(member.name)\n member.name = name_rest\n if member.islnk():\n lnk_lead, lnk_rest = split_leading_dir(member.linkname)\n if lnk_lead == name_lead:\n member.linkname = lnk_rest\n\n def pip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:\n orig_mode = member.mode\n try:\n try:\n member = data_filter(member, location)\n except tarfile.LinkOutsideDestinationError:\n if sys.version_info[:3] in {\n (3, 9, 17),\n (3, 10, 12),\n (3, 11, 4),\n }:\n # The tarfile filter in specific Python versions\n # raises LinkOutsideDestinationError on valid input\n # (https://github.com/python/cpython/issues/107845)\n # Ignore the error there, but do use the\n # more lax `tar_filter`\n member = tarfile.tar_filter(member, 
location)\n else:\n raise\n except tarfile.TarError as exc:\n message = "Invalid member in the tar file {}: {}"\n # Filter error messages mention the member name.\n # No need to add it here.\n raise InstallationError(\n message.format(\n filename,\n exc,\n )\n )\n if member.isfile() and orig_mode & 0o111:\n member.mode = default_mode_plus_executable\n else:\n # See PEP 706 note above.\n # The PEP changed this from `int` to `Optional[int]`,\n # where None means "use the default". Mypy doesn't\n # know this yet.\n member.mode = None # type: ignore [assignment]\n return member\n\n tar.extractall(location, filter=pip_filter)\n\n finally:\n tar.close()\n\n\ndef _untar_without_filter(\n filename: str,\n location: str,\n tar: tarfile.TarFile,\n leading: bool,\n) -> None:\n """Fallback for Python without tarfile.data_filter"""\n for member in tar.getmembers():\n fn = member.name\n if leading:\n fn = split_leading_dir(fn)[1]\n path = os.path.join(location, fn)\n if not is_within_directory(location, path):\n message = (\n "The tar file ({}) has a file ({}) trying to install "\n "outside target directory ({})"\n )\n raise InstallationError(message.format(filename, path, location))\n if member.isdir():\n ensure_dir(path)\n elif member.issym():\n try:\n tar._extract_member(member, path)\n except Exception as exc:\n # Some corrupt tar files seem to produce this\n # (specifically bad symlinks)\n logger.warning(\n "In the tar file %s the member %s is invalid: %s",\n filename,\n member.name,\n exc,\n )\n continue\n else:\n try:\n fp = tar.extractfile(member)\n except (KeyError, AttributeError) as exc:\n # Some corrupt tar files seem to produce this\n # (specifically bad symlinks)\n logger.warning(\n "In the tar file %s the member %s is invalid: %s",\n filename,\n member.name,\n exc,\n )\n continue\n ensure_dir(os.path.dirname(path))\n assert fp is not None\n with open(path, "wb") as destfp:\n shutil.copyfileobj(fp, destfp)\n fp.close()\n # Update the timestamp (useful for cython 
compiled files)\n tar.utime(member, path)\n # member have any execute permissions for user/group/world?\n if member.mode & 0o111:\n set_extracted_file_to_default_mode_plus_executable(path)\n\n\ndef unpack_file(\n filename: str,\n location: str,\n content_type: Optional[str] = None,\n) -> None:\n filename = os.path.realpath(filename)\n if (\n content_type == "application/zip"\n or filename.lower().endswith(ZIP_EXTENSIONS)\n or zipfile.is_zipfile(filename)\n ):\n unzip_file(filename, location, flatten=not filename.endswith(".whl"))\n elif (\n content_type == "application/x-gzip"\n or tarfile.is_tarfile(filename)\n or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)\n ):\n untar_file(filename, location)\n else:\n # FIXME: handle?\n # FIXME: magic signatures?\n logger.critical(\n "Cannot unpack file %s (downloaded from %s, content-type: %s); "\n "cannot detect archive format",\n filename,\n location,\n content_type,\n )\n raise InstallationError(f"Cannot determine archive format of {location}")\n
.venv\Lib\site-packages\pip\_internal\utils\unpacking.py
unpacking.py
Python
11,926
0.95
0.170149
0.104377
awesome-app
129
2024-01-05T17:17:55.069386
BSD-3-Clause
false
26b0105998df9eb01df3a96fdbb2cfa8
import os\nimport string\nimport urllib.parse\nimport urllib.request\n\nfrom .compat import WINDOWS\n\n\ndef path_to_url(path: str) -> str:\n """\n Convert a path to a file: URL. The path will be made absolute and have\n quoted path parts.\n """\n path = os.path.normpath(os.path.abspath(path))\n url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))\n return url\n\n\ndef url_to_path(url: str) -> str:\n """\n Convert a file: URL to a path.\n """\n assert url.startswith(\n "file:"\n ), f"You can only turn file: urls into filenames (not {url!r})"\n\n _, netloc, path, _, _ = urllib.parse.urlsplit(url)\n\n if not netloc or netloc == "localhost":\n # According to RFC 8089, same as empty authority.\n netloc = ""\n elif WINDOWS:\n # If we have a UNC path, prepend UNC share notation.\n netloc = "\\\\" + netloc\n else:\n raise ValueError(\n f"non-local file URIs are not supported on this platform: {url!r}"\n )\n\n path = urllib.request.url2pathname(netloc + path)\n\n # On Windows, urlsplit parses the path as something like "/C:/Users/foo".\n # This creates issues for path-related functions like io.open(), so we try\n # to detect and strip the leading slash.\n if (\n WINDOWS\n and not netloc # Not UNC.\n and len(path) >= 3\n and path[0] == "/" # Leading slash to strip.\n and path[1] in string.ascii_letters # Drive letter.\n and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path.\n ):\n path = path[1:]\n\n return path\n
.venv\Lib\site-packages\pip\_internal\utils\urls.py
urls.py
Python
1,599
0.95
0.109091
0.111111
python-kit
740
2023-09-01T02:00:35.435930
BSD-3-Clause
false
01c785ae452eba3f93b7404f21d6bf97
import logging\nimport os\nimport re\nimport site\nimport sys\nfrom typing import List, Optional\n\nlogger = logging.getLogger(__name__)\n_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(\n r"include-system-site-packages\s*=\s*(?P<value>true|false)"\n)\n\n\ndef _running_under_venv() -> bool:\n """Checks if sys.base_prefix and sys.prefix match.\n\n This handles PEP 405 compliant virtual environments.\n """\n return sys.prefix != getattr(sys, "base_prefix", sys.prefix)\n\n\ndef _running_under_legacy_virtualenv() -> bool:\n """Checks if sys.real_prefix is set.\n\n This handles virtual environments created with pypa's virtualenv.\n """\n # pypa/virtualenv case\n return hasattr(sys, "real_prefix")\n\n\ndef running_under_virtualenv() -> bool:\n """True if we're running inside a virtual environment, False otherwise."""\n return _running_under_venv() or _running_under_legacy_virtualenv()\n\n\ndef _get_pyvenv_cfg_lines() -> Optional[List[str]]:\n """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines\n\n Returns None, if it could not read/access the file.\n """\n pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")\n try:\n # Although PEP 405 does not specify, the built-in venv module always\n # writes with UTF-8. 
(pypa/pip#8717)\n with open(pyvenv_cfg_file, encoding="utf-8") as f:\n return f.read().splitlines() # avoids trailing newlines\n except OSError:\n return None\n\n\ndef _no_global_under_venv() -> bool:\n """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion\n\n PEP 405 specifies that when system site-packages are not supposed to be\n visible from a virtual environment, `pyvenv.cfg` must contain the following\n line:\n\n include-system-site-packages = false\n\n Additionally, log a warning if accessing the file fails.\n """\n cfg_lines = _get_pyvenv_cfg_lines()\n if cfg_lines is None:\n # We're not in a "sane" venv, so assume there is no system\n # site-packages access (since that's PEP 405's default state).\n logger.warning(\n "Could not access 'pyvenv.cfg' despite a virtual environment "\n "being active. Assuming global site-packages is not accessible "\n "in this environment."\n )\n return True\n\n for line in cfg_lines:\n match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)\n if match is not None and match.group("value") == "false":\n return True\n return False\n\n\ndef _no_global_under_legacy_virtualenv() -> bool:\n """Check if "no-global-site-packages.txt" exists beside site.py\n\n This mirrors logic in pypa/virtualenv for determining whether system\n site-packages are visible in the virtual environment.\n """\n site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))\n no_global_site_packages_file = os.path.join(\n site_mod_dir,\n "no-global-site-packages.txt",\n )\n return os.path.exists(no_global_site_packages_file)\n\n\ndef virtualenv_no_global() -> bool:\n """Returns a boolean, whether running in venv with no system site-packages."""\n # PEP 405 compliance needs to be checked first since virtualenv >=20 would\n # return True for both checks, but is only able to use the PEP 405 config.\n if _running_under_venv():\n return _no_global_under_venv()\n\n if _running_under_legacy_virtualenv():\n return 
_no_global_under_legacy_virtualenv()\n\n return False\n
.venv\Lib\site-packages\pip\_internal\utils\virtualenv.py
virtualenv.py
Python
3,456
0.95
0.211538
0.088608
react-lib
222
2024-03-06T09:48:09.070100
Apache-2.0
false
15111b45000fb18281fb5dfe8dc4ef70
"""Support functions for working with wheel files."""\n\nimport logging\nfrom email.message import Message\nfrom email.parser import Parser\nfrom typing import Tuple\nfrom zipfile import BadZipFile, ZipFile\n\nfrom pip._vendor.packaging.utils import canonicalize_name\n\nfrom pip._internal.exceptions import UnsupportedWheel\n\nVERSION_COMPATIBLE = (1, 0)\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:\n """Extract information from the provided wheel, ensuring it meets basic\n standards.\n\n Returns the name of the .dist-info directory and the parsed WHEEL metadata.\n """\n try:\n info_dir = wheel_dist_info_dir(wheel_zip, name)\n metadata = wheel_metadata(wheel_zip, info_dir)\n version = wheel_version(metadata)\n except UnsupportedWheel as e:\n raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")\n\n check_compatibility(version, name)\n\n return info_dir, metadata\n\n\ndef wheel_dist_info_dir(source: ZipFile, name: str) -> str:\n """Returns the name of the contained .dist-info directory.\n\n Raises AssertionError or UnsupportedWheel if not found, >1 found, or\n it doesn't match the provided name.\n """\n # Zip file path separators must be /\n subdirs = {p.split("/", 1)[0] for p in source.namelist()}\n\n info_dirs = [s for s in subdirs if s.endswith(".dist-info")]\n\n if not info_dirs:\n raise UnsupportedWheel(".dist-info directory not found")\n\n if len(info_dirs) > 1:\n raise UnsupportedWheel(\n "multiple .dist-info directories found: {}".format(", ".join(info_dirs))\n )\n\n info_dir = info_dirs[0]\n\n info_dir_name = canonicalize_name(info_dir)\n canonical_name = canonicalize_name(name)\n if not info_dir_name.startswith(canonical_name):\n raise UnsupportedWheel(\n f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"\n )\n\n return info_dir\n\n\ndef read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:\n try:\n return source.read(path)\n # BadZipFile for 
general corruption, KeyError for missing entry,\n # and RuntimeError for password-protected files\n except (BadZipFile, KeyError, RuntimeError) as e:\n raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")\n\n\ndef wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:\n """Return the WHEEL metadata of an extracted wheel, if possible.\n Otherwise, raise UnsupportedWheel.\n """\n path = f"{dist_info_dir}/WHEEL"\n # Zip file path separators must be /\n wheel_contents = read_wheel_metadata_file(source, path)\n\n try:\n wheel_text = wheel_contents.decode()\n except UnicodeDecodeError as e:\n raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")\n\n # FeedParser (used by Parser) does not raise any exceptions. The returned\n # message may have .defects populated, but for backwards-compatibility we\n # currently ignore them.\n return Parser().parsestr(wheel_text)\n\n\ndef wheel_version(wheel_data: Message) -> Tuple[int, ...]:\n """Given WHEEL metadata, return the parsed Wheel-Version.\n Otherwise, raise UnsupportedWheel.\n """\n version_text = wheel_data["Wheel-Version"]\n if version_text is None:\n raise UnsupportedWheel("WHEEL is missing Wheel-Version")\n\n version = version_text.strip()\n\n try:\n return tuple(map(int, version.split(".")))\n except ValueError:\n raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")\n\n\ndef check_compatibility(version: Tuple[int, ...], name: str) -> None:\n """Raises errors or warns if called with an incompatible Wheel-Version.\n\n pip should refuse to install a Wheel-Version that's a major series\n ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when\n installing a version only minor version ahead (e.g 1.2 > 1.1).\n\n version: a 2-tuple representing a Wheel-Version (Major, Minor)\n name: name of wheel or package to raise exception about\n\n :raises UnsupportedWheel: when an incompatible Wheel-Version is given\n """\n if version[0] > VERSION_COMPATIBLE[0]:\n raise UnsupportedWheel(\n "{}'s 
Wheel-Version ({}) is not compatible with this version "\n "of pip".format(name, ".".join(map(str, version)))\n )\n elif version > VERSION_COMPATIBLE:\n logger.warning(\n "Installing from a newer Wheel-Version (%s)",\n ".".join(map(str, version)),\n )\n
.venv\Lib\site-packages\pip\_internal\utils\wheel.py
wheel.py
Python
4,493
0.95
0.195489
0.071429
react-lib
451
2024-04-17T14:24:09.463011
MIT
false
7801f5b8005dd7f0d1c89888a1f5886b
"""Functions brought over from jaraco.text.\n\nThese functions are not supposed to be used within `pip._internal`. These are\nhelper functions brought over from `jaraco.text` to enable vendoring newer\ncopies of `pkg_resources` without having to vendor `jaraco.text` and its entire\ndependency cone; something that our vendoring setup is not currently capable of\nhandling.\n\nLicense reproduced from original source below:\n\nCopyright Jason R. Coombs\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the "Software"), to\ndeal in the Software without restriction, including without limitation the\nrights to use, copy, modify, merge, publish, distribute, sublicense, and/or\nsell copies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\nIN THE SOFTWARE.\n"""\n\nimport functools\nimport itertools\n\n\ndef _nonblank(str):\n return str and not str.startswith("#")\n\n\n@functools.singledispatch\ndef yield_lines(iterable):\n r"""\n Yield valid lines of a string or iterable.\n\n >>> list(yield_lines(''))\n []\n >>> list(yield_lines(['foo', 'bar']))\n ['foo', 'bar']\n >>> list(yield_lines('foo\nbar'))\n ['foo', 'bar']\n >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))\n ['foo', 'baz #comment']\n >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))\n ['foo', 'bar', 'baz', 'bing']\n """\n return itertools.chain.from_iterable(map(yield_lines, iterable))\n\n\n@yield_lines.register(str)\ndef _(text):\n return filter(_nonblank, map(str.strip, text.splitlines()))\n\n\ndef drop_comment(line):\n """\n Drop comments.\n\n >>> drop_comment('foo # bar')\n 'foo'\n\n A hash without a space may be in a URL.\n\n >>> drop_comment('http://example.com/foo#bar')\n 'http://example.com/foo#bar'\n """\n return line.partition(" #")[0]\n\n\ndef join_continuation(lines):\n r"""\n Join lines continued by a trailing backslash.\n\n >>> list(join_continuation(['foo \\', 'bar', 'baz']))\n ['foobar', 'baz']\n >>> list(join_continuation(['foo \\', 'bar', 'baz']))\n ['foobar', 'baz']\n >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))\n ['foobarbaz']\n\n Not sure why, but...\n The character preceding the backslash is also elided.\n\n >>> list(join_continuation(['goo\\', 'dly']))\n ['godly']\n\n A terrible idea, but...\n If no line is available to continue, suppress the lines.\n\n >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))\n ['foo']\n """\n lines = iter(lines)\n for item in lines:\n while item.endswith("\\"):\n try:\n item = item[:-2].strip() + next(lines)\n except 
StopIteration:\n return\n yield item\n
.venv\Lib\site-packages\pip\_internal\utils\_jaraco_text.py
_jaraco_text.py
Python
3,350
0.95
0.073394
0
react-lib
923
2024-11-19T23:28:19.714965
Apache-2.0
false
17df62818a792a3e8159a1787a0098e3
"""Customize logging\n\nDefines custom logger class for the `logger.verbose(...)` method.\n\ninit_logging() must be called before any other modules that call logging.getLogger.\n"""\n\nimport logging\nfrom typing import Any, cast\n\n# custom log level for `--verbose` output\n# between DEBUG and INFO\nVERBOSE = 15\n\n\nclass VerboseLogger(logging.Logger):\n """Custom Logger, defining a verbose log-level\n\n VERBOSE is between INFO and DEBUG.\n """\n\n def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:\n return self.log(VERBOSE, msg, *args, **kwargs)\n\n\ndef getLogger(name: str) -> VerboseLogger:\n """logging.getLogger, but ensures our VerboseLogger class is returned"""\n return cast(VerboseLogger, logging.getLogger(name))\n\n\ndef init_logging() -> None:\n """Register our VerboseLogger and VERBOSE log level.\n\n Should be called before any calls to getLogger(),\n i.e. in pip._internal.__init__\n """\n logging.setLoggerClass(VerboseLogger)\n logging.addLevelName(VERBOSE, "VERBOSE")\n
.venv\Lib\site-packages\pip\_internal\utils\_log.py
_log.py
Python
1,015
0.95
0.210526
0.08
awesome-app
251
2024-03-25T23:24:41.444683
BSD-3-Clause
false
d525aebd855b84182950ca3e13b6fd7a
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\appdirs.cpython-313.pyc
appdirs.cpython-313.pyc
Other
2,497
0.8
0.02439
0.026316
node-utils
50
2024-02-06T11:50:43.565942
Apache-2.0
false
e84498a703dcde6f2ff4ec279e35cca7
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\compat.cpython-313.pyc
compat.cpython-313.pyc
Other
2,932
0.95
0.039216
0.021739
python-kit
161
2025-06-07T08:55:47.267525
MIT
false
99975bd2e22ccfadbf956f4da3b904a3
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\compatibility_tags.cpython-313.pyc
compatibility_tags.cpython-313.pyc
Other
6,941
0.8
0.044944
0.011905
vue-tools
189
2025-06-18T19:27:22.236277
MIT
false
65bc66f2585bb0cc30b999e16c19f80c
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\datetime.cpython-313.pyc
datetime.cpython-313.pyc
Other
674
0.8
0
0
node-utils
153
2024-05-05T22:09:32.182870
BSD-3-Clause
false
551f4c500ec70084d2b0a144ce8869d8
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\deprecation.cpython-313.pyc
deprecation.cpython-313.pyc
Other
4,245
0.95
0.047619
0
vue-tools
904
2024-07-18T04:10:35.089316
GPL-3.0
false
adafc85b4a3e33772be4b45150c71416
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\direct_url_helpers.cpython-313.pyc
direct_url_helpers.cpython-313.pyc
Other
3,633
0.95
0
0.038462
vue-tools
7
2024-05-06T16:44:52.742428
BSD-3-Clause
false
917e3b151a067118873376ce849dba30
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\egg_link.cpython-313.pyc
egg_link.cpython-313.pyc
Other
3,214
0.95
0.18
0
vue-tools
295
2024-10-29T18:39:41.712018
GPL-3.0
false
d3671729a9814b8f113019a7d67347fe
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\entrypoints.cpython-313.pyc
entrypoints.cpython-313.pyc
Other
4,190
0.8
0.1
0
node-utils
580
2023-07-11T11:31:28.677994
MIT
false
1c29d67c6d4e6a2bce42f71417ea9be2
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\filesystem.cpython-313.pyc
filesystem.cpython-313.pyc
Other
7,439
0.8
0.028986
0
python-kit
447
2024-01-05T11:29:53.433836
Apache-2.0
false
38cf9117b3f1f2e02fd27f4edf1f8bb2
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\filetypes.cpython-313.pyc
filetypes.cpython-313.pyc
Other
1,164
0.8
0.0625
0.066667
python-kit
418
2024-02-18T11:02:59.458491
BSD-3-Clause
false
ca334206e735b75612ea97047465c19a
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\glibc.cpython-313.pyc
glibc.cpython-313.pyc
Other
2,449
0.8
0.037037
0
vue-tools
678
2025-01-26T19:42:50.731350
Apache-2.0
false
8a0d103265a3a8cab418c7041f248713
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\hashes.cpython-313.pyc
hashes.cpython-313.pyc
Other
7,741
0.95
0.051948
0.014925
python-kit
205
2023-07-11T21:40:19.951037
BSD-3-Clause
false
a9f5720a23e0598d216dbced03707763
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\logging.cpython-313.pyc
logging.cpython-313.pyc
Other
14,527
0.8
0.03125
0
node-utils
829
2024-08-12T07:58:53.506021
GPL-3.0
false
a0573f859967387b388bde11457cfac6
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\misc.cpython-313.pyc
misc.cpython-313.pyc
Other
33,483
0.95
0.046914
0.005618
react-lib
351
2025-04-14T13:50:02.351985
GPL-3.0
false
0ad3771ca15dfc995f4bb3f5282504d9
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\packaging.cpython-313.pyc
packaging.cpython-313.pyc
Other
1,867
0.95
0.117647
0
vue-tools
675
2024-04-23T21:28:27.457945
MIT
false
d9d572e59e244b750ec54772b57c62d6
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\retry.cpython-313.pyc
retry.cpython-313.pyc
Other
2,109
0.95
0.085714
0
python-kit
53
2024-05-26T20:20:35.871462
GPL-3.0
false
a1823121f0bb3574280744fb2faf29d6
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\setuptools_build.cpython-313.pyc
setuptools_build.cpython-313.pyc
Other
4,616
0.95
0.043478
0.164706
awesome-app
252
2023-09-29T15:50:43.220108
BSD-3-Clause
false
2ee7d9aaf2ce45b9bb3512e8b2568c77
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\subprocess.cpython-313.pyc
subprocess.cpython-313.pyc
Other
8,871
0.8
0.074468
0.011111
vue-tools
143
2025-06-28T00:13:43.342666
GPL-3.0
false
7bee31af4d1b196be33d8e934e7849a8
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\temp_dir.cpython-313.pyc
temp_dir.cpython-313.pyc
Other
12,136
0.95
0.062992
0
react-lib
882
2025-04-30T22:14:23.868770
BSD-3-Clause
false
987c99392e4b89a657f36a6c595a3f4a
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\unpacking.cpython-313.pyc
unpacking.cpython-313.pyc
Other
13,773
0.8
0.048611
0.006993
python-kit
855
2024-09-06T09:36:58.822557
BSD-3-Clause
false
9d658bc1f4c48d35d0871405538883d8
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\urls.cpython-313.pyc
urls.cpython-313.pyc
Other
2,129
0.8
0
0
python-kit
509
2023-12-16T11:46:45.474650
BSD-3-Clause
false
3f51df730596c9fb67fb617fe9d33b2d
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\virtualenv.cpython-313.pyc
virtualenv.cpython-313.pyc
Other
4,469
0.8
0.123077
0.017857
node-utils
768
2024-06-11T04:44:01.610204
GPL-3.0
false
95975e296e45f859bd7801f35a1c8831
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\wheel.cpython-313.pyc
wheel.cpython-313.pyc
Other
5,890
0.95
0.053333
0
node-utils
222
2024-02-03T02:02:05.347694
MIT
false
f2988edf7ed5fa45afb5e9fccf8b8a5c
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\_jaraco_text.cpython-313.pyc
_jaraco_text.cpython-313.pyc
Other
4,440
0.95
0
0
vue-tools
172
2023-10-06T21:17:33.285250
MIT
false
640d90324b4074371ee599b4b5f10325
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\_log.cpython-313.pyc
_log.cpython-313.pyc
Other
1,913
0.85
0.125
0
react-lib
425
2024-06-28T15:05:17.206411
GPL-3.0
false
d214f172cc0e1659c16ca688384c2922
\n\n
.venv\Lib\site-packages\pip\_internal\utils\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
194
0.7
0
0
awesome-app
370
2024-10-23T06:24:58.267966
GPL-3.0
false
a871aafda062916b6370e0a93d43c62d
import logging\nfrom typing import List, Optional, Tuple\n\nfrom pip._internal.utils.misc import HiddenText, display_path\nfrom pip._internal.utils.subprocess import make_command\nfrom pip._internal.utils.urls import path_to_url\nfrom pip._internal.vcs.versioncontrol import (\n AuthInfo,\n RemoteNotFoundError,\n RevOptions,\n VersionControl,\n vcs,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass Bazaar(VersionControl):\n name = "bzr"\n dirname = ".bzr"\n repo_name = "branch"\n schemes = (\n "bzr+http",\n "bzr+https",\n "bzr+ssh",\n "bzr+sftp",\n "bzr+ftp",\n "bzr+lp",\n "bzr+file",\n )\n\n @staticmethod\n def get_base_rev_args(rev: str) -> List[str]:\n return ["-r", rev]\n\n def fetch_new(\n self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int\n ) -> None:\n rev_display = rev_options.to_display()\n logger.info(\n "Checking out %s%s to %s",\n url,\n rev_display,\n display_path(dest),\n )\n if verbosity <= 0:\n flags = ["--quiet"]\n elif verbosity == 1:\n flags = []\n else:\n flags = [f"-{'v'*verbosity}"]\n cmd_args = make_command(\n "checkout", "--lightweight", *flags, rev_options.to_args(), url, dest\n )\n self.run_command(cmd_args)\n\n def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n self.run_command(make_command("switch", url), cwd=dest)\n\n def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n output = self.run_command(\n make_command("info"), show_stdout=False, stdout_only=True, cwd=dest\n )\n if output.startswith("Standalone "):\n # Older versions of pip used to create standalone branches.\n # Convert the standalone branch to a checkout by calling "bzr bind".\n cmd_args = make_command("bind", "-q", url)\n self.run_command(cmd_args, cwd=dest)\n\n cmd_args = make_command("update", "-q", rev_options.to_args())\n self.run_command(cmd_args, cwd=dest)\n\n @classmethod\n def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:\n # hotfix the URL scheme after 
removing bzr+ from bzr+ssh:// re-add it\n url, rev, user_pass = super().get_url_rev_and_auth(url)\n if url.startswith("ssh://"):\n url = "bzr+" + url\n return url, rev, user_pass\n\n @classmethod\n def get_remote_url(cls, location: str) -> str:\n urls = cls.run_command(\n ["info"], show_stdout=False, stdout_only=True, cwd=location\n )\n for line in urls.splitlines():\n line = line.strip()\n for x in ("checkout of branch: ", "parent branch: "):\n if line.startswith(x):\n repo = line.split(x)[1]\n if cls._is_local_repository(repo):\n return path_to_url(repo)\n return repo\n raise RemoteNotFoundError\n\n @classmethod\n def get_revision(cls, location: str) -> str:\n revision = cls.run_command(\n ["revno"],\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n )\n return revision.splitlines()[-1]\n\n @classmethod\n def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:\n """Always assume the versions don't match"""\n return False\n\n\nvcs.register(Bazaar)\n
.venv\Lib\site-packages\pip\_internal\vcs\bazaar.py
bazaar.py
Python
3,528
0.95
0.160714
0.030928
python-kit
929
2025-03-18T07:14:03.853519
GPL-3.0
false
7805b2cdc22fcd57ebb2a0d9570d99e5
import logging\nimport os.path\nimport pathlib\nimport re\nimport urllib.parse\nimport urllib.request\nfrom dataclasses import replace\nfrom typing import Any, List, Optional, Tuple\n\nfrom pip._internal.exceptions import BadCommand, InstallationError\nfrom pip._internal.utils.misc import HiddenText, display_path, hide_url\nfrom pip._internal.utils.subprocess import make_command\nfrom pip._internal.vcs.versioncontrol import (\n AuthInfo,\n RemoteNotFoundError,\n RemoteNotValidError,\n RevOptions,\n VersionControl,\n find_path_to_project_root_from_repo_root,\n vcs,\n)\n\nurlsplit = urllib.parse.urlsplit\nurlunsplit = urllib.parse.urlunsplit\n\n\nlogger = logging.getLogger(__name__)\n\n\nGIT_VERSION_REGEX = re.compile(\n r"^git version " # Prefix.\n r"(\d+)" # Major.\n r"\.(\d+)" # Dot, minor.\n r"(?:\.(\d+))?" # Optional dot, patch.\n r".*$" # Suffix, including any pre- and post-release segments we don't care about.\n)\n\nHASH_REGEX = re.compile("^[a-fA-F0-9]{40}$")\n\n# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git'\nSCP_REGEX = re.compile(\n r"""^\n # Optional user, e.g. 'git@'\n (\w+@)?\n # Server, e.g. 'github.com'.\n ([^/:]+):\n # The server-side path. e.g. 'user/project.git'. 
Must start with an\n # alphanumeric character so as not to be confusable with a Windows paths\n # like 'C:/foo/bar' or 'C:\foo\bar'.\n (\w[^:]*)\n $""",\n re.VERBOSE,\n)\n\n\ndef looks_like_hash(sha: str) -> bool:\n return bool(HASH_REGEX.match(sha))\n\n\nclass Git(VersionControl):\n name = "git"\n dirname = ".git"\n repo_name = "clone"\n schemes = (\n "git+http",\n "git+https",\n "git+ssh",\n "git+git",\n "git+file",\n )\n # Prevent the user's environment variables from interfering with pip:\n # https://github.com/pypa/pip/issues/1130\n unset_environ = ("GIT_DIR", "GIT_WORK_TREE")\n default_arg_rev = "HEAD"\n\n @staticmethod\n def get_base_rev_args(rev: str) -> List[str]:\n return [rev]\n\n @classmethod\n def run_command(cls, *args: Any, **kwargs: Any) -> str:\n if os.environ.get("PIP_NO_INPUT"):\n extra_environ = kwargs.get("extra_environ", {})\n extra_environ["GIT_TERMINAL_PROMPT"] = "0"\n extra_environ["GIT_SSH_COMMAND"] = "ssh -oBatchMode=yes"\n kwargs["extra_environ"] = extra_environ\n return super().run_command(*args, **kwargs)\n\n def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:\n _, rev_options = self.get_url_rev_options(hide_url(url))\n if not rev_options.rev:\n return False\n if not self.is_commit_id_equal(dest, rev_options.rev):\n # the current commit is different from rev,\n # which means rev was something else than a commit hash\n return False\n # return False in the rare case rev is both a commit hash\n # and a tag or a branch; we don't want to cache in that case\n # because that branch/tag could point to something else in the future\n is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0])\n return not is_tag_or_branch\n\n def get_git_version(self) -> Tuple[int, ...]:\n version = self.run_command(\n ["version"],\n command_desc="git version",\n show_stdout=False,\n stdout_only=True,\n )\n match = GIT_VERSION_REGEX.match(version)\n if not match:\n logger.warning("Can't parse git version: %s", version)\n return ()\n 
return (int(match.group(1)), int(match.group(2)))\n\n @classmethod\n def get_current_branch(cls, location: str) -> Optional[str]:\n """\n Return the current branch, or None if HEAD isn't at a branch\n (e.g. detached HEAD).\n """\n # git-symbolic-ref exits with empty stdout if "HEAD" is a detached\n # HEAD rather than a symbolic ref. In addition, the -q causes the\n # command to exit with status code 1 instead of 128 in this case\n # and to suppress the message to stderr.\n args = ["symbolic-ref", "-q", "HEAD"]\n output = cls.run_command(\n args,\n extra_ok_returncodes=(1,),\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n )\n ref = output.strip()\n\n if ref.startswith("refs/heads/"):\n return ref[len("refs/heads/") :]\n\n return None\n\n @classmethod\n def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]:\n """\n Return (sha_or_none, is_branch), where sha_or_none is a commit hash\n if the revision names a remote branch or tag, otherwise None.\n\n Args:\n dest: the repository directory.\n rev: the revision name.\n """\n # Pass rev to pre-filter the list.\n output = cls.run_command(\n ["show-ref", rev],\n cwd=dest,\n show_stdout=False,\n stdout_only=True,\n on_returncode="ignore",\n )\n refs = {}\n # NOTE: We do not use splitlines here since that would split on other\n # unicode separators, which can be maliciously used to install a\n # different revision.\n for line in output.strip().split("\n"):\n line = line.rstrip("\r")\n if not line:\n continue\n try:\n ref_sha, ref_name = line.split(" ", maxsplit=2)\n except ValueError:\n # Include the offending line to simplify troubleshooting if\n # this error ever occurs.\n raise ValueError(f"unexpected show-ref line: {line!r}")\n\n refs[ref_name] = ref_sha\n\n branch_ref = f"refs/remotes/origin/{rev}"\n tag_ref = f"refs/tags/{rev}"\n\n sha = refs.get(branch_ref)\n if sha is not None:\n return (sha, True)\n\n sha = refs.get(tag_ref)\n\n return (sha, False)\n\n @classmethod\n def 
_should_fetch(cls, dest: str, rev: str) -> bool:\n """\n Return true if rev is a ref or is a commit that we don't have locally.\n\n Branches and tags are not considered in this method because they are\n assumed to be always available locally (which is a normal outcome of\n ``git clone`` and ``git fetch --tags``).\n """\n if rev.startswith("refs/"):\n # Always fetch remote refs.\n return True\n\n if not looks_like_hash(rev):\n # Git fetch would fail with abbreviated commits.\n return False\n\n if cls.has_commit(dest, rev):\n # Don't fetch if we have the commit locally.\n return False\n\n return True\n\n @classmethod\n def resolve_revision(\n cls, dest: str, url: HiddenText, rev_options: RevOptions\n ) -> RevOptions:\n """\n Resolve a revision to a new RevOptions object with the SHA1 of the\n branch, tag, or ref if found.\n\n Args:\n rev_options: a RevOptions object.\n """\n rev = rev_options.arg_rev\n # The arg_rev property's implementation for Git ensures that the\n # rev return value is always non-None.\n assert rev is not None\n\n sha, is_branch = cls.get_revision_sha(dest, rev)\n\n if sha is not None:\n rev_options = rev_options.make_new(sha)\n rev_options = replace(rev_options, branch_name=(rev if is_branch else None))\n\n return rev_options\n\n # Do not show a warning for the common case of something that has\n # the form of a Git commit hash.\n if not looks_like_hash(rev):\n logger.warning(\n "Did not find branch or tag '%s', assuming revision or ref.",\n rev,\n )\n\n if not cls._should_fetch(dest, rev):\n return rev_options\n\n # fetch the requested revision\n cls.run_command(\n make_command("fetch", "-q", url, rev_options.to_args()),\n cwd=dest,\n )\n # Change the revision to the SHA of the ref we fetched\n sha = cls.get_revision(dest, rev="FETCH_HEAD")\n rev_options = rev_options.make_new(sha)\n\n return rev_options\n\n @classmethod\n def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:\n """\n Return whether the current commit hash equals 
the given name.\n\n Args:\n dest: the repository directory.\n name: a string name.\n """\n if not name:\n # Then avoid an unnecessary subprocess call.\n return False\n\n return cls.get_revision(dest) == name\n\n def fetch_new(\n self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int\n ) -> None:\n rev_display = rev_options.to_display()\n logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest))\n if verbosity <= 0:\n flags: Tuple[str, ...] = ("--quiet",)\n elif verbosity == 1:\n flags = ()\n else:\n flags = ("--verbose", "--progress")\n if self.get_git_version() >= (2, 17):\n # Git added support for partial clone in 2.17\n # https://git-scm.com/docs/partial-clone\n # Speeds up cloning by functioning without a complete copy of repository\n self.run_command(\n make_command(\n "clone",\n "--filter=blob:none",\n *flags,\n url,\n dest,\n )\n )\n else:\n self.run_command(make_command("clone", *flags, url, dest))\n\n if rev_options.rev:\n # Then a specific revision was requested.\n rev_options = self.resolve_revision(dest, url, rev_options)\n branch_name = getattr(rev_options, "branch_name", None)\n logger.debug("Rev options %s, branch_name %s", rev_options, branch_name)\n if branch_name is None:\n # Only do a checkout if the current commit id doesn't match\n # the requested revision.\n if not self.is_commit_id_equal(dest, rev_options.rev):\n cmd_args = make_command(\n "checkout",\n "-q",\n rev_options.to_args(),\n )\n self.run_command(cmd_args, cwd=dest)\n elif self.get_current_branch(dest) != branch_name:\n # Then a specific branch was requested, and that branch\n # is not yet checked out.\n track_branch = f"origin/{branch_name}"\n cmd_args = [\n "checkout",\n "-b",\n branch_name,\n "--track",\n track_branch,\n ]\n self.run_command(cmd_args, cwd=dest)\n else:\n sha = self.get_revision(dest)\n rev_options = rev_options.make_new(sha)\n\n logger.info("Resolved %s to commit %s", url, rev_options.rev)\n\n #: repo may contain submodules\n 
self.update_submodules(dest)\n\n def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n self.run_command(\n make_command("config", "remote.origin.url", url),\n cwd=dest,\n )\n cmd_args = make_command("checkout", "-q", rev_options.to_args())\n self.run_command(cmd_args, cwd=dest)\n\n self.update_submodules(dest)\n\n def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n # First fetch changes from the default remote\n if self.get_git_version() >= (1, 9):\n # fetch tags in addition to everything else\n self.run_command(["fetch", "-q", "--tags"], cwd=dest)\n else:\n self.run_command(["fetch", "-q"], cwd=dest)\n # Then reset to wanted revision (maybe even origin/master)\n rev_options = self.resolve_revision(dest, url, rev_options)\n cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args())\n self.run_command(cmd_args, cwd=dest)\n #: update submodules\n self.update_submodules(dest)\n\n @classmethod\n def get_remote_url(cls, location: str) -> str:\n """\n Return URL of the first remote encountered.\n\n Raises RemoteNotFoundError if the repository does not have a remote\n url configured.\n """\n # We need to pass 1 for extra_ok_returncodes since the command\n # exits with return code 1 if there are no matching lines.\n stdout = cls.run_command(\n ["config", "--get-regexp", r"remote\..*\.url"],\n extra_ok_returncodes=(1,),\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n )\n remotes = stdout.splitlines()\n try:\n found_remote = remotes[0]\n except IndexError:\n raise RemoteNotFoundError\n\n for remote in remotes:\n if remote.startswith("remote.origin.url "):\n found_remote = remote\n break\n url = found_remote.split(" ")[1]\n return cls._git_remote_to_pip_url(url.strip())\n\n @staticmethod\n def _git_remote_to_pip_url(url: str) -> str:\n """\n Convert a remote url from what git uses to what pip accepts.\n\n There are 3 legal forms **url** may take:\n\n 1. 
A fully qualified url: ssh://git@example.com/foo/bar.git\n 2. A local project.git folder: /path/to/bare/repository.git\n 3. SCP shorthand for form 1: git@example.com:foo/bar.git\n\n Form 1 is output as-is. Form 2 must be converted to URI and form 3 must\n be converted to form 1.\n\n See the corresponding test test_git_remote_url_to_pip() for examples of\n sample inputs/outputs.\n """\n if re.match(r"\w+://", url):\n # This is already valid. Pass it though as-is.\n return url\n if os.path.exists(url):\n # A local bare remote (git clone --mirror).\n # Needs a file:// prefix.\n return pathlib.PurePath(url).as_uri()\n scp_match = SCP_REGEX.match(url)\n if scp_match:\n # Add an ssh:// prefix and replace the ':' with a '/'.\n return scp_match.expand(r"ssh://\1\2/\3")\n # Otherwise, bail out.\n raise RemoteNotValidError(url)\n\n @classmethod\n def has_commit(cls, location: str, rev: str) -> bool:\n """\n Check if rev is a commit that is available in the local repository.\n """\n try:\n cls.run_command(\n ["rev-parse", "-q", "--verify", "sha^" + rev],\n cwd=location,\n log_failed_cmd=False,\n )\n except InstallationError:\n return False\n else:\n return True\n\n @classmethod\n def get_revision(cls, location: str, rev: Optional[str] = None) -> str:\n if rev is None:\n rev = "HEAD"\n current_rev = cls.run_command(\n ["rev-parse", rev],\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n )\n return current_rev.strip()\n\n @classmethod\n def get_subdirectory(cls, location: str) -> Optional[str]:\n """\n Return the path to Python project root, relative to the repo root.\n Return None if the project root is in the repo root.\n """\n # find the repo root\n git_dir = cls.run_command(\n ["rev-parse", "--git-dir"],\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n ).strip()\n if not os.path.isabs(git_dir):\n git_dir = os.path.join(location, git_dir)\n repo_root = os.path.abspath(os.path.join(git_dir, ".."))\n return find_path_to_project_root_from_repo_root(location, 
repo_root)\n\n @classmethod\n def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:\n """\n Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.\n That's required because although they use SSH they sometimes don't\n work with a ssh:// scheme (e.g. GitHub). But we need a scheme for\n parsing. Hence we remove it again afterwards and return it as a stub.\n """\n # Works around an apparent Git bug\n # (see https://article.gmane.org/gmane.comp.version-control.git/146500)\n scheme, netloc, path, query, fragment = urlsplit(url)\n if scheme.endswith("file"):\n initial_slashes = path[: -len(path.lstrip("/"))]\n newpath = initial_slashes + urllib.request.url2pathname(path).replace(\n "\\", "/"\n ).lstrip("/")\n after_plus = scheme.find("+") + 1\n url = scheme[:after_plus] + urlunsplit(\n (scheme[after_plus:], netloc, newpath, query, fragment),\n )\n\n if "://" not in url:\n assert "file:" not in url\n url = url.replace("git+", "git+ssh://")\n url, rev, user_pass = super().get_url_rev_and_auth(url)\n url = url.replace("ssh://", "")\n else:\n url, rev, user_pass = super().get_url_rev_and_auth(url)\n\n return url, rev, user_pass\n\n @classmethod\n def update_submodules(cls, location: str) -> None:\n if not os.path.exists(os.path.join(location, ".gitmodules")):\n return\n cls.run_command(\n ["submodule", "update", "--init", "--recursive", "-q"],\n cwd=location,\n )\n\n @classmethod\n def get_repository_root(cls, location: str) -> Optional[str]:\n loc = super().get_repository_root(location)\n if loc:\n return loc\n try:\n r = cls.run_command(\n ["rev-parse", "--show-toplevel"],\n cwd=location,\n show_stdout=False,\n stdout_only=True,\n on_returncode="raise",\n log_failed_cmd=False,\n )\n except BadCommand:\n logger.debug(\n "could not determine if %s is under git control "\n "because git is not available",\n location,\n )\n return None\n except InstallationError:\n return None\n return os.path.normpath(r.rstrip("\r\n"))\n\n 
@staticmethod\n def should_add_vcs_url_prefix(repo_url: str) -> bool:\n """In either https or ssh form, requirements must be prefixed with git+."""\n return True\n\n\nvcs.register(Git)\n
.venv\Lib\site-packages\pip\_internal\vcs\git.py
git.py
Python
18,591
0.95
0.151119
0.122056
vue-tools
227
2023-07-30T11:18:19.564065
BSD-3-Clause
false
688b18a42c1e00defefc261aa1fb1bbc
import configparser\nimport logging\nimport os\nfrom typing import List, Optional, Tuple\n\nfrom pip._internal.exceptions import BadCommand, InstallationError\nfrom pip._internal.utils.misc import HiddenText, display_path\nfrom pip._internal.utils.subprocess import make_command\nfrom pip._internal.utils.urls import path_to_url\nfrom pip._internal.vcs.versioncontrol import (\n RevOptions,\n VersionControl,\n find_path_to_project_root_from_repo_root,\n vcs,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass Mercurial(VersionControl):\n name = "hg"\n dirname = ".hg"\n repo_name = "clone"\n schemes = (\n "hg+file",\n "hg+http",\n "hg+https",\n "hg+ssh",\n "hg+static-http",\n )\n\n @staticmethod\n def get_base_rev_args(rev: str) -> List[str]:\n return [f"--rev={rev}"]\n\n def fetch_new(\n self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int\n ) -> None:\n rev_display = rev_options.to_display()\n logger.info(\n "Cloning hg %s%s to %s",\n url,\n rev_display,\n display_path(dest),\n )\n if verbosity <= 0:\n flags: Tuple[str, ...] 
= ("--quiet",)\n elif verbosity == 1:\n flags = ()\n elif verbosity == 2:\n flags = ("--verbose",)\n else:\n flags = ("--verbose", "--debug")\n self.run_command(make_command("clone", "--noupdate", *flags, url, dest))\n self.run_command(\n make_command("update", *flags, rev_options.to_args()),\n cwd=dest,\n )\n\n def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n repo_config = os.path.join(dest, self.dirname, "hgrc")\n config = configparser.RawConfigParser()\n try:\n config.read(repo_config)\n config.set("paths", "default", url.secret)\n with open(repo_config, "w") as config_file:\n config.write(config_file)\n except (OSError, configparser.NoSectionError) as exc:\n logger.warning("Could not switch Mercurial repository to %s: %s", url, exc)\n else:\n cmd_args = make_command("update", "-q", rev_options.to_args())\n self.run_command(cmd_args, cwd=dest)\n\n def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n self.run_command(["pull", "-q"], cwd=dest)\n cmd_args = make_command("update", "-q", rev_options.to_args())\n self.run_command(cmd_args, cwd=dest)\n\n @classmethod\n def get_remote_url(cls, location: str) -> str:\n url = cls.run_command(\n ["showconfig", "paths.default"],\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n ).strip()\n if cls._is_local_repository(url):\n url = path_to_url(url)\n return url.strip()\n\n @classmethod\n def get_revision(cls, location: str) -> str:\n """\n Return the repository-local changeset revision number, as an integer.\n """\n current_revision = cls.run_command(\n ["parents", "--template={rev}"],\n show_stdout=False,\n stdout_only=True,\n cwd=location,\n ).strip()\n return current_revision\n\n @classmethod\n def get_requirement_revision(cls, location: str) -> str:\n """\n Return the changeset identification hash, as a 40-character\n hexadecimal string\n """\n current_rev_hash = cls.run_command(\n ["parents", "--template={node}"],\n show_stdout=False,\n 
stdout_only=True,\n cwd=location,\n ).strip()\n return current_rev_hash\n\n @classmethod\n def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:\n """Always assume the versions don't match"""\n return False\n\n @classmethod\n def get_subdirectory(cls, location: str) -> Optional[str]:\n """\n Return the path to Python project root, relative to the repo root.\n Return None if the project root is in the repo root.\n """\n # find the repo root\n repo_root = cls.run_command(\n ["root"], show_stdout=False, stdout_only=True, cwd=location\n ).strip()\n if not os.path.isabs(repo_root):\n repo_root = os.path.abspath(os.path.join(location, repo_root))\n return find_path_to_project_root_from_repo_root(location, repo_root)\n\n @classmethod\n def get_repository_root(cls, location: str) -> Optional[str]:\n loc = super().get_repository_root(location)\n if loc:\n return loc\n try:\n r = cls.run_command(\n ["root"],\n cwd=location,\n show_stdout=False,\n stdout_only=True,\n on_returncode="raise",\n log_failed_cmd=False,\n )\n except BadCommand:\n logger.debug(\n "could not determine if %s is under hg control "\n "because hg is not available",\n location,\n )\n return None\n except InstallationError:\n return None\n return os.path.normpath(r.rstrip("\r\n"))\n\n\nvcs.register(Mercurial)\n
.venv\Lib\site-packages\pip\_internal\vcs\mercurial.py
mercurial.py
Python
5,249
0.95
0.128834
0.006803
react-lib
652
2023-09-25T16:31:18.811058
MIT
false
eb530b5a044a306794956925e8011d76
import logging\nimport os\nimport re\nfrom typing import List, Optional, Tuple\n\nfrom pip._internal.utils.misc import (\n HiddenText,\n display_path,\n is_console_interactive,\n is_installable_dir,\n split_auth_from_netloc,\n)\nfrom pip._internal.utils.subprocess import CommandArgs, make_command\nfrom pip._internal.vcs.versioncontrol import (\n AuthInfo,\n RemoteNotFoundError,\n RevOptions,\n VersionControl,\n vcs,\n)\n\nlogger = logging.getLogger(__name__)\n\n_svn_xml_url_re = re.compile('url="([^"]+)"')\n_svn_rev_re = re.compile(r'committed-rev="(\d+)"')\n_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')\n_svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")\n\n\nclass Subversion(VersionControl):\n name = "svn"\n dirname = ".svn"\n repo_name = "checkout"\n schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file")\n\n @classmethod\n def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:\n return True\n\n @staticmethod\n def get_base_rev_args(rev: str) -> List[str]:\n return ["-r", rev]\n\n @classmethod\n def get_revision(cls, location: str) -> str:\n """\n Return the maximum revision for all files under a given location\n """\n # Note: taken from setuptools.command.egg_info\n revision = 0\n\n for base, dirs, _ in os.walk(location):\n if cls.dirname not in dirs:\n dirs[:] = []\n continue # no sense walking uncontrolled subdirs\n dirs.remove(cls.dirname)\n entries_fn = os.path.join(base, cls.dirname, "entries")\n if not os.path.exists(entries_fn):\n # FIXME: should we warn?\n continue\n\n dirurl, localrev = cls._get_svn_url_rev(base)\n\n if base == location:\n assert dirurl is not None\n base = dirurl + "/" # save the root url\n elif not dirurl or not dirurl.startswith(base):\n dirs[:] = []\n continue # not part of the same svn tree, skip it\n revision = max(revision, localrev)\n return str(revision)\n\n @classmethod\n def get_netloc_and_auth(\n cls, netloc: str, scheme: str\n ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:\n """\n 
This override allows the auth information to be passed to svn via the\n --username and --password options instead of via the URL.\n """\n if scheme == "ssh":\n # The --username and --password options can't be used for\n # svn+ssh URLs, so keep the auth information in the URL.\n return super().get_netloc_and_auth(netloc, scheme)\n\n return split_auth_from_netloc(netloc)\n\n @classmethod\n def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:\n # hotfix the URL scheme after removing svn+ from svn+ssh:// re-add it\n url, rev, user_pass = super().get_url_rev_and_auth(url)\n if url.startswith("ssh://"):\n url = "svn+" + url\n return url, rev, user_pass\n\n @staticmethod\n def make_rev_args(\n username: Optional[str], password: Optional[HiddenText]\n ) -> CommandArgs:\n extra_args: CommandArgs = []\n if username:\n extra_args += ["--username", username]\n if password:\n extra_args += ["--password", password]\n\n return extra_args\n\n @classmethod\n def get_remote_url(cls, location: str) -> str:\n # In cases where the source is in a subdirectory, we have to look up in\n # the location until we find a valid project root.\n orig_location = location\n while not is_installable_dir(location):\n last_location = location\n location = os.path.dirname(location)\n if location == last_location:\n # We've traversed up to the root of the filesystem without\n # finding a Python project.\n logger.warning(\n "Could not find Python project for directory %s (tried all "\n "parent directories)",\n orig_location,\n )\n raise RemoteNotFoundError\n\n url, _rev = cls._get_svn_url_rev(location)\n if url is None:\n raise RemoteNotFoundError\n\n return url\n\n @classmethod\n def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:\n from pip._internal.exceptions import InstallationError\n\n entries_path = os.path.join(location, cls.dirname, "entries")\n if os.path.exists(entries_path):\n with open(entries_path) as f:\n data = f.read()\n else: # subversion >= 
1.7 does not have the 'entries' file\n data = ""\n\n url = None\n if data.startswith("8") or data.startswith("9") or data.startswith("10"):\n entries = list(map(str.splitlines, data.split("\n\x0c\n")))\n del entries[0][0] # get rid of the '8'\n url = entries[0][3]\n revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]\n elif data.startswith("<?xml"):\n match = _svn_xml_url_re.search(data)\n if not match:\n raise ValueError(f"Badly formatted data: {data!r}")\n url = match.group(1) # get repository URL\n revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]\n else:\n try:\n # subversion >= 1.7\n # Note that using get_remote_call_options is not necessary here\n # because `svn info` is being run against a local directory.\n # We don't need to worry about making sure interactive mode\n # is being used to prompt for passwords, because passwords\n # are only potentially needed for remote server requests.\n xml = cls.run_command(\n ["info", "--xml", location],\n show_stdout=False,\n stdout_only=True,\n )\n match = _svn_info_xml_url_re.search(xml)\n assert match is not None\n url = match.group(1)\n revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]\n except InstallationError:\n url, revs = None, []\n\n if revs:\n rev = max(revs)\n else:\n rev = 0\n\n return url, rev\n\n @classmethod\n def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:\n """Always assume the versions don't match"""\n return False\n\n def __init__(self, use_interactive: Optional[bool] = None) -> None:\n if use_interactive is None:\n use_interactive = is_console_interactive()\n self.use_interactive = use_interactive\n\n # This member is used to cache the fetched version of the current\n # ``svn`` client.\n # Special value definitions:\n # None: Not evaluated yet.\n # Empty tuple: Could not parse version.\n self._vcs_version: Optional[Tuple[int, ...]] = None\n\n super().__init__()\n\n def call_vcs_version(self) -> Tuple[int, ...]:\n """Query the 
version of the currently installed Subversion client.\n\n :return: A tuple containing the parts of the version information or\n ``()`` if the version returned from ``svn`` could not be parsed.\n :raises: BadCommand: If ``svn`` is not installed.\n """\n # Example versions:\n # svn, version 1.10.3 (r1842928)\n # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0\n # svn, version 1.7.14 (r1542130)\n # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu\n # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0)\n # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2\n version_prefix = "svn, version "\n version = self.run_command(["--version"], show_stdout=False, stdout_only=True)\n if not version.startswith(version_prefix):\n return ()\n\n version = version[len(version_prefix) :].split()[0]\n version_list = version.partition("-")[0].split(".")\n try:\n parsed_version = tuple(map(int, version_list))\n except ValueError:\n return ()\n\n return parsed_version\n\n def get_vcs_version(self) -> Tuple[int, ...]:\n """Return the version of the currently installed Subversion client.\n\n If the version of the Subversion client has already been queried,\n a cached value will be used.\n\n :return: A tuple containing the parts of the version information or\n ``()`` if the version returned from ``svn`` could not be parsed.\n :raises: BadCommand: If ``svn`` is not installed.\n """\n if self._vcs_version is not None:\n # Use cached version, if available.\n # If parsing the version failed previously (empty tuple),\n # do not attempt to parse it again.\n return self._vcs_version\n\n vcs_version = self.call_vcs_version()\n self._vcs_version = vcs_version\n return vcs_version\n\n def get_remote_call_options(self) -> CommandArgs:\n """Return options to be used on calls to Subversion that contact the server.\n\n These options are applicable for the following ``svn`` subcommands used\n in this class.\n\n - checkout\n - switch\n - update\n\n :return: A list of command line 
arguments to pass to ``svn``.\n """\n if not self.use_interactive:\n # --non-interactive switch is available since Subversion 0.14.4.\n # Subversion < 1.8 runs in interactive mode by default.\n return ["--non-interactive"]\n\n svn_version = self.get_vcs_version()\n # By default, Subversion >= 1.8 runs in non-interactive mode if\n # stdin is not a TTY. Since that is how pip invokes SVN, in\n # call_subprocess(), pip must pass --force-interactive to ensure\n # the user can be prompted for a password, if required.\n # SVN added the --force-interactive option in SVN 1.8. Since\n # e.g. RHEL/CentOS 7, which is supported until 2024, ships with\n # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip\n # can't safely add the option if the SVN version is < 1.8 (or unknown).\n if svn_version >= (1, 8):\n return ["--force-interactive"]\n\n return []\n\n def fetch_new(\n self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int\n ) -> None:\n rev_display = rev_options.to_display()\n logger.info(\n "Checking out %s%s to %s",\n url,\n rev_display,\n display_path(dest),\n )\n if verbosity <= 0:\n flags = ["--quiet"]\n else:\n flags = []\n cmd_args = make_command(\n "checkout",\n *flags,\n self.get_remote_call_options(),\n rev_options.to_args(),\n url,\n dest,\n )\n self.run_command(cmd_args)\n\n def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n cmd_args = make_command(\n "switch",\n self.get_remote_call_options(),\n rev_options.to_args(),\n url,\n dest,\n )\n self.run_command(cmd_args)\n\n def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n cmd_args = make_command(\n "update",\n self.get_remote_call_options(),\n rev_options.to_args(),\n dest,\n )\n self.run_command(cmd_args)\n\n\nvcs.register(Subversion)\n
.venv\Lib\site-packages\pip\_internal\vcs\subversion.py
subversion.py
Python
11,735
0.95
0.191358
0.148014
python-kit
544
2024-01-16T16:35:12.686511
Apache-2.0
false
352d4b7038ca17ffe93a35e265b52e54
"""Handles all VCS (version control) support"""\n\nimport logging\nimport os\nimport shutil\nimport sys\nimport urllib.parse\nfrom dataclasses import dataclass, field\nfrom typing import (\n Any,\n Dict,\n Iterable,\n Iterator,\n List,\n Literal,\n Mapping,\n Optional,\n Tuple,\n Type,\n Union,\n)\n\nfrom pip._internal.cli.spinners import SpinnerInterface\nfrom pip._internal.exceptions import BadCommand, InstallationError\nfrom pip._internal.utils.misc import (\n HiddenText,\n ask_path_exists,\n backup_dir,\n display_path,\n hide_url,\n hide_value,\n is_installable_dir,\n rmtree,\n)\nfrom pip._internal.utils.subprocess import (\n CommandArgs,\n call_subprocess,\n format_command_args,\n make_command,\n)\n\n__all__ = ["vcs"]\n\n\nlogger = logging.getLogger(__name__)\n\nAuthInfo = Tuple[Optional[str], Optional[str]]\n\n\ndef is_url(name: str) -> bool:\n """\n Return true if the name looks like a URL.\n """\n scheme = urllib.parse.urlsplit(name).scheme\n if not scheme:\n return False\n return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes\n\n\ndef make_vcs_requirement_url(\n repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None\n) -> str:\n """\n Return the URL for a VCS requirement.\n\n Args:\n repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").\n project_name: the (unescaped) project name.\n """\n egg_project_name = project_name.replace("-", "_")\n req = f"{repo_url}@{rev}#egg={egg_project_name}"\n if subdir:\n req += f"&subdirectory={subdir}"\n\n return req\n\n\ndef find_path_to_project_root_from_repo_root(\n location: str, repo_root: str\n) -> Optional[str]:\n """\n Find the the Python project's root by searching up the filesystem from\n `location`. 
Return the path to project root relative to `repo_root`.\n Return None if the project root is `repo_root`, or cannot be found.\n """\n # find project root.\n orig_location = location\n while not is_installable_dir(location):\n last_location = location\n location = os.path.dirname(location)\n if location == last_location:\n # We've traversed up to the root of the filesystem without\n # finding a Python project.\n logger.warning(\n "Could not find a Python project for directory %s (tried all "\n "parent directories)",\n orig_location,\n )\n return None\n\n if os.path.samefile(repo_root, location):\n return None\n\n return os.path.relpath(location, repo_root)\n\n\nclass RemoteNotFoundError(Exception):\n pass\n\n\nclass RemoteNotValidError(Exception):\n def __init__(self, url: str):\n super().__init__(url)\n self.url = url\n\n\n@dataclass(frozen=True)\nclass RevOptions:\n """\n Encapsulates a VCS-specific revision to install, along with any VCS\n install options.\n\n Args:\n vc_class: a VersionControl subclass.\n rev: the name of the revision to install.\n extra_args: a list of extra options.\n """\n\n vc_class: Type["VersionControl"]\n rev: Optional[str] = None\n extra_args: CommandArgs = field(default_factory=list)\n branch_name: Optional[str] = None\n\n def __repr__(self) -> str:\n return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"\n\n @property\n def arg_rev(self) -> Optional[str]:\n if self.rev is None:\n return self.vc_class.default_arg_rev\n\n return self.rev\n\n def to_args(self) -> CommandArgs:\n """\n Return the VCS-specific command arguments.\n """\n args: CommandArgs = []\n rev = self.arg_rev\n if rev is not None:\n args += self.vc_class.get_base_rev_args(rev)\n args += self.extra_args\n\n return args\n\n def to_display(self) -> str:\n if not self.rev:\n return ""\n\n return f" (to revision {self.rev})"\n\n def make_new(self, rev: str) -> "RevOptions":\n """\n Make a copy of the current instance, but with a new rev.\n\n Args:\n rev: the name of 
the revision for the new object.\n """\n return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)\n\n\nclass VcsSupport:\n _registry: Dict[str, "VersionControl"] = {}\n schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]\n\n def __init__(self) -> None:\n # Register more schemes with urlparse for various version control\n # systems\n urllib.parse.uses_netloc.extend(self.schemes)\n super().__init__()\n\n def __iter__(self) -> Iterator[str]:\n return self._registry.__iter__()\n\n @property\n def backends(self) -> List["VersionControl"]:\n return list(self._registry.values())\n\n @property\n def dirnames(self) -> List[str]:\n return [backend.dirname for backend in self.backends]\n\n @property\n def all_schemes(self) -> List[str]:\n schemes: List[str] = []\n for backend in self.backends:\n schemes.extend(backend.schemes)\n return schemes\n\n def register(self, cls: Type["VersionControl"]) -> None:\n if not hasattr(cls, "name"):\n logger.warning("Cannot register VCS %s", cls.__name__)\n return\n if cls.name not in self._registry:\n self._registry[cls.name] = cls()\n logger.debug("Registered VCS backend: %s", cls.name)\n\n def unregister(self, name: str) -> None:\n if name in self._registry:\n del self._registry[name]\n\n def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:\n """\n Return a VersionControl object if a repository of that type is found\n at the given directory.\n """\n vcs_backends = {}\n for vcs_backend in self._registry.values():\n repo_path = vcs_backend.get_repository_root(location)\n if not repo_path:\n continue\n logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name)\n vcs_backends[repo_path] = vcs_backend\n\n if not vcs_backends:\n return None\n\n # Choose the VCS in the inner-most directory. Since all repository\n # roots found here would be either `location` or one of its\n # parents, the longest path should have the most path components,\n # i.e. 
the backend representing the inner-most repository.\n inner_most_repo_path = max(vcs_backends, key=len)\n return vcs_backends[inner_most_repo_path]\n\n def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]:\n """\n Return a VersionControl object or None.\n """\n for vcs_backend in self._registry.values():\n if scheme in vcs_backend.schemes:\n return vcs_backend\n return None\n\n def get_backend(self, name: str) -> Optional["VersionControl"]:\n """\n Return a VersionControl object or None.\n """\n name = name.lower()\n return self._registry.get(name)\n\n\nvcs = VcsSupport()\n\n\nclass VersionControl:\n name = ""\n dirname = ""\n repo_name = ""\n # List of supported schemes for this Version Control\n schemes: Tuple[str, ...] = ()\n # Iterable of environment variable names to pass to call_subprocess().\n unset_environ: Tuple[str, ...] = ()\n default_arg_rev: Optional[str] = None\n\n @classmethod\n def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:\n """\n Return whether the vcs prefix (e.g. 
"git+") should be added to a\n repository's remote url when used in a requirement.\n """\n return not remote_url.lower().startswith(f"{cls.name}:")\n\n @classmethod\n def get_subdirectory(cls, location: str) -> Optional[str]:\n """\n Return the path to Python project root, relative to the repo root.\n Return None if the project root is in the repo root.\n """\n return None\n\n @classmethod\n def get_requirement_revision(cls, repo_dir: str) -> str:\n """\n Return the revision string that should be used in a requirement.\n """\n return cls.get_revision(repo_dir)\n\n @classmethod\n def get_src_requirement(cls, repo_dir: str, project_name: str) -> str:\n """\n Return the requirement string to use to redownload the files\n currently at the given repository directory.\n\n Args:\n project_name: the (unescaped) project name.\n\n The return value has a form similar to the following:\n\n {repository_url}@{revision}#egg={project_name}\n """\n repo_url = cls.get_remote_url(repo_dir)\n\n if cls.should_add_vcs_url_prefix(repo_url):\n repo_url = f"{cls.name}+{repo_url}"\n\n revision = cls.get_requirement_revision(repo_dir)\n subdir = cls.get_subdirectory(repo_dir)\n req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir)\n\n return req\n\n @staticmethod\n def get_base_rev_args(rev: str) -> List[str]:\n """\n Return the base revision arguments for a vcs command.\n\n Args:\n rev: the name of a revision to install. 
Cannot be None.\n """\n raise NotImplementedError\n\n def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:\n """\n Return true if the commit hash checked out at dest matches\n the revision in url.\n\n Always return False, if the VCS does not support immutable commit\n hashes.\n\n This method does not check if there are local uncommitted changes\n in dest after checkout, as pip currently has no use case for that.\n """\n return False\n\n @classmethod\n def make_rev_options(\n cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None\n ) -> RevOptions:\n """\n Return a RevOptions object.\n\n Args:\n rev: the name of a revision to install.\n extra_args: a list of extra options.\n """\n return RevOptions(cls, rev, extra_args=extra_args or [])\n\n @classmethod\n def _is_local_repository(cls, repo: str) -> bool:\n """\n posix absolute paths start with os.path.sep,\n win32 ones start with drive (like c:\\folder)\n """\n drive, tail = os.path.splitdrive(repo)\n return repo.startswith(os.path.sep) or bool(drive)\n\n @classmethod\n def get_netloc_and_auth(\n cls, netloc: str, scheme: str\n ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:\n """\n Parse the repository URL's netloc, and return the new netloc to use\n along with auth information.\n\n Args:\n netloc: the original repository URL netloc.\n scheme: the repository URL's scheme without the vcs prefix.\n\n This is mainly for the Subversion class to override, so that auth\n information can be provided via the --username and --password options\n instead of through the URL. 
For other subclasses like Git without\n such an option, auth information must stay in the URL.\n\n Returns: (netloc, (username, password)).\n """\n return netloc, (None, None)\n\n @classmethod\n def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:\n """\n Parse the repository URL to use, and return the URL, revision,\n and auth info to use.\n\n Returns: (url, rev, (username, password)).\n """\n scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)\n if "+" not in scheme:\n raise ValueError(\n f"Sorry, {url!r} is a malformed VCS url. "\n "The format is <vcs>+<protocol>://<url>, "\n "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"\n )\n # Remove the vcs prefix.\n scheme = scheme.split("+", 1)[1]\n netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)\n rev = None\n if "@" in path:\n path, rev = path.rsplit("@", 1)\n if not rev:\n raise InstallationError(\n f"The URL {url!r} has an empty revision (after @) "\n "which is not supported. Include a revision after @ "\n "or remove @ from the URL."\n )\n url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))\n return url, rev, user_pass\n\n @staticmethod\n def make_rev_args(\n username: Optional[str], password: Optional[HiddenText]\n ) -> CommandArgs:\n """\n Return the RevOptions "extra arguments" to use in obtain().\n """\n return []\n\n def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]:\n """\n Return the URL and RevOptions object to use in obtain(),\n as a tuple (url, rev_options).\n """\n secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)\n username, secret_password = user_pass\n password: Optional[HiddenText] = None\n if secret_password is not None:\n password = hide_value(secret_password)\n extra_args = self.make_rev_args(username, password)\n rev_options = self.make_rev_options(rev, extra_args=extra_args)\n\n return hide_url(secret_url), rev_options\n\n @staticmethod\n def normalize_url(url: str) -> str:\n """\n Normalize a 
URL for comparison by unquoting it and removing any\n trailing slash.\n """\n return urllib.parse.unquote(url).rstrip("/")\n\n @classmethod\n def compare_urls(cls, url1: str, url2: str) -> bool:\n """\n Compare two repo URLs for identity, ignoring incidental differences.\n """\n return cls.normalize_url(url1) == cls.normalize_url(url2)\n\n def fetch_new(\n self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int\n ) -> None:\n """\n Fetch a revision from a repository, in the case that this is the\n first fetch from the repository.\n\n Args:\n dest: the directory to fetch the repository to.\n rev_options: a RevOptions object.\n verbosity: verbosity level.\n """\n raise NotImplementedError\n\n def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n """\n Switch the repo at ``dest`` to point to ``URL``.\n\n Args:\n rev_options: a RevOptions object.\n """\n raise NotImplementedError\n\n def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:\n """\n Update an already-existing repo to the given ``rev_options``.\n\n Args:\n rev_options: a RevOptions object.\n """\n raise NotImplementedError\n\n @classmethod\n def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:\n """\n Return whether the id of the current commit equals the given name.\n\n Args:\n dest: the repository directory.\n name: a string name.\n """\n raise NotImplementedError\n\n def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None:\n """\n Install or update in editable mode the package represented by this\n VersionControl object.\n\n :param dest: the repository directory in which to install or update.\n :param url: the repository URL starting with a vcs prefix.\n :param verbosity: verbosity level.\n """\n url, rev_options = self.get_url_rev_options(url)\n\n if not os.path.exists(dest):\n self.fetch_new(dest, url, rev_options, verbosity=verbosity)\n return\n\n rev_display = rev_options.to_display()\n if 
self.is_repository_directory(dest):\n existing_url = self.get_remote_url(dest)\n if self.compare_urls(existing_url, url.secret):\n logger.debug(\n "%s in %s exists, and has correct URL (%s)",\n self.repo_name.title(),\n display_path(dest),\n url,\n )\n if not self.is_commit_id_equal(dest, rev_options.rev):\n logger.info(\n "Updating %s %s%s",\n display_path(dest),\n self.repo_name,\n rev_display,\n )\n self.update(dest, url, rev_options)\n else:\n logger.info("Skipping because already up-to-date.")\n return\n\n logger.warning(\n "%s %s in %s exists with URL %s",\n self.name,\n self.repo_name,\n display_path(dest),\n existing_url,\n )\n prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))\n else:\n logger.warning(\n "Directory %s already exists, and is not a %s %s.",\n dest,\n self.name,\n self.repo_name,\n )\n # https://github.com/python/mypy/issues/1174\n prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore\n\n logger.warning(\n "The plan is to install the %s repository %s",\n self.name,\n url,\n )\n response = ask_path_exists(f"What to do? 
{prompt[0]}", prompt[1])\n\n if response == "a":\n sys.exit(-1)\n\n if response == "w":\n logger.warning("Deleting %s", display_path(dest))\n rmtree(dest)\n self.fetch_new(dest, url, rev_options, verbosity=verbosity)\n return\n\n if response == "b":\n dest_dir = backup_dir(dest)\n logger.warning("Backing up %s to %s", display_path(dest), dest_dir)\n shutil.move(dest, dest_dir)\n self.fetch_new(dest, url, rev_options, verbosity=verbosity)\n return\n\n # Do nothing if the response is "i".\n if response == "s":\n logger.info(\n "Switching %s %s to %s%s",\n self.repo_name,\n display_path(dest),\n url,\n rev_display,\n )\n self.switch(dest, url, rev_options)\n\n def unpack(self, location: str, url: HiddenText, verbosity: int) -> None:\n """\n Clean up current location and download the url repository\n (and vcs infos) into location\n\n :param url: the repository URL starting with a vcs prefix.\n :param verbosity: verbosity level.\n """\n if os.path.exists(location):\n rmtree(location)\n self.obtain(location, url=url, verbosity=verbosity)\n\n @classmethod\n def get_remote_url(cls, location: str) -> str:\n """\n Return the url used at location\n\n Raises RemoteNotFoundError if the repository does not have a remote\n url configured.\n """\n raise NotImplementedError\n\n @classmethod\n def get_revision(cls, location: str) -> str:\n """\n Return the current commit id of the files at the given location.\n """\n raise NotImplementedError\n\n @classmethod\n def run_command(\n cls,\n cmd: Union[List[str], CommandArgs],\n show_stdout: bool = True,\n cwd: Optional[str] = None,\n on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",\n extra_ok_returncodes: Optional[Iterable[int]] = None,\n command_desc: Optional[str] = None,\n extra_environ: Optional[Mapping[str, Any]] = None,\n spinner: Optional[SpinnerInterface] = None,\n log_failed_cmd: bool = True,\n stdout_only: bool = False,\n ) -> str:\n """\n Run a VCS subcommand\n This is simply a wrapper around call_subprocess 
that adds the VCS\n command name, and checks that the VCS is available\n """\n cmd = make_command(cls.name, *cmd)\n if command_desc is None:\n command_desc = format_command_args(cmd)\n try:\n return call_subprocess(\n cmd,\n show_stdout,\n cwd,\n on_returncode=on_returncode,\n extra_ok_returncodes=extra_ok_returncodes,\n command_desc=command_desc,\n extra_environ=extra_environ,\n unset_environ=cls.unset_environ,\n spinner=spinner,\n log_failed_cmd=log_failed_cmd,\n stdout_only=stdout_only,\n )\n except NotADirectoryError:\n raise BadCommand(f"Cannot find command {cls.name!r} - invalid PATH")\n except FileNotFoundError:\n # errno.ENOENT = no such file or directory\n # In other words, the VCS executable isn't available\n raise BadCommand(\n f"Cannot find command {cls.name!r} - do you have "\n f"{cls.name!r} installed and in your PATH?"\n )\n except PermissionError:\n # errno.EACCES = Permission denied\n # This error occurs, for instance, when the command is installed\n # only for another user. So, the current user don't have\n # permission to call the other user command.\n raise BadCommand(\n f"No permission to execute {cls.name!r} - install it "\n f"locally, globally (ask admin), or check your PATH. "\n f"See possible solutions at "\n f"https://pip.pypa.io/en/latest/reference/pip_freeze/"\n f"#fixing-permission-denied."\n )\n\n @classmethod\n def is_repository_directory(cls, path: str) -> bool:\n """\n Return whether a directory path is a repository directory.\n """\n logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name)\n return os.path.exists(os.path.join(path, cls.dirname))\n\n @classmethod\n def get_repository_root(cls, location: str) -> Optional[str]:\n """\n Return the "root" (top-level) directory controlled by the vcs,\n or `None` if the directory is not in any.\n\n It is meant to be overridden to implement smarter detection\n mechanisms for specific vcs.\n\n This can do more than is_repository_directory() alone. 
For\n example, the Git override checks that Git is actually available.\n """\n if cls.is_repository_directory(location):\n return location\n return None\n
.venv\Lib\site-packages\pip\_internal\vcs\versioncontrol.py
versioncontrol.py
Python
22,440
0.95
0.161337
0.034483
python-kit
230
2025-05-28T18:23:09.871363
MIT
false
9633e511d3b3f4b12023761c25d779b8
# Expose a limited set of classes and functions so callers outside of\n# the vcs package don't need to import deeper than `pip._internal.vcs`.\n# (The test directory may still need to import from a vcs sub-package.)\n# Import all vcs modules to register each VCS in the VcsSupport object.\nimport pip._internal.vcs.bazaar\nimport pip._internal.vcs.git\nimport pip._internal.vcs.mercurial\nimport pip._internal.vcs.subversion # noqa: F401\nfrom pip._internal.vcs.versioncontrol import ( # noqa: F401\n RemoteNotFoundError,\n RemoteNotValidError,\n is_url,\n make_vcs_requirement_url,\n vcs,\n)\n
.venv\Lib\site-packages\pip\_internal\vcs\__init__.py
__init__.py
Python
596
0.95
0
0.266667
react-lib
405
2023-12-03T22:16:34.482854
Apache-2.0
false
eba6bd4aca847fbf75d548ff07627ddc
\n\n
.venv\Lib\site-packages\pip\_internal\vcs\__pycache__\bazaar.cpython-313.pyc
bazaar.cpython-313.pyc
Other
5,138
0.8
0.019608
0.02
awesome-app
561
2023-10-25T23:24:06.789901
BSD-3-Clause
false
a3da674f1221a45bcfbaf1092886ab5a
\n\n
.venv\Lib\site-packages\pip\_internal\vcs\__pycache__\git.cpython-313.pyc
git.cpython-313.pyc
Other
19,418
0.95
0.056075
0.024752
python-kit
455
2024-01-29T16:33:00.997723
Apache-2.0
false
2db703fb8709d215c6721358c0a3a6fe
\n\n
.venv\Lib\site-packages\pip\_internal\vcs\__pycache__\mercurial.cpython-313.pyc
mercurial.cpython-313.pyc
Other
7,600
0.95
0.05814
0
node-utils
860
2023-09-14T08:04:59.745008
BSD-3-Clause
false
c1acdc567c5aa2758299c2f5764df9b7
\n\n
.venv\Lib\site-packages\pip\_internal\vcs\__pycache__\subversion.cpython-313.pyc
subversion.cpython-313.pyc
Other
12,669
0.8
0.059259
0
react-lib
297
2025-02-21T11:04:34.683401
GPL-3.0
false
584e51849c72880c700ede5734acce42
\n\n
.venv\Lib\site-packages\pip\_internal\vcs\__pycache__\versioncontrol.cpython-313.pyc
versioncontrol.cpython-313.pyc
Other
28,294
0.95
0.060274
0.00295
node-utils
806
2024-10-20T21:53:29.599666
MIT
false
739dcb8e96c938d67ee476253e492e52
\n\n
.venv\Lib\site-packages\pip\_internal\vcs\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
533
0.95
0
0
awesome-app
262
2025-05-24T05:35:04.828470
GPL-3.0
false
0dffa530fb738a3c8fed0707da08344a
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\build_env.cpython-313.pyc
build_env.cpython-313.pyc
Other
15,164
0.95
0.044335
0.026316
node-utils
386
2023-08-16T15:51:47.091924
Apache-2.0
false
e38bfeca57acb01d9c8a75ddeb149404
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\cache.cpython-313.pyc
cache.cpython-313.pyc
Other
12,894
0.95
0.133858
0
node-utils
639
2025-02-07T08:32:01.563567
BSD-3-Clause
false
9f06a06e60474c05a03bb3bf0905061d
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\configuration.cpython-313.pyc
configuration.cpython-313.pyc
Other
17,850
0.95
0.02809
0
vue-tools
719
2024-09-17T06:36:06.611487
MIT
false
5e8e3fe15ed8130a6fcfe56388dce68c
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\exceptions.cpython-313.pyc
exceptions.cpython-313.pyc
Other
39,924
0.95
0.054404
0.002778
python-kit
441
2024-03-25T03:49:43.816438
BSD-3-Clause
false
f17b1cafe71c48b0f694384c4f5b344b
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\main.cpython-313.pyc
main.cpython-313.pyc
Other
664
0.8
0.142857
0
vue-tools
863
2024-11-10T11:05:03.137101
BSD-3-Clause
false
d9d5141850d74ee0efe9568c2a1cb5bb
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\pyproject.cpython-313.pyc
pyproject.cpython-313.pyc
Other
5,223
0.95
0.027397
0
awesome-app
981
2024-05-14T18:36:55.312104
GPL-3.0
false
20da61cb1eb8b1d052a3127b1ef523f0
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\self_outdated_check.cpython-313.pyc
self_outdated_check.cpython-313.pyc
Other
10,565
0.8
0.029703
0
python-kit
426
2025-05-29T07:44:09.135080
GPL-3.0
false
8eff5045e0b1db543eb9a6d22330469d
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\wheel_builder.cpython-313.pyc
wheel_builder.cpython-313.pyc
Other
13,549
0.95
0.064748
0.007576
python-kit
559
2024-06-03T14:26:47.246811
GPL-3.0
false
0dabc82981db4687dc11c83ad973a4f6
\n\n
.venv\Lib\site-packages\pip\_internal\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
779
0.8
0.1
0
react-lib
627
2024-03-17T00:08:47.409852
Apache-2.0
false
a72bb13ac0cc10a156d440b3f38443d4
CacheControl==0.14.2\ndistlib==0.3.9\ndistro==1.9.0\nmsgpack==1.1.0\npackaging==25.0\nplatformdirs==4.3.7\npyproject-hooks==1.2.0\nrequests==2.32.3\n certifi==2025.1.31\n idna==3.10\n urllib3==1.26.20\nrich==14.0.0\n pygments==2.19.1\n typing_extensions==4.13.2\nresolvelib==1.1.0\nsetuptools==70.3.0\ntomli==2.2.1\ntomli-w==1.2.0\ntruststore==0.10.1\ndependency-groups==1.3.1\n
.venv\Lib\site-packages\pip\_vendor\vendor.txt
vendor.txt
Other
373
0.7
0
0
vue-tools
372
2024-07-09T14:33:26.534974
GPL-3.0
false
688018817c34ce568dc6cb0d6266c2cf
"""\npip._vendor is for vendoring dependencies of pip to prevent needing pip to\ndepend on something external.\n\nFiles inside of pip._vendor should be considered immutable and should only be\nupdated to versions from upstream.\n"""\nfrom __future__ import absolute_import\n\nimport glob\nimport os.path\nimport sys\n\n# Downstream redistributors which have debundled our dependencies should also\n# patch this value to be true. This will trigger the additional patching\n# to cause things like "six" to be available as pip.\nDEBUNDLED = False\n\n# By default, look in this directory for a bunch of .whl files which we will\n# add to the beginning of sys.path before attempting to import anything. This\n# is done to support downstream re-distributors like Debian and Fedora who\n# wish to create their own Wheels for our dependencies to aid in debundling.\nWHEEL_DIR = os.path.abspath(os.path.dirname(__file__))\n\n\n# Define a small helper function to alias our vendored modules to the real ones\n# if the vendored ones do not exist. This idea of this was taken from\n# https://github.com/kennethreitz/requests/pull/2567.\ndef vendored(modulename):\n vendored_name = "{0}.{1}".format(__name__, modulename)\n\n try:\n __import__(modulename, globals(), locals(), level=0)\n except ImportError:\n # We can just silently allow import failures to pass here. If we\n # got to this point it means that ``import pip._vendor.whatever``\n # failed and so did ``import whatever``. 
Since we're importing this\n # upfront in an attempt to alias imports, not erroring here will\n # just mean we get a regular import error whenever pip *actually*\n # tries to import one of these modules to use it, which actually\n # gives us a better error message than we would have otherwise\n # gotten.\n pass\n else:\n sys.modules[vendored_name] = sys.modules[modulename]\n base, head = vendored_name.rsplit(".", 1)\n setattr(sys.modules[base], head, sys.modules[modulename])\n\n\n# If we're operating in a debundled setup, then we want to go ahead and trigger\n# the aliasing of our vendored libraries as well as looking for wheels to add\n# to our sys.path. This will cause all of this code to be a no-op typically\n# however downstream redistributors can enable it in a consistent way across\n# all platforms.\nif DEBUNDLED:\n # Actually look inside of WHEEL_DIR to find .whl files and add them to the\n # front of our sys.path.\n sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path\n\n # Actually alias all of our vendored dependencies.\n vendored("cachecontrol")\n vendored("certifi")\n vendored("dependency-groups")\n vendored("distlib")\n vendored("distro")\n vendored("packaging")\n vendored("packaging.version")\n vendored("packaging.specifiers")\n vendored("pkg_resources")\n vendored("platformdirs")\n vendored("progress")\n vendored("pyproject_hooks")\n vendored("requests")\n vendored("requests.exceptions")\n vendored("requests.packages")\n vendored("requests.packages.urllib3")\n vendored("requests.packages.urllib3._collections")\n vendored("requests.packages.urllib3.connection")\n vendored("requests.packages.urllib3.connectionpool")\n vendored("requests.packages.urllib3.contrib")\n vendored("requests.packages.urllib3.contrib.ntlmpool")\n vendored("requests.packages.urllib3.contrib.pyopenssl")\n vendored("requests.packages.urllib3.exceptions")\n vendored("requests.packages.urllib3.fields")\n vendored("requests.packages.urllib3.filepost")\n 
vendored("requests.packages.urllib3.packages")\n vendored("requests.packages.urllib3.packages.ordered_dict")\n vendored("requests.packages.urllib3.packages.six")\n vendored("requests.packages.urllib3.packages.ssl_match_hostname")\n vendored("requests.packages.urllib3.packages.ssl_match_hostname."\n "_implementation")\n vendored("requests.packages.urllib3.poolmanager")\n vendored("requests.packages.urllib3.request")\n vendored("requests.packages.urllib3.response")\n vendored("requests.packages.urllib3.util")\n vendored("requests.packages.urllib3.util.connection")\n vendored("requests.packages.urllib3.util.request")\n vendored("requests.packages.urllib3.util.response")\n vendored("requests.packages.urllib3.util.retry")\n vendored("requests.packages.urllib3.util.ssl_")\n vendored("requests.packages.urllib3.util.timeout")\n vendored("requests.packages.urllib3.util.url")\n vendored("resolvelib")\n vendored("rich")\n vendored("rich.console")\n vendored("rich.highlighter")\n vendored("rich.logging")\n vendored("rich.markup")\n vendored("rich.progress")\n vendored("rich.segment")\n vendored("rich.style")\n vendored("rich.text")\n vendored("rich.traceback")\n if sys.version_info < (3, 11):\n vendored("tomli")\n vendored("truststore")\n vendored("urllib3")\n
.venv\Lib\site-packages\pip\_vendor\__init__.py
__init__.py
Python
4,907
0.95
0.08547
0.242991
node-utils
272
2025-01-20T03:10:06.627538
GPL-3.0
false
6d5a895ffde0646bdfc0ed469e51f865
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nimport functools\nimport types\nimport weakref\nimport zlib\nfrom typing import TYPE_CHECKING, Any, Collection, Mapping\n\nfrom pip._vendor.requests.adapters import HTTPAdapter\n\nfrom pip._vendor.cachecontrol.cache import DictCache\nfrom pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController\nfrom pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper\n\nif TYPE_CHECKING:\n from pip._vendor.requests import PreparedRequest, Response\n from pip._vendor.urllib3 import HTTPResponse\n\n from pip._vendor.cachecontrol.cache import BaseCache\n from pip._vendor.cachecontrol.heuristics import BaseHeuristic\n from pip._vendor.cachecontrol.serialize import Serializer\n\n\nclass CacheControlAdapter(HTTPAdapter):\n invalidating_methods = {"PUT", "PATCH", "DELETE"}\n\n def __init__(\n self,\n cache: BaseCache | None = None,\n cache_etags: bool = True,\n controller_class: type[CacheController] | None = None,\n serializer: Serializer | None = None,\n heuristic: BaseHeuristic | None = None,\n cacheable_methods: Collection[str] | None = None,\n *args: Any,\n **kw: Any,\n ) -> None:\n super().__init__(*args, **kw)\n self.cache = DictCache() if cache is None else cache\n self.heuristic = heuristic\n self.cacheable_methods = cacheable_methods or ("GET",)\n\n controller_factory = controller_class or CacheController\n self.controller = controller_factory(\n self.cache, cache_etags=cache_etags, serializer=serializer\n )\n\n def send(\n self,\n request: PreparedRequest,\n stream: bool = False,\n timeout: None | float | tuple[float, float] | tuple[float, None] = None,\n verify: bool | str = True,\n cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,\n proxies: Mapping[str, str] | None = None,\n cacheable_methods: Collection[str] | None = None,\n ) -> Response:\n """\n Send a request. 
Use the request information to see if it\n exists in the cache and cache the response if we need to and can.\n """\n cacheable = cacheable_methods or self.cacheable_methods\n if request.method in cacheable:\n try:\n cached_response = self.controller.cached_request(request)\n except zlib.error:\n cached_response = None\n if cached_response:\n return self.build_response(request, cached_response, from_cache=True)\n\n # check for etags and add headers if appropriate\n request.headers.update(self.controller.conditional_headers(request))\n\n resp = super().send(request, stream, timeout, verify, cert, proxies)\n\n return resp\n\n def build_response( # type: ignore[override]\n self,\n request: PreparedRequest,\n response: HTTPResponse,\n from_cache: bool = False,\n cacheable_methods: Collection[str] | None = None,\n ) -> Response:\n """\n Build a response by making a request or using the cache.\n\n This will end up calling send and returning a potentially\n cached response\n """\n cacheable = cacheable_methods or self.cacheable_methods\n if not from_cache and request.method in cacheable:\n # Check for any heuristics that might update headers\n # before trying to cache.\n if self.heuristic:\n response = self.heuristic.apply(response)\n\n # apply any expiration heuristics\n if response.status == 304:\n # We must have sent an ETag request. This could mean\n # that we've been expired already or that we simply\n # have an etag. 
In either case, we want to try and\n # update the cache if that is the case.\n cached_response = self.controller.update_cached_response(\n request, response\n )\n\n if cached_response is not response:\n from_cache = True\n\n # We are done with the server response, read a\n # possible response body (compliant servers will\n # not return one, but we cannot be 100% sure) and\n # release the connection back to the pool.\n response.read(decode_content=False)\n response.release_conn()\n\n response = cached_response\n\n # We always cache the 301 responses\n elif int(response.status) in PERMANENT_REDIRECT_STATUSES:\n self.controller.cache_response(request, response)\n else:\n # Wrap the response file with a wrapper that will cache the\n # response when the stream has been consumed.\n response._fp = CallbackFileWrapper( # type: ignore[assignment]\n response._fp, # type: ignore[arg-type]\n functools.partial(\n self.controller.cache_response, request, weakref.ref(response)\n ),\n )\n if response.chunked:\n super_update_chunk_length = response.__class__._update_chunk_length\n\n def _update_chunk_length(\n weak_self: weakref.ReferenceType[HTTPResponse],\n ) -> None:\n self = weak_self()\n if self is None:\n return\n\n super_update_chunk_length(self)\n if self.chunk_left == 0:\n self._fp._close() # type: ignore[union-attr]\n\n response._update_chunk_length = functools.partial( # type: ignore[method-assign]\n _update_chunk_length, weakref.ref(response)\n )\n\n resp: Response = super().build_response(request, response)\n\n # See if we should invalidate the cache.\n if request.method in self.invalidating_methods and resp.ok:\n assert request.url is not None\n cache_url = self.controller.cache_url(request.url)\n self.cache.delete(cache_url)\n\n # Give the request a from_cache attr to let people use it\n resp.from_cache = from_cache # type: ignore[attr-defined]\n\n return resp\n\n def close(self) -> None:\n self.cache.close()\n super().close() # type: ignore[no-untyped-call]\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\adapter.py
adapter.py
Python
6,599
0.95
0.160714
0.157143
node-utils
700
2024-10-20T01:52:15.067745
Apache-2.0
false
77ab3022de13cd24b5f4d4488a3ed4df
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\n\n"""\nThe cache object API for implementing caches. The default is a thread\nsafe in-memory dictionary.\n"""\n\nfrom __future__ import annotations\n\nfrom threading import Lock\nfrom typing import IO, TYPE_CHECKING, MutableMapping\n\nif TYPE_CHECKING:\n from datetime import datetime\n\n\nclass BaseCache:\n def get(self, key: str) -> bytes | None:\n raise NotImplementedError()\n\n def set(\n self, key: str, value: bytes, expires: int | datetime | None = None\n ) -> None:\n raise NotImplementedError()\n\n def delete(self, key: str) -> None:\n raise NotImplementedError()\n\n def close(self) -> None:\n pass\n\n\nclass DictCache(BaseCache):\n def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:\n self.lock = Lock()\n self.data = init_dict or {}\n\n def get(self, key: str) -> bytes | None:\n return self.data.get(key, None)\n\n def set(\n self, key: str, value: bytes, expires: int | datetime | None = None\n ) -> None:\n with self.lock:\n self.data.update({key: value})\n\n def delete(self, key: str) -> None:\n with self.lock:\n if key in self.data:\n self.data.pop(key)\n\n\nclass SeparateBodyBaseCache(BaseCache):\n """\n In this variant, the body is not stored mixed in with the metadata, but is\n passed in (as a bytes-like object) in a separate call to ``set_body()``.\n\n That is, the expected interaction pattern is::\n\n cache.set(key, serialized_metadata)\n cache.set_body(key)\n\n Similarly, the body should be loaded separately via ``get_body()``.\n """\n\n def set_body(self, key: str, body: bytes) -> None:\n raise NotImplementedError()\n\n def get_body(self, key: str) -> IO[bytes] | None:\n """\n Return the body as file-like object.\n """\n raise NotImplementedError()\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\cache.py
cache.py
Python
1,953
0.95
0.213333
0.055556
node-utils
881
2025-06-13T02:41:37.537353
MIT
false
b06db1426d9b08f643761cb34c978b82
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\n\n"""\nThe httplib2 algorithms ported for use with requests.\n"""\n\nfrom __future__ import annotations\n\nimport calendar\nimport logging\nimport re\nimport time\nimport weakref\nfrom email.utils import parsedate_tz\nfrom typing import TYPE_CHECKING, Collection, Mapping\n\nfrom pip._vendor.requests.structures import CaseInsensitiveDict\n\nfrom pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache\nfrom pip._vendor.cachecontrol.serialize import Serializer\n\nif TYPE_CHECKING:\n from typing import Literal\n\n from pip._vendor.requests import PreparedRequest\n from pip._vendor.urllib3 import HTTPResponse\n\n from pip._vendor.cachecontrol.cache import BaseCache\n\nlogger = logging.getLogger(__name__)\n\nURI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")\n\nPERMANENT_REDIRECT_STATUSES = (301, 308)\n\n\ndef parse_uri(uri: str) -> tuple[str, str, str, str, str]:\n """Parses a URI using the regex given in Appendix B of RFC 3986.\n\n (scheme, authority, path, query, fragment) = parse_uri(uri)\n """\n match = URI.match(uri)\n assert match is not None\n groups = match.groups()\n return (groups[1], groups[3], groups[4], groups[6], groups[8])\n\n\nclass CacheController:\n """An interface to see if request should cached or not."""\n\n def __init__(\n self,\n cache: BaseCache | None = None,\n cache_etags: bool = True,\n serializer: Serializer | None = None,\n status_codes: Collection[int] | None = None,\n ):\n self.cache = DictCache() if cache is None else cache\n self.cache_etags = cache_etags\n self.serializer = serializer or Serializer()\n self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)\n\n @classmethod\n def _urlnorm(cls, uri: str) -> str:\n """Normalize the URL to create a safe key for the cache"""\n (scheme, authority, path, query, fragment) = parse_uri(uri)\n if not scheme or not authority:\n raise Exception("Only 
absolute URIs are allowed. uri = %s" % uri)\n\n scheme = scheme.lower()\n authority = authority.lower()\n\n if not path:\n path = "/"\n\n # Could do syntax based normalization of the URI before\n # computing the digest. See Section 6.2.2 of Std 66.\n request_uri = query and "?".join([path, query]) or path\n defrag_uri = scheme + "://" + authority + request_uri\n\n return defrag_uri\n\n @classmethod\n def cache_url(cls, uri: str) -> str:\n return cls._urlnorm(uri)\n\n def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:\n known_directives = {\n # https://tools.ietf.org/html/rfc7234#section-5.2\n "max-age": (int, True),\n "max-stale": (int, False),\n "min-fresh": (int, True),\n "no-cache": (None, False),\n "no-store": (None, False),\n "no-transform": (None, False),\n "only-if-cached": (None, False),\n "must-revalidate": (None, False),\n "public": (None, False),\n "private": (None, False),\n "proxy-revalidate": (None, False),\n "s-maxage": (int, True),\n }\n\n cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))\n\n retval: dict[str, int | None] = {}\n\n for cc_directive in cc_headers.split(","):\n if not cc_directive.strip():\n continue\n\n parts = cc_directive.split("=", 1)\n directive = parts[0].strip()\n\n try:\n typ, required = known_directives[directive]\n except KeyError:\n logger.debug("Ignoring unknown cache-control directive: %s", directive)\n continue\n\n if not typ or not required:\n retval[directive] = None\n if typ:\n try:\n retval[directive] = typ(parts[1].strip())\n except IndexError:\n if required:\n logger.debug(\n "Missing value for cache-control " "directive: %s",\n directive,\n )\n except ValueError:\n logger.debug(\n "Invalid value for cache-control directive " "%s, must be %s",\n directive,\n typ.__name__,\n )\n\n return retval\n\n def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:\n """\n Load a cached response, or return None if it's not available.\n """\n # We do 
not support caching of partial content: so if the request contains a\n # Range header then we don't want to load anything from the cache.\n if "Range" in request.headers:\n return None\n\n cache_url = request.url\n assert cache_url is not None\n cache_data = self.cache.get(cache_url)\n if cache_data is None:\n logger.debug("No cache entry available")\n return None\n\n if isinstance(self.cache, SeparateBodyBaseCache):\n body_file = self.cache.get_body(cache_url)\n else:\n body_file = None\n\n result = self.serializer.loads(request, cache_data, body_file)\n if result is None:\n logger.warning("Cache entry deserialization failed, entry ignored")\n return result\n\n def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:\n """\n Return a cached response if it exists in the cache, otherwise\n return False.\n """\n assert request.url is not None\n cache_url = self.cache_url(request.url)\n logger.debug('Looking up "%s" in the cache', cache_url)\n cc = self.parse_cache_control(request.headers)\n\n # Bail out if the request insists on fresh data\n if "no-cache" in cc:\n logger.debug('Request header has "no-cache", cache bypassed')\n return False\n\n if "max-age" in cc and cc["max-age"] == 0:\n logger.debug('Request header has "max_age" as 0, cache bypassed')\n return False\n\n # Check whether we can load the response from the cache:\n resp = self._load_from_cache(request)\n if not resp:\n return False\n\n # If we have a cached permanent redirect, return it immediately. 
We\n # don't need to test our response for other headers b/c it is\n # intrinsically "cacheable" as it is Permanent.\n #\n # See:\n # https://tools.ietf.org/html/rfc7231#section-6.4.2\n #\n # Client can try to refresh the value by repeating the request\n # with cache busting headers as usual (ie no-cache).\n if int(resp.status) in PERMANENT_REDIRECT_STATUSES:\n msg = (\n "Returning cached permanent redirect response "\n "(ignoring date and etag information)"\n )\n logger.debug(msg)\n return resp\n\n headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)\n if not headers or "date" not in headers:\n if "etag" not in headers:\n # Without date or etag, the cached response can never be used\n # and should be deleted.\n logger.debug("Purging cached response: no date or etag")\n self.cache.delete(cache_url)\n logger.debug("Ignoring cached response: no date")\n return False\n\n now = time.time()\n time_tuple = parsedate_tz(headers["date"])\n assert time_tuple is not None\n date = calendar.timegm(time_tuple[:6])\n current_age = max(0, now - date)\n logger.debug("Current age based on date: %i", current_age)\n\n # TODO: There is an assumption that the result will be a\n # urllib3 response object. 
This may not be best since we\n # could probably avoid instantiating or constructing the\n # response until we know we need it.\n resp_cc = self.parse_cache_control(headers)\n\n # determine freshness\n freshness_lifetime = 0\n\n # Check the max-age pragma in the cache control header\n max_age = resp_cc.get("max-age")\n if max_age is not None:\n freshness_lifetime = max_age\n logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)\n\n # If there isn't a max-age, check for an expires header\n elif "expires" in headers:\n expires = parsedate_tz(headers["expires"])\n if expires is not None:\n expire_time = calendar.timegm(expires[:6]) - date\n freshness_lifetime = max(0, expire_time)\n logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)\n\n # Determine if we are setting freshness limit in the\n # request. Note, this overrides what was in the response.\n max_age = cc.get("max-age")\n if max_age is not None:\n freshness_lifetime = max_age\n logger.debug(\n "Freshness lifetime from request max-age: %i", freshness_lifetime\n )\n\n min_fresh = cc.get("min-fresh")\n if min_fresh is not None:\n # adjust our current age by our min fresh\n current_age += min_fresh\n logger.debug("Adjusted current age from min-fresh: %i", current_age)\n\n # Return entry if it is fresh enough\n if freshness_lifetime > current_age:\n logger.debug('The response is "fresh", returning cached response')\n logger.debug("%i > %i", freshness_lifetime, current_age)\n return resp\n\n # we're not fresh. 
If we don't have an Etag, clear it out\n if "etag" not in headers:\n logger.debug('The cached response is "stale" with no etag, purging')\n self.cache.delete(cache_url)\n\n # return the original handler\n return False\n\n def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:\n resp = self._load_from_cache(request)\n new_headers = {}\n\n if resp:\n headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)\n\n if "etag" in headers:\n new_headers["If-None-Match"] = headers["ETag"]\n\n if "last-modified" in headers:\n new_headers["If-Modified-Since"] = headers["Last-Modified"]\n\n return new_headers\n\n def _cache_set(\n self,\n cache_url: str,\n request: PreparedRequest,\n response: HTTPResponse,\n body: bytes | None = None,\n expires_time: int | None = None,\n ) -> None:\n """\n Store the data in the cache.\n """\n if isinstance(self.cache, SeparateBodyBaseCache):\n # We pass in the body separately; just put a placeholder empty\n # string in the metadata.\n self.cache.set(\n cache_url,\n self.serializer.dumps(request, response, b""),\n expires=expires_time,\n )\n # body is None can happen when, for example, we're only updating\n # headers, as is the case in update_cached_response().\n if body is not None:\n self.cache.set_body(cache_url, body)\n else:\n self.cache.set(\n cache_url,\n self.serializer.dumps(request, response, body),\n expires=expires_time,\n )\n\n def cache_response(\n self,\n request: PreparedRequest,\n response_or_ref: HTTPResponse | weakref.ReferenceType[HTTPResponse],\n body: bytes | None = None,\n status_codes: Collection[int] | None = None,\n ) -> None:\n """\n Algorithm for caching requests.\n\n This assumes a requests Response object.\n """\n if isinstance(response_or_ref, weakref.ReferenceType):\n response = response_or_ref()\n if response is None:\n # The weakref can be None only in case the user used streamed request\n # and did not consume or close it, and holds no reference to requests.Response.\n # In such 
case, we don't want to cache the response.\n return\n else:\n response = response_or_ref\n\n # From httplib2: Don't cache 206's since we aren't going to\n # handle byte range requests\n cacheable_status_codes = status_codes or self.cacheable_status_codes\n if response.status not in cacheable_status_codes:\n logger.debug(\n "Status code %s not in %s", response.status, cacheable_status_codes\n )\n return\n\n response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(\n response.headers\n )\n\n if "date" in response_headers:\n time_tuple = parsedate_tz(response_headers["date"])\n assert time_tuple is not None\n date = calendar.timegm(time_tuple[:6])\n else:\n date = 0\n\n # If we've been given a body, our response has a Content-Length, that\n # Content-Length is valid then we can check to see if the body we've\n # been given matches the expected size, and if it doesn't we'll just\n # skip trying to cache it.\n if (\n body is not None\n and "content-length" in response_headers\n and response_headers["content-length"].isdigit()\n and int(response_headers["content-length"]) != len(body)\n ):\n return\n\n cc_req = self.parse_cache_control(request.headers)\n cc = self.parse_cache_control(response_headers)\n\n assert request.url is not None\n cache_url = self.cache_url(request.url)\n logger.debug('Updating cache with response from "%s"', cache_url)\n\n # Delete it from the cache if we happen to have it stored there\n no_store = False\n if "no-store" in cc:\n no_store = True\n logger.debug('Response header has "no-store"')\n if "no-store" in cc_req:\n no_store = True\n logger.debug('Request header has "no-store"')\n if no_store and self.cache.get(cache_url):\n logger.debug('Purging existing cache entry to honor "no-store"')\n self.cache.delete(cache_url)\n if no_store:\n return\n\n # https://tools.ietf.org/html/rfc7234#section-4.1:\n # A Vary header field-value of "*" always fails to match.\n # Storing such a response leads to a deserialization warning\n # during cache 
lookup and is not allowed to ever be served,\n # so storing it can be avoided.\n if "*" in response_headers.get("vary", ""):\n logger.debug('Response header has "Vary: *"')\n return\n\n # If we've been given an etag, then keep the response\n if self.cache_etags and "etag" in response_headers:\n expires_time = 0\n if response_headers.get("expires"):\n expires = parsedate_tz(response_headers["expires"])\n if expires is not None:\n expires_time = calendar.timegm(expires[:6]) - date\n\n expires_time = max(expires_time, 14 * 86400)\n\n logger.debug(f"etag object cached for {expires_time} seconds")\n logger.debug("Caching due to etag")\n self._cache_set(cache_url, request, response, body, expires_time)\n\n # Add to the cache any permanent redirects. We do this before looking\n # that the Date headers.\n elif int(response.status) in PERMANENT_REDIRECT_STATUSES:\n logger.debug("Caching permanent redirect")\n self._cache_set(cache_url, request, response, b"")\n\n # Add to the cache if the response headers demand it. If there\n # is no date header then we can't do anything about expiring\n # the cache.\n elif "date" in response_headers:\n time_tuple = parsedate_tz(response_headers["date"])\n assert time_tuple is not None\n date = calendar.timegm(time_tuple[:6])\n # cache when there is a max-age > 0\n max_age = cc.get("max-age")\n if max_age is not None and max_age > 0:\n logger.debug("Caching b/c date exists and max-age > 0")\n expires_time = max_age\n self._cache_set(\n cache_url,\n request,\n response,\n body,\n expires_time,\n )\n\n # If the request can expire, it means we should cache it\n # in the meantime.\n elif "expires" in response_headers:\n if response_headers["expires"]:\n expires = parsedate_tz(response_headers["expires"])\n if expires is not None:\n expires_time = calendar.timegm(expires[:6]) - date\n else:\n expires_time = None\n\n logger.debug(\n "Caching b/c of expires header. 
expires in {} seconds".format(\n expires_time\n )\n )\n self._cache_set(\n cache_url,\n request,\n response,\n body,\n expires_time,\n )\n\n def update_cached_response(\n self, request: PreparedRequest, response: HTTPResponse\n ) -> HTTPResponse:\n """On a 304 we will get a new set of headers that we want to\n update our cached value with, assuming we have one.\n\n This should only ever be called when we've sent an ETag and\n gotten a 304 as the response.\n """\n assert request.url is not None\n cache_url = self.cache_url(request.url)\n cached_response = self._load_from_cache(request)\n\n if not cached_response:\n # we didn't have a cached response\n return response\n\n # Lets update our headers with the headers from the new request:\n # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1\n #\n # The server isn't supposed to send headers that would make\n # the cached body invalid. But... just in case, we'll be sure\n # to strip out ones we know that might be problmatic due to\n # typical assumptions.\n excluded_headers = ["content-length"]\n\n cached_response.headers.update(\n {\n k: v\n for k, v in response.headers.items()\n if k.lower() not in excluded_headers\n }\n )\n\n # we want a 200 b/c we have content via the cache\n cached_response.status = 200\n\n # update our cache\n self._cache_set(cache_url, request, cached_response)\n\n return cached_response\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\controller.py
controller.py
Python
19,101
0.95
0.166341
0.167832
python-kit
874
2024-06-09T08:46:31.289097
BSD-3-Clause
false
28356d547924c94f32eec5249bf4c8f3
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nimport mmap\nfrom tempfile import NamedTemporaryFile\nfrom typing import TYPE_CHECKING, Any, Callable\n\nif TYPE_CHECKING:\n from http.client import HTTPResponse\n\n\nclass CallbackFileWrapper:\n """\n Small wrapper around a fp object which will tee everything read into a\n buffer, and when that file is closed it will execute a callback with the\n contents of that buffer.\n\n All attributes are proxied to the underlying file object.\n\n This class uses members with a double underscore (__) leading prefix so as\n not to accidentally shadow an attribute.\n\n The data is stored in a temporary file until it is all available. As long\n as the temporary files directory is disk-based (sometimes it's a\n memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory\n pressure is high. For small files the disk usually won't be used at all,\n it'll all be in the filesystem memory cache, so there should be no\n performance impact.\n """\n\n def __init__(\n self, fp: HTTPResponse, callback: Callable[[bytes], None] | None\n ) -> None:\n self.__buf = NamedTemporaryFile("rb+", delete=True)\n self.__fp = fp\n self.__callback = callback\n\n def __getattr__(self, name: str) -> Any:\n # The vagaries of garbage collection means that self.__fp is\n # not always set. By using __getattribute__ and the private\n # name[0] allows looking up the attribute value and raising an\n # AttributeError when it doesn't exist. 
This stop things from\n # infinitely recursing calls to getattr in the case where\n # self.__fp hasn't been set.\n #\n # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers\n fp = self.__getattribute__("_CallbackFileWrapper__fp")\n return getattr(fp, name)\n\n def __is_fp_closed(self) -> bool:\n try:\n return self.__fp.fp is None\n\n except AttributeError:\n pass\n\n try:\n closed: bool = self.__fp.closed\n return closed\n\n except AttributeError:\n pass\n\n # We just don't cache it then.\n # TODO: Add some logging here...\n return False\n\n def _close(self) -> None:\n if self.__callback:\n if self.__buf.tell() == 0:\n # Empty file:\n result = b""\n else:\n # Return the data without actually loading it into memory,\n # relying on Python's buffer API and mmap(). mmap() just gives\n # a view directly into the filesystem's memory cache, so it\n # doesn't result in duplicate memory use.\n self.__buf.seek(0, 0)\n result = memoryview(\n mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)\n )\n self.__callback(result)\n\n # We assign this to None here, because otherwise we can get into\n # really tricky problems where the CPython interpreter dead locks\n # because the callback is holding a reference to something which\n # has a __del__ method. Setting this to None breaks the cycle\n # and allows the garbage collector to do it's thing normally.\n self.__callback = None\n\n # Closing the temporary file releases memory and frees disk space.\n # Important when caching big files.\n self.__buf.close()\n\n def read(self, amt: int | None = None) -> bytes:\n data: bytes = self.__fp.read(amt)\n if data:\n # We may be dealing with b'', a sign that things are over:\n # it's passed e.g. 
after we've already closed self.__buf.\n self.__buf.write(data)\n if self.__is_fp_closed():\n self._close()\n\n return data\n\n def _safe_read(self, amt: int) -> bytes:\n data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined]\n if amt == 2 and data == b"\r\n":\n # urllib executes this read to toss the CRLF at the end\n # of the chunk.\n return data\n\n self.__buf.write(data)\n if self.__is_fp_closed():\n self._close()\n\n return data\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\filewrapper.py
filewrapper.py
Python
4,291
0.95
0.151261
0.298969
node-utils
213
2023-08-08T11:53:32.291945
MIT
false
2f9cf9f9625de9989ae1821450a2e88f
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nimport calendar\nimport time\nfrom datetime import datetime, timedelta, timezone\nfrom email.utils import formatdate, parsedate, parsedate_tz\nfrom typing import TYPE_CHECKING, Any, Mapping\n\nif TYPE_CHECKING:\n from pip._vendor.urllib3 import HTTPResponse\n\nTIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"\n\n\ndef expire_after(delta: timedelta, date: datetime | None = None) -> datetime:\n date = date or datetime.now(timezone.utc)\n return date + delta\n\n\ndef datetime_to_header(dt: datetime) -> str:\n return formatdate(calendar.timegm(dt.timetuple()))\n\n\nclass BaseHeuristic:\n def warning(self, response: HTTPResponse) -> str | None:\n """\n Return a valid 1xx warning header value describing the cache\n adjustments.\n\n The response is provided too allow warnings like 113\n http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need\n to explicitly say response is over 24 hours old.\n """\n return '110 - "Response is Stale"'\n\n def update_headers(self, response: HTTPResponse) -> dict[str, str]:\n """Update the response headers with any new headers.\n\n NOTE: This SHOULD always include some Warning header to\n signify that the response was cached by the client, not\n by way of the provided headers.\n """\n return {}\n\n def apply(self, response: HTTPResponse) -> HTTPResponse:\n updated_headers = self.update_headers(response)\n\n if updated_headers:\n response.headers.update(updated_headers)\n warning_header_value = self.warning(response)\n if warning_header_value is not None:\n response.headers.update({"Warning": warning_header_value})\n\n return response\n\n\nclass OneDayCache(BaseHeuristic):\n """\n Cache the response by providing an expires 1 day in the\n future.\n """\n\n def update_headers(self, response: HTTPResponse) -> dict[str, str]:\n headers = {}\n\n if "expires" not in response.headers:\n date = parsedate(response.headers["date"])\n 
expires = expire_after(\n timedelta(days=1),\n date=datetime(*date[:6], tzinfo=timezone.utc), # type: ignore[index,misc]\n )\n headers["expires"] = datetime_to_header(expires)\n headers["cache-control"] = "public"\n return headers\n\n\nclass ExpiresAfter(BaseHeuristic):\n """\n Cache **all** requests for a defined time period.\n """\n\n def __init__(self, **kw: Any) -> None:\n self.delta = timedelta(**kw)\n\n def update_headers(self, response: HTTPResponse) -> dict[str, str]:\n expires = expire_after(self.delta)\n return {"expires": datetime_to_header(expires), "cache-control": "public"}\n\n def warning(self, response: HTTPResponse) -> str | None:\n tmpl = "110 - Automatically cached for %s. Response might be stale"\n return tmpl % self.delta\n\n\nclass LastModified(BaseHeuristic):\n """\n If there is no Expires header already, fall back on Last-Modified\n using the heuristic from\n http://tools.ietf.org/html/rfc7234#section-4.2.2\n to calculate a reasonable value.\n\n Firefox also does something like this per\n https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ\n http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397\n Unlike mozilla we limit this to 24-hr.\n """\n\n cacheable_by_default_statuses = {\n 200,\n 203,\n 204,\n 206,\n 300,\n 301,\n 404,\n 405,\n 410,\n 414,\n 501,\n }\n\n def update_headers(self, resp: HTTPResponse) -> dict[str, str]:\n headers: Mapping[str, str] = resp.headers\n\n if "expires" in headers:\n return {}\n\n if "cache-control" in headers and headers["cache-control"] != "public":\n return {}\n\n if resp.status not in self.cacheable_by_default_statuses:\n return {}\n\n if "date" not in headers or "last-modified" not in headers:\n return {}\n\n time_tuple = parsedate_tz(headers["date"])\n assert time_tuple is not None\n date = calendar.timegm(time_tuple[:6])\n last_modified = parsedate(headers["last-modified"])\n if last_modified is None:\n return {}\n\n now = time.time()\n current_age = 
max(0, now - date)\n delta = date - calendar.timegm(last_modified)\n freshness_lifetime = max(0, min(delta / 10, 24 * 3600))\n if freshness_lifetime <= current_age:\n return {}\n\n expires = date + freshness_lifetime\n return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}\n\n def warning(self, resp: HTTPResponse) -> str | None:\n return None\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\heuristics.py
heuristics.py
Python
4,881
0.95
0.171975
0.025
awesome-app
98
2024-02-29T19:30:53.958772
MIT
false
d037f0dc31f47fb7e58961b311ae6ad0
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nimport io\nfrom typing import IO, TYPE_CHECKING, Any, Mapping, cast\n\nfrom pip._vendor import msgpack\nfrom pip._vendor.requests.structures import CaseInsensitiveDict\nfrom pip._vendor.urllib3 import HTTPResponse\n\nif TYPE_CHECKING:\n from pip._vendor.requests import PreparedRequest\n\n\nclass Serializer:\n serde_version = "4"\n\n def dumps(\n self,\n request: PreparedRequest,\n response: HTTPResponse,\n body: bytes | None = None,\n ) -> bytes:\n response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(\n response.headers\n )\n\n if body is None:\n # When a body isn't passed in, we'll read the response. We\n # also update the response with a new file handler to be\n # sure it acts as though it was never read.\n body = response.read(decode_content=False)\n response._fp = io.BytesIO(body) # type: ignore[assignment]\n response.length_remaining = len(body)\n\n data = {\n "response": {\n "body": body, # Empty bytestring if body is stored separately\n "headers": {str(k): str(v) for k, v in response.headers.items()},\n "status": response.status,\n "version": response.version,\n "reason": str(response.reason),\n "decode_content": response.decode_content,\n }\n }\n\n # Construct our vary headers\n data["vary"] = {}\n if "vary" in response_headers:\n varied_headers = response_headers["vary"].split(",")\n for header in varied_headers:\n header = str(header).strip()\n header_value = request.headers.get(header, None)\n if header_value is not None:\n header_value = str(header_value)\n data["vary"][header] = header_value\n\n return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)])\n\n def serialize(self, data: dict[str, Any]) -> bytes:\n return cast(bytes, msgpack.dumps(data, use_bin_type=True))\n\n def loads(\n self,\n request: PreparedRequest,\n data: bytes,\n body_file: IO[bytes] | None = None,\n ) -> HTTPResponse | None:\n 
# Short circuit if we've been given an empty set of data\n if not data:\n return None\n\n # Previous versions of this library supported other serialization\n # formats, but these have all been removed.\n if not data.startswith(f"cc={self.serde_version},".encode()):\n return None\n\n data = data[5:]\n return self._loads_v4(request, data, body_file)\n\n def prepare_response(\n self,\n request: PreparedRequest,\n cached: Mapping[str, Any],\n body_file: IO[bytes] | None = None,\n ) -> HTTPResponse | None:\n """Verify our vary headers match and construct a real urllib3\n HTTPResponse object.\n """\n # Special case the '*' Vary value as it means we cannot actually\n # determine if the cached response is suitable for this request.\n # This case is also handled in the controller code when creating\n # a cache entry, but is left here for backwards compatibility.\n if "*" in cached.get("vary", {}):\n return None\n\n # Ensure that the Vary headers for the cached response match our\n # request\n for header, value in cached.get("vary", {}).items():\n if request.headers.get(header, None) != value:\n return None\n\n body_raw = cached["response"].pop("body")\n\n headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(\n data=cached["response"]["headers"]\n )\n if headers.get("transfer-encoding", "") == "chunked":\n headers.pop("transfer-encoding")\n\n cached["response"]["headers"] = headers\n\n try:\n body: IO[bytes]\n if body_file is None:\n body = io.BytesIO(body_raw)\n else:\n body = body_file\n except TypeError:\n # This can happen if cachecontrol serialized to v1 format (pickle)\n # using Python 2. 
A Python 2 str(byte string) will be unpickled as\n # a Python 3 str (unicode string), which will cause the above to\n # fail with:\n #\n # TypeError: 'str' does not support the buffer interface\n body = io.BytesIO(body_raw.encode("utf8"))\n\n # Discard any `strict` parameter serialized by older version of cachecontrol.\n cached["response"].pop("strict", None)\n\n return HTTPResponse(body=body, preload_content=False, **cached["response"])\n\n def _loads_v4(\n self,\n request: PreparedRequest,\n data: bytes,\n body_file: IO[bytes] | None = None,\n ) -> HTTPResponse | None:\n try:\n cached = msgpack.loads(data, raw=False)\n except ValueError:\n return None\n\n return self.prepare_response(request, cached, body_file)\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\serialize.py
serialize.py
Python
5,163
0.95
0.191781
0.188525
node-utils
130
2024-10-24T20:52:18.181329
BSD-3-Clause
false
10331aad89207954e35a6a16098170f7
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Collection\n\nfrom pip._vendor.cachecontrol.adapter import CacheControlAdapter\nfrom pip._vendor.cachecontrol.cache import DictCache\n\nif TYPE_CHECKING:\n from pip._vendor import requests\n\n from pip._vendor.cachecontrol.cache import BaseCache\n from pip._vendor.cachecontrol.controller import CacheController\n from pip._vendor.cachecontrol.heuristics import BaseHeuristic\n from pip._vendor.cachecontrol.serialize import Serializer\n\n\ndef CacheControl(\n sess: requests.Session,\n cache: BaseCache | None = None,\n cache_etags: bool = True,\n serializer: Serializer | None = None,\n heuristic: BaseHeuristic | None = None,\n controller_class: type[CacheController] | None = None,\n adapter_class: type[CacheControlAdapter] | None = None,\n cacheable_methods: Collection[str] | None = None,\n) -> requests.Session:\n cache = DictCache() if cache is None else cache\n adapter_class = adapter_class or CacheControlAdapter\n adapter = adapter_class(\n cache,\n cache_etags=cache_etags,\n serializer=serializer,\n heuristic=heuristic,\n controller_class=controller_class,\n cacheable_methods=cacheable_methods,\n )\n sess.mount("http://", adapter)\n sess.mount("https://", adapter)\n\n return sess\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\wrapper.py
wrapper.py
Python
1,417
0.95
0.069767
0.083333
react-lib
418
2023-08-17T01:57:55.578834
Apache-2.0
false
5c04d764c34888fe64cf31011d0f6fad
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nimport logging\nfrom argparse import ArgumentParser\nfrom typing import TYPE_CHECKING\n\nfrom pip._vendor import requests\n\nfrom pip._vendor.cachecontrol.adapter import CacheControlAdapter\nfrom pip._vendor.cachecontrol.cache import DictCache\nfrom pip._vendor.cachecontrol.controller import logger\n\nif TYPE_CHECKING:\n from argparse import Namespace\n\n from pip._vendor.cachecontrol.controller import CacheController\n\n\ndef setup_logging() -> None:\n logger.setLevel(logging.DEBUG)\n handler = logging.StreamHandler()\n logger.addHandler(handler)\n\n\ndef get_session() -> requests.Session:\n adapter = CacheControlAdapter(\n DictCache(), cache_etags=True, serializer=None, heuristic=None\n )\n sess = requests.Session()\n sess.mount("http://", adapter)\n sess.mount("https://", adapter)\n\n sess.cache_controller = adapter.controller # type: ignore[attr-defined]\n return sess\n\n\ndef get_args() -> Namespace:\n parser = ArgumentParser()\n parser.add_argument("url", help="The URL to try and cache")\n return parser.parse_args()\n\n\ndef main() -> None:\n args = get_args()\n sess = get_session()\n\n # Make a request to get a response\n resp = sess.get(args.url)\n\n # Turn on logging\n setup_logging()\n\n # try setting the cache\n cache_controller: CacheController = (\n sess.cache_controller # type: ignore[attr-defined]\n )\n cache_controller.cache_response(resp.request, resp.raw)\n\n # Now try to get it\n if cache_controller.cached_request(resp.request):\n print("Cached!")\n else:\n print("Not cached :(")\n\n\nif __name__ == "__main__":\n main()\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\_cmd.py
_cmd.py
Python
1,737
0.95
0.142857
0.14
vue-tools
780
2023-12-16T11:54:24.970061
Apache-2.0
false
e4259d6ff28702fa18b2f2086840f66e
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\n\n"""CacheControl import Interface.\n\nMake it easy to import from cachecontrol without long namespaces.\n"""\n\n__author__ = "Eric Larson"\n__email__ = "eric@ionrock.org"\n__version__ = "0.14.2"\n\nfrom pip._vendor.cachecontrol.adapter import CacheControlAdapter\nfrom pip._vendor.cachecontrol.controller import CacheController\nfrom pip._vendor.cachecontrol.wrapper import CacheControl\n\n__all__ = [\n "__author__",\n "__email__",\n "__version__",\n "CacheControlAdapter",\n "CacheController",\n "CacheControl",\n]\n\nimport logging\n\nlogging.getLogger(__name__).addHandler(logging.NullHandler())\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__init__.py
__init__.py
Python
677
0.95
0
0.136364
react-lib
267
2025-02-14T17:21:42.634704
Apache-2.0
false
bc6a5114f5a6e713b5e0fca4df65b81f
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\nimport hashlib\nimport os\nimport tempfile\nfrom textwrap import dedent\nfrom typing import IO, TYPE_CHECKING\nfrom pathlib import Path\n\nfrom pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache\nfrom pip._vendor.cachecontrol.controller import CacheController\n\nif TYPE_CHECKING:\n from datetime import datetime\n\n from filelock import BaseFileLock\n\n\nclass _FileCacheMixin:\n """Shared implementation for both FileCache variants."""\n\n def __init__(\n self,\n directory: str | Path,\n forever: bool = False,\n filemode: int = 0o0600,\n dirmode: int = 0o0700,\n lock_class: type[BaseFileLock] | None = None,\n ) -> None:\n try:\n if lock_class is None:\n from filelock import FileLock\n\n lock_class = FileLock\n except ImportError:\n notice = dedent(\n """\n NOTE: In order to use the FileCache you must have\n filelock installed. You can install it via pip:\n pip install cachecontrol[filecache]\n """\n )\n raise ImportError(notice)\n\n self.directory = directory\n self.forever = forever\n self.filemode = filemode\n self.dirmode = dirmode\n self.lock_class = lock_class\n\n @staticmethod\n def encode(x: str) -> str:\n return hashlib.sha224(x.encode()).hexdigest()\n\n def _fn(self, name: str) -> str:\n # NOTE: This method should not change as some may depend on it.\n # See: https://github.com/ionrock/cachecontrol/issues/63\n hashed = self.encode(name)\n parts = list(hashed[:5]) + [hashed]\n return os.path.join(self.directory, *parts)\n\n def get(self, key: str) -> bytes | None:\n name = self._fn(key)\n try:\n with open(name, "rb") as fh:\n return fh.read()\n\n except FileNotFoundError:\n return None\n\n def set(\n self, key: str, value: bytes, expires: int | datetime | None = None\n ) -> None:\n name = self._fn(key)\n self._write(name, value)\n\n def _write(self, path: str, data: bytes) -> None:\n """\n Safely write the data to the 
given path.\n """\n # Make sure the directory exists\n dirname = os.path.dirname(path)\n os.makedirs(dirname, self.dirmode, exist_ok=True)\n\n with self.lock_class(path + ".lock"):\n # Write our actual file\n (fd, name) = tempfile.mkstemp(dir=dirname)\n try:\n os.write(fd, data)\n finally:\n os.close(fd)\n os.chmod(name, self.filemode)\n os.replace(name, path)\n\n def _delete(self, key: str, suffix: str) -> None:\n name = self._fn(key) + suffix\n if not self.forever:\n try:\n os.remove(name)\n except FileNotFoundError:\n pass\n\n\nclass FileCache(_FileCacheMixin, BaseCache):\n """\n Traditional FileCache: body is stored in memory, so not suitable for large\n downloads.\n """\n\n def delete(self, key: str) -> None:\n self._delete(key, "")\n\n\nclass SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache):\n """\n Memory-efficient FileCache: body is stored in a separate file, reducing\n peak memory usage.\n """\n\n def get_body(self, key: str) -> IO[bytes] | None:\n name = self._fn(key) + ".body"\n try:\n return open(name, "rb")\n except FileNotFoundError:\n return None\n\n def set_body(self, key: str, body: bytes) -> None:\n name = self._fn(key) + ".body"\n self._write(name, body)\n\n def delete(self, key: str) -> None:\n self._delete(key, "")\n self._delete(key, ".body")\n\n\ndef url_to_file_path(url: str, filecache: FileCache) -> str:\n """Return the file cache path based on the URL.\n\n This does not ensure the file exists!\n """\n key = CacheController.cache_url(url)\n return filecache._fn(key)\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\caches\file_cache.py
file_cache.py
Python
4,117
0.95
0.172414
0.059829
vue-tools
491
2025-03-26T23:54:39.107577
GPL-3.0
false
0d5a61fe91938fc18f9895e8f2283e4c
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\nfrom __future__ import annotations\n\n\nfrom datetime import datetime, timezone\nfrom typing import TYPE_CHECKING\n\nfrom pip._vendor.cachecontrol.cache import BaseCache\n\nif TYPE_CHECKING:\n from redis import Redis\n\n\nclass RedisCache(BaseCache):\n def __init__(self, conn: Redis[bytes]) -> None:\n self.conn = conn\n\n def get(self, key: str) -> bytes | None:\n return self.conn.get(key)\n\n def set(\n self, key: str, value: bytes, expires: int | datetime | None = None\n ) -> None:\n if not expires:\n self.conn.set(key, value)\n elif isinstance(expires, datetime):\n now_utc = datetime.now(timezone.utc)\n if expires.tzinfo is None:\n now_utc = now_utc.replace(tzinfo=None)\n delta = expires - now_utc\n self.conn.setex(key, int(delta.total_seconds()), value)\n else:\n self.conn.setex(key, expires, value)\n\n def delete(self, key: str) -> None:\n self.conn.delete(key)\n\n def clear(self) -> None:\n """Helper for clearing all the keys in a database. Use with\n caution!"""\n for key in self.conn.keys():\n self.conn.delete(key)\n\n def close(self) -> None:\n """Redis uses connection pooling, no need to close the connection."""\n pass\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\caches\redis_cache.py
redis_cache.py
Python
1,386
0.95
0.25
0.081081
node-utils
294
2025-06-11T00:03:29.945460
MIT
false
fefe321269efacc26b40436d7ff65295
# SPDX-FileCopyrightText: 2015 Eric Larson\n#\n# SPDX-License-Identifier: Apache-2.0\n\nfrom pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache\nfrom pip._vendor.cachecontrol.caches.redis_cache import RedisCache\n\n__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"]\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\caches\__init__.py
__init__.py
Python
303
0.95
0
0.5
awesome-app
613
2024-03-26T11:12:46.419700
GPL-3.0
false
a854b9652b8647abb5b30ca3260d2dff
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\caches\__pycache__\file_cache.cpython-313.pyc
file_cache.cpython-313.pyc
Other
7,166
0.8
0.027027
0.028169
awesome-app
266
2024-03-24T01:21:49.062527
BSD-3-Clause
false
72b36f8965136a95c97b69344e70a2ac
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\caches\__pycache__\redis_cache.cpython-313.pyc
redis_cache.cpython-313.pyc
Other
2,819
0.8
0.034483
0.071429
react-lib
978
2024-01-04T08:03:59.244547
MIT
false
11557a3c2351518e67c0c41dbebdd111
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\caches\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
438
0.7
0
0
node-utils
75
2024-10-03T20:57:16.836794
MIT
false
3304398681c33c652dce14c6d746fff3
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\adapter.cpython-313.pyc
adapter.cpython-313.pyc
Other
6,853
0.8
0.022989
0
react-lib
510
2025-04-24T08:24:23.979945
MIT
false
dca7c404a08a23d6069c875009729f99
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\cache.cpython-313.pyc
cache.cpython-313.pyc
Other
3,930
0.8
0.015152
0
node-utils
227
2024-04-04T14:14:18.540680
BSD-3-Clause
false
68dda624219bdf2d2c83e174b6cdc6a5
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\controller.cpython-313.pyc
controller.cpython-313.pyc
Other
16,854
0.95
0.046729
0
awesome-app
659
2024-03-16T10:59:11.493677
MIT
false
bdb01e2f1c0d6e10e9e8c42f151693a8
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\filewrapper.cpython-313.pyc
filewrapper.cpython-313.pyc
Other
4,436
0.95
0.036364
0
react-lib
861
2024-07-08T10:33:54.555541
BSD-3-Clause
false
b8008872a0f359bdb6a7644269d9db8e
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\heuristics.cpython-313.pyc
heuristics.cpython-313.pyc
Other
6,772
0.8
0.026667
0
node-utils
280
2024-09-22T03:20:48.212443
GPL-3.0
false
c9a20b2ab1981bacfd9c4c22fe415e0b
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\serialize.cpython-313.pyc
serialize.cpython-313.pyc
Other
5,369
0.8
0
0
python-kit
425
2024-09-23T19:23:34.987153
MIT
false
9d8a7698447029cfa62b4ba19c8b61f6
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\wrapper.cpython-313.pyc
wrapper.cpython-313.pyc
Other
1,697
0.8
0
0
awesome-app
951
2024-10-04T10:54:53.509600
BSD-3-Clause
false
c61e35eb03717f681810b6d29023c267
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\_cmd.cpython-313.pyc
_cmd.cpython-313.pyc
Other
2,678
0.8
0.032258
0
react-lib
321
2025-05-03T02:08:50.880304
GPL-3.0
false
71408b4a6af4cdd12665bb0984181680
\n\n
.venv\Lib\site-packages\pip\_vendor\cachecontrol\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
900
0.95
0
0
python-kit
906
2024-06-28T01:12:47.806541
Apache-2.0
false
e8fea874e1210f29196f010aadcd719e