Add files using upload-large-folder tool
Browse files- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/adapters.py +719 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/compat.py +90 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/cookies.py +561 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/exceptions.py +151 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/help.py +127 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/hooks.py +33 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/models.py +1039 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/packages.py +25 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/sessions.py +831 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/live_render.py +106 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/logging.py +297 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/markup.py +251 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/measure.py +151 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/padding.py +141 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/pager.py +34 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/palette.py +100 -0
- external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/panel.py +317 -0
- hfenv/Lib/site-packages/setuptools/command/build_py.py +368 -0
- hfenv/Lib/site-packages/setuptools/command/develop.py +193 -0
- hfenv/Lib/site-packages/setuptools/command/dist_info.py +142 -0
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/adapters.py
ADDED
|
@@ -0,0 +1,719 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.adapters
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the transport adapters that Requests uses to define
|
| 6 |
+
and maintain connections.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os.path
|
| 10 |
+
import socket # noqa: F401
|
| 11 |
+
import typing
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
from pip._vendor.urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
|
| 15 |
+
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
|
| 16 |
+
from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader
|
| 17 |
+
from pip._vendor.urllib3.exceptions import (
|
| 18 |
+
LocationValueError,
|
| 19 |
+
MaxRetryError,
|
| 20 |
+
NewConnectionError,
|
| 21 |
+
ProtocolError,
|
| 22 |
+
)
|
| 23 |
+
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
|
| 24 |
+
from pip._vendor.urllib3.exceptions import ReadTimeoutError, ResponseError
|
| 25 |
+
from pip._vendor.urllib3.exceptions import SSLError as _SSLError
|
| 26 |
+
from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
|
| 27 |
+
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
|
| 28 |
+
from pip._vendor.urllib3.util import parse_url
|
| 29 |
+
from pip._vendor.urllib3.util.retry import Retry
|
| 30 |
+
from pip._vendor.urllib3.util.ssl_ import create_urllib3_context
|
| 31 |
+
|
| 32 |
+
from .auth import _basic_auth_str
|
| 33 |
+
from .compat import basestring, urlparse
|
| 34 |
+
from .cookies import extract_cookies_to_jar
|
| 35 |
+
from .exceptions import (
|
| 36 |
+
ConnectionError,
|
| 37 |
+
ConnectTimeout,
|
| 38 |
+
InvalidHeader,
|
| 39 |
+
InvalidProxyURL,
|
| 40 |
+
InvalidSchema,
|
| 41 |
+
InvalidURL,
|
| 42 |
+
ProxyError,
|
| 43 |
+
ReadTimeout,
|
| 44 |
+
RetryError,
|
| 45 |
+
SSLError,
|
| 46 |
+
)
|
| 47 |
+
from .models import Response
|
| 48 |
+
from .structures import CaseInsensitiveDict
|
| 49 |
+
from .utils import (
|
| 50 |
+
DEFAULT_CA_BUNDLE_PATH,
|
| 51 |
+
extract_zipped_paths,
|
| 52 |
+
get_auth_from_url,
|
| 53 |
+
get_encoding_from_headers,
|
| 54 |
+
prepend_scheme_if_needed,
|
| 55 |
+
select_proxy,
|
| 56 |
+
urldefragauth,
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
try:
|
| 60 |
+
from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
|
| 61 |
+
except ImportError:
|
| 62 |
+
|
| 63 |
+
def SOCKSProxyManager(*args, **kwargs):
|
| 64 |
+
raise InvalidSchema("Missing dependencies for SOCKS support.")
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
if typing.TYPE_CHECKING:
|
| 68 |
+
from .models import PreparedRequest
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
DEFAULT_POOLBLOCK = False
|
| 72 |
+
DEFAULT_POOLSIZE = 10
|
| 73 |
+
DEFAULT_RETRIES = 0
|
| 74 |
+
DEFAULT_POOL_TIMEOUT = None
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
try:
|
| 78 |
+
import ssl # noqa: F401
|
| 79 |
+
|
| 80 |
+
_preloaded_ssl_context = create_urllib3_context()
|
| 81 |
+
_preloaded_ssl_context.load_verify_locations(
|
| 82 |
+
extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
|
| 83 |
+
)
|
| 84 |
+
except ImportError:
|
| 85 |
+
# Bypass default SSLContext creation when Python
|
| 86 |
+
# interpreter isn't built with the ssl module.
|
| 87 |
+
_preloaded_ssl_context = None
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _urllib3_request_context(
|
| 91 |
+
request: "PreparedRequest",
|
| 92 |
+
verify: "bool | str | None",
|
| 93 |
+
client_cert: "typing.Tuple[str, str] | str | None",
|
| 94 |
+
poolmanager: "PoolManager",
|
| 95 |
+
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
|
| 96 |
+
host_params = {}
|
| 97 |
+
pool_kwargs = {}
|
| 98 |
+
parsed_request_url = urlparse(request.url)
|
| 99 |
+
scheme = parsed_request_url.scheme.lower()
|
| 100 |
+
port = parsed_request_url.port
|
| 101 |
+
|
| 102 |
+
# Determine if we have and should use our default SSLContext
|
| 103 |
+
# to optimize performance on standard requests.
|
| 104 |
+
poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {})
|
| 105 |
+
has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context")
|
| 106 |
+
should_use_default_ssl_context = (
|
| 107 |
+
_preloaded_ssl_context is not None and not has_poolmanager_ssl_context
|
| 108 |
+
)
|
| 109 |
+
|
| 110 |
+
cert_reqs = "CERT_REQUIRED"
|
| 111 |
+
if verify is False:
|
| 112 |
+
cert_reqs = "CERT_NONE"
|
| 113 |
+
elif verify is True and should_use_default_ssl_context:
|
| 114 |
+
pool_kwargs["ssl_context"] = _preloaded_ssl_context
|
| 115 |
+
elif isinstance(verify, str):
|
| 116 |
+
if not os.path.isdir(verify):
|
| 117 |
+
pool_kwargs["ca_certs"] = verify
|
| 118 |
+
else:
|
| 119 |
+
pool_kwargs["ca_cert_dir"] = verify
|
| 120 |
+
pool_kwargs["cert_reqs"] = cert_reqs
|
| 121 |
+
if client_cert is not None:
|
| 122 |
+
if isinstance(client_cert, tuple) and len(client_cert) == 2:
|
| 123 |
+
pool_kwargs["cert_file"] = client_cert[0]
|
| 124 |
+
pool_kwargs["key_file"] = client_cert[1]
|
| 125 |
+
else:
|
| 126 |
+
# According to our docs, we allow users to specify just the client
|
| 127 |
+
# cert path
|
| 128 |
+
pool_kwargs["cert_file"] = client_cert
|
| 129 |
+
host_params = {
|
| 130 |
+
"scheme": scheme,
|
| 131 |
+
"host": parsed_request_url.hostname,
|
| 132 |
+
"port": port,
|
| 133 |
+
}
|
| 134 |
+
return host_params, pool_kwargs
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
class BaseAdapter:
|
| 138 |
+
"""The Base Transport Adapter"""
|
| 139 |
+
|
| 140 |
+
def __init__(self):
|
| 141 |
+
super().__init__()
|
| 142 |
+
|
| 143 |
+
def send(
|
| 144 |
+
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
|
| 145 |
+
):
|
| 146 |
+
"""Sends PreparedRequest object. Returns Response object.
|
| 147 |
+
|
| 148 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
| 149 |
+
:param stream: (optional) Whether to stream the request content.
|
| 150 |
+
:param timeout: (optional) How long to wait for the server to send
|
| 151 |
+
data before giving up, as a float, or a :ref:`(connect timeout,
|
| 152 |
+
read timeout) <timeouts>` tuple.
|
| 153 |
+
:type timeout: float or tuple
|
| 154 |
+
:param verify: (optional) Either a boolean, in which case it controls whether we verify
|
| 155 |
+
the server's TLS certificate, or a string, in which case it must be a path
|
| 156 |
+
to a CA bundle to use
|
| 157 |
+
:param cert: (optional) Any user-provided SSL certificate to be trusted.
|
| 158 |
+
:param proxies: (optional) The proxies dictionary to apply to the request.
|
| 159 |
+
"""
|
| 160 |
+
raise NotImplementedError
|
| 161 |
+
|
| 162 |
+
def close(self):
|
| 163 |
+
"""Cleans up adapter specific items."""
|
| 164 |
+
raise NotImplementedError
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class HTTPAdapter(BaseAdapter):
|
| 168 |
+
"""The built-in HTTP Adapter for urllib3.
|
| 169 |
+
|
| 170 |
+
Provides a general-case interface for Requests sessions to contact HTTP and
|
| 171 |
+
HTTPS urls by implementing the Transport Adapter interface. This class will
|
| 172 |
+
usually be created by the :class:`Session <Session>` class under the
|
| 173 |
+
covers.
|
| 174 |
+
|
| 175 |
+
:param pool_connections: The number of urllib3 connection pools to cache.
|
| 176 |
+
:param pool_maxsize: The maximum number of connections to save in the pool.
|
| 177 |
+
:param max_retries: The maximum number of retries each connection
|
| 178 |
+
should attempt. Note, this applies only to failed DNS lookups, socket
|
| 179 |
+
connections and connection timeouts, never to requests where data has
|
| 180 |
+
made it to the server. By default, Requests does not retry failed
|
| 181 |
+
connections. If you need granular control over the conditions under
|
| 182 |
+
which we retry a request, import urllib3's ``Retry`` class and pass
|
| 183 |
+
that instead.
|
| 184 |
+
:param pool_block: Whether the connection pool should block for connections.
|
| 185 |
+
|
| 186 |
+
Usage::
|
| 187 |
+
|
| 188 |
+
>>> import requests
|
| 189 |
+
>>> s = requests.Session()
|
| 190 |
+
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
|
| 191 |
+
>>> s.mount('http://', a)
|
| 192 |
+
"""
|
| 193 |
+
|
| 194 |
+
__attrs__ = [
|
| 195 |
+
"max_retries",
|
| 196 |
+
"config",
|
| 197 |
+
"_pool_connections",
|
| 198 |
+
"_pool_maxsize",
|
| 199 |
+
"_pool_block",
|
| 200 |
+
]
|
| 201 |
+
|
| 202 |
+
def __init__(
|
| 203 |
+
self,
|
| 204 |
+
pool_connections=DEFAULT_POOLSIZE,
|
| 205 |
+
pool_maxsize=DEFAULT_POOLSIZE,
|
| 206 |
+
max_retries=DEFAULT_RETRIES,
|
| 207 |
+
pool_block=DEFAULT_POOLBLOCK,
|
| 208 |
+
):
|
| 209 |
+
if max_retries == DEFAULT_RETRIES:
|
| 210 |
+
self.max_retries = Retry(0, read=False)
|
| 211 |
+
else:
|
| 212 |
+
self.max_retries = Retry.from_int(max_retries)
|
| 213 |
+
self.config = {}
|
| 214 |
+
self.proxy_manager = {}
|
| 215 |
+
|
| 216 |
+
super().__init__()
|
| 217 |
+
|
| 218 |
+
self._pool_connections = pool_connections
|
| 219 |
+
self._pool_maxsize = pool_maxsize
|
| 220 |
+
self._pool_block = pool_block
|
| 221 |
+
|
| 222 |
+
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
|
| 223 |
+
|
| 224 |
+
def __getstate__(self):
|
| 225 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 226 |
+
|
| 227 |
+
def __setstate__(self, state):
|
| 228 |
+
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
| 229 |
+
# self.poolmanager uses a lambda function, which isn't pickleable.
|
| 230 |
+
self.proxy_manager = {}
|
| 231 |
+
self.config = {}
|
| 232 |
+
|
| 233 |
+
for attr, value in state.items():
|
| 234 |
+
setattr(self, attr, value)
|
| 235 |
+
|
| 236 |
+
self.init_poolmanager(
|
| 237 |
+
self._pool_connections, self._pool_maxsize, block=self._pool_block
|
| 238 |
+
)
|
| 239 |
+
|
| 240 |
+
def init_poolmanager(
|
| 241 |
+
self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
|
| 242 |
+
):
|
| 243 |
+
"""Initializes a urllib3 PoolManager.
|
| 244 |
+
|
| 245 |
+
This method should not be called from user code, and is only
|
| 246 |
+
exposed for use when subclassing the
|
| 247 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 248 |
+
|
| 249 |
+
:param connections: The number of urllib3 connection pools to cache.
|
| 250 |
+
:param maxsize: The maximum number of connections to save in the pool.
|
| 251 |
+
:param block: Block when no free connections are available.
|
| 252 |
+
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
|
| 253 |
+
"""
|
| 254 |
+
# save these values for pickling
|
| 255 |
+
self._pool_connections = connections
|
| 256 |
+
self._pool_maxsize = maxsize
|
| 257 |
+
self._pool_block = block
|
| 258 |
+
|
| 259 |
+
self.poolmanager = PoolManager(
|
| 260 |
+
num_pools=connections,
|
| 261 |
+
maxsize=maxsize,
|
| 262 |
+
block=block,
|
| 263 |
+
**pool_kwargs,
|
| 264 |
+
)
|
| 265 |
+
|
| 266 |
+
def proxy_manager_for(self, proxy, **proxy_kwargs):
|
| 267 |
+
"""Return urllib3 ProxyManager for the given proxy.
|
| 268 |
+
|
| 269 |
+
This method should not be called from user code, and is only
|
| 270 |
+
exposed for use when subclassing the
|
| 271 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 272 |
+
|
| 273 |
+
:param proxy: The proxy to return a urllib3 ProxyManager for.
|
| 274 |
+
:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
|
| 275 |
+
:returns: ProxyManager
|
| 276 |
+
:rtype: urllib3.ProxyManager
|
| 277 |
+
"""
|
| 278 |
+
if proxy in self.proxy_manager:
|
| 279 |
+
manager = self.proxy_manager[proxy]
|
| 280 |
+
elif proxy.lower().startswith("socks"):
|
| 281 |
+
username, password = get_auth_from_url(proxy)
|
| 282 |
+
manager = self.proxy_manager[proxy] = SOCKSProxyManager(
|
| 283 |
+
proxy,
|
| 284 |
+
username=username,
|
| 285 |
+
password=password,
|
| 286 |
+
num_pools=self._pool_connections,
|
| 287 |
+
maxsize=self._pool_maxsize,
|
| 288 |
+
block=self._pool_block,
|
| 289 |
+
**proxy_kwargs,
|
| 290 |
+
)
|
| 291 |
+
else:
|
| 292 |
+
proxy_headers = self.proxy_headers(proxy)
|
| 293 |
+
manager = self.proxy_manager[proxy] = proxy_from_url(
|
| 294 |
+
proxy,
|
| 295 |
+
proxy_headers=proxy_headers,
|
| 296 |
+
num_pools=self._pool_connections,
|
| 297 |
+
maxsize=self._pool_maxsize,
|
| 298 |
+
block=self._pool_block,
|
| 299 |
+
**proxy_kwargs,
|
| 300 |
+
)
|
| 301 |
+
|
| 302 |
+
return manager
|
| 303 |
+
|
| 304 |
+
def cert_verify(self, conn, url, verify, cert):
|
| 305 |
+
"""Verify a SSL certificate. This method should not be called from user
|
| 306 |
+
code, and is only exposed for use when subclassing the
|
| 307 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 308 |
+
|
| 309 |
+
:param conn: The urllib3 connection object associated with the cert.
|
| 310 |
+
:param url: The requested URL.
|
| 311 |
+
:param verify: Either a boolean, in which case it controls whether we verify
|
| 312 |
+
the server's TLS certificate, or a string, in which case it must be a path
|
| 313 |
+
to a CA bundle to use
|
| 314 |
+
:param cert: The SSL certificate to verify.
|
| 315 |
+
"""
|
| 316 |
+
if url.lower().startswith("https") and verify:
|
| 317 |
+
conn.cert_reqs = "CERT_REQUIRED"
|
| 318 |
+
|
| 319 |
+
# Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
|
| 320 |
+
# Otherwise, if verify is a boolean, we don't load anything since
|
| 321 |
+
# the connection will be using a context with the default certificates already loaded,
|
| 322 |
+
# and this avoids a call to the slow load_verify_locations()
|
| 323 |
+
if verify is not True:
|
| 324 |
+
# `verify` must be a str with a path then
|
| 325 |
+
cert_loc = verify
|
| 326 |
+
|
| 327 |
+
if not os.path.exists(cert_loc):
|
| 328 |
+
raise OSError(
|
| 329 |
+
f"Could not find a suitable TLS CA certificate bundle, "
|
| 330 |
+
f"invalid path: {cert_loc}"
|
| 331 |
+
)
|
| 332 |
+
|
| 333 |
+
if not os.path.isdir(cert_loc):
|
| 334 |
+
conn.ca_certs = cert_loc
|
| 335 |
+
else:
|
| 336 |
+
conn.ca_cert_dir = cert_loc
|
| 337 |
+
else:
|
| 338 |
+
conn.cert_reqs = "CERT_NONE"
|
| 339 |
+
conn.ca_certs = None
|
| 340 |
+
conn.ca_cert_dir = None
|
| 341 |
+
|
| 342 |
+
if cert:
|
| 343 |
+
if not isinstance(cert, basestring):
|
| 344 |
+
conn.cert_file = cert[0]
|
| 345 |
+
conn.key_file = cert[1]
|
| 346 |
+
else:
|
| 347 |
+
conn.cert_file = cert
|
| 348 |
+
conn.key_file = None
|
| 349 |
+
if conn.cert_file and not os.path.exists(conn.cert_file):
|
| 350 |
+
raise OSError(
|
| 351 |
+
f"Could not find the TLS certificate file, "
|
| 352 |
+
f"invalid path: {conn.cert_file}"
|
| 353 |
+
)
|
| 354 |
+
if conn.key_file and not os.path.exists(conn.key_file):
|
| 355 |
+
raise OSError(
|
| 356 |
+
f"Could not find the TLS key file, invalid path: {conn.key_file}"
|
| 357 |
+
)
|
| 358 |
+
|
| 359 |
+
def build_response(self, req, resp):
|
| 360 |
+
"""Builds a :class:`Response <requests.Response>` object from a urllib3
|
| 361 |
+
response. This should not be called from user code, and is only exposed
|
| 362 |
+
for use when subclassing the
|
| 363 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
|
| 364 |
+
|
| 365 |
+
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
|
| 366 |
+
:param resp: The urllib3 response object.
|
| 367 |
+
:rtype: requests.Response
|
| 368 |
+
"""
|
| 369 |
+
response = Response()
|
| 370 |
+
|
| 371 |
+
# Fallback to None if there's no status_code, for whatever reason.
|
| 372 |
+
response.status_code = getattr(resp, "status", None)
|
| 373 |
+
|
| 374 |
+
# Make headers case-insensitive.
|
| 375 |
+
response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
|
| 376 |
+
|
| 377 |
+
# Set encoding.
|
| 378 |
+
response.encoding = get_encoding_from_headers(response.headers)
|
| 379 |
+
response.raw = resp
|
| 380 |
+
response.reason = response.raw.reason
|
| 381 |
+
|
| 382 |
+
if isinstance(req.url, bytes):
|
| 383 |
+
response.url = req.url.decode("utf-8")
|
| 384 |
+
else:
|
| 385 |
+
response.url = req.url
|
| 386 |
+
|
| 387 |
+
# Add new cookies from the server.
|
| 388 |
+
extract_cookies_to_jar(response.cookies, req, resp)
|
| 389 |
+
|
| 390 |
+
# Give the Response some context.
|
| 391 |
+
response.request = req
|
| 392 |
+
response.connection = self
|
| 393 |
+
|
| 394 |
+
return response
|
| 395 |
+
|
| 396 |
+
def build_connection_pool_key_attributes(self, request, verify, cert=None):
|
| 397 |
+
"""Build the PoolKey attributes used by urllib3 to return a connection.
|
| 398 |
+
|
| 399 |
+
This looks at the PreparedRequest, the user-specified verify value,
|
| 400 |
+
and the value of the cert parameter to determine what PoolKey values
|
| 401 |
+
to use to select a connection from a given urllib3 Connection Pool.
|
| 402 |
+
|
| 403 |
+
The SSL related pool key arguments are not consistently set. As of
|
| 404 |
+
this writing, use the following to determine what keys may be in that
|
| 405 |
+
dictionary:
|
| 406 |
+
|
| 407 |
+
* If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
|
| 408 |
+
default Requests SSL Context
|
| 409 |
+
* If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
|
| 410 |
+
``"cert_reqs"`` will be set
|
| 411 |
+
* If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
|
| 412 |
+
``"ca_certs"`` will be set if the string is not a directory recognized
|
| 413 |
+
by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be
|
| 414 |
+
set.
|
| 415 |
+
* If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
|
| 416 |
+
``"cert"`` is a tuple with a second item, ``"key_file"`` will also
|
| 417 |
+
be present
|
| 418 |
+
|
| 419 |
+
To override these settings, one may subclass this class, call this
|
| 420 |
+
method and use the above logic to change parameters as desired. For
|
| 421 |
+
example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
|
| 422 |
+
must both set ``"ssl_context"`` and based on what else they require,
|
| 423 |
+
alter the other keys to ensure the desired behaviour.
|
| 424 |
+
|
| 425 |
+
:param request:
|
| 426 |
+
The PreparedReqest being sent over the connection.
|
| 427 |
+
:type request:
|
| 428 |
+
:class:`~requests.models.PreparedRequest`
|
| 429 |
+
:param verify:
|
| 430 |
+
Either a boolean, in which case it controls whether
|
| 431 |
+
we verify the server's TLS certificate, or a string, in which case it
|
| 432 |
+
must be a path to a CA bundle to use.
|
| 433 |
+
:param cert:
|
| 434 |
+
(optional) Any user-provided SSL certificate for client
|
| 435 |
+
authentication (a.k.a., mTLS). This may be a string (i.e., just
|
| 436 |
+
the path to a file which holds both certificate and key) or a
|
| 437 |
+
tuple of length 2 with the certificate file path and key file
|
| 438 |
+
path.
|
| 439 |
+
:returns:
|
| 440 |
+
A tuple of two dictionaries. The first is the "host parameters"
|
| 441 |
+
portion of the Pool Key including scheme, hostname, and port. The
|
| 442 |
+
second is a dictionary of SSLContext related parameters.
|
| 443 |
+
"""
|
| 444 |
+
return _urllib3_request_context(request, verify, cert, self.poolmanager)
|
| 445 |
+
|
| 446 |
+
def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
|
| 447 |
+
"""Returns a urllib3 connection for the given request and TLS settings.
|
| 448 |
+
This should not be called from user code, and is only exposed for use
|
| 449 |
+
when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 450 |
+
|
| 451 |
+
:param request:
|
| 452 |
+
The :class:`PreparedRequest <PreparedRequest>` object to be sent
|
| 453 |
+
over the connection.
|
| 454 |
+
:param verify:
|
| 455 |
+
Either a boolean, in which case it controls whether we verify the
|
| 456 |
+
server's TLS certificate, or a string, in which case it must be a
|
| 457 |
+
path to a CA bundle to use.
|
| 458 |
+
:param proxies:
|
| 459 |
+
(optional) The proxies dictionary to apply to the request.
|
| 460 |
+
:param cert:
|
| 461 |
+
(optional) Any user-provided SSL certificate to be used for client
|
| 462 |
+
authentication (a.k.a., mTLS).
|
| 463 |
+
:rtype:
|
| 464 |
+
urllib3.ConnectionPool
|
| 465 |
+
"""
|
| 466 |
+
proxy = select_proxy(request.url, proxies)
|
| 467 |
+
try:
|
| 468 |
+
host_params, pool_kwargs = self.build_connection_pool_key_attributes(
|
| 469 |
+
request,
|
| 470 |
+
verify,
|
| 471 |
+
cert,
|
| 472 |
+
)
|
| 473 |
+
except ValueError as e:
|
| 474 |
+
raise InvalidURL(e, request=request)
|
| 475 |
+
if proxy:
|
| 476 |
+
proxy = prepend_scheme_if_needed(proxy, "http")
|
| 477 |
+
proxy_url = parse_url(proxy)
|
| 478 |
+
if not proxy_url.host:
|
| 479 |
+
raise InvalidProxyURL(
|
| 480 |
+
"Please check proxy URL. It is malformed "
|
| 481 |
+
"and could be missing the host."
|
| 482 |
+
)
|
| 483 |
+
proxy_manager = self.proxy_manager_for(proxy)
|
| 484 |
+
conn = proxy_manager.connection_from_host(
|
| 485 |
+
**host_params, pool_kwargs=pool_kwargs
|
| 486 |
+
)
|
| 487 |
+
else:
|
| 488 |
+
# Only scheme should be lower case
|
| 489 |
+
conn = self.poolmanager.connection_from_host(
|
| 490 |
+
**host_params, pool_kwargs=pool_kwargs
|
| 491 |
+
)
|
| 492 |
+
|
| 493 |
+
return conn
|
| 494 |
+
|
| 495 |
+
def get_connection(self, url, proxies=None):
|
| 496 |
+
"""DEPRECATED: Users should move to `get_connection_with_tls_context`
|
| 497 |
+
for all subclasses of HTTPAdapter using Requests>=2.32.2.
|
| 498 |
+
|
| 499 |
+
Returns a urllib3 connection for the given URL. This should not be
|
| 500 |
+
called from user code, and is only exposed for use when subclassing the
|
| 501 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 502 |
+
|
| 503 |
+
:param url: The URL to connect to.
|
| 504 |
+
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
|
| 505 |
+
:rtype: urllib3.ConnectionPool
|
| 506 |
+
"""
|
| 507 |
+
warnings.warn(
|
| 508 |
+
(
|
| 509 |
+
"`get_connection` has been deprecated in favor of "
|
| 510 |
+
"`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
|
| 511 |
+
"will need to migrate for Requests>=2.32.2. Please see "
|
| 512 |
+
"https://github.com/psf/requests/pull/6710 for more details."
|
| 513 |
+
),
|
| 514 |
+
DeprecationWarning,
|
| 515 |
+
)
|
| 516 |
+
proxy = select_proxy(url, proxies)
|
| 517 |
+
|
| 518 |
+
if proxy:
|
| 519 |
+
proxy = prepend_scheme_if_needed(proxy, "http")
|
| 520 |
+
proxy_url = parse_url(proxy)
|
| 521 |
+
if not proxy_url.host:
|
| 522 |
+
raise InvalidProxyURL(
|
| 523 |
+
"Please check proxy URL. It is malformed "
|
| 524 |
+
"and could be missing the host."
|
| 525 |
+
)
|
| 526 |
+
proxy_manager = self.proxy_manager_for(proxy)
|
| 527 |
+
conn = proxy_manager.connection_from_url(url)
|
| 528 |
+
else:
|
| 529 |
+
# Only scheme should be lower case
|
| 530 |
+
parsed = urlparse(url)
|
| 531 |
+
url = parsed.geturl()
|
| 532 |
+
conn = self.poolmanager.connection_from_url(url)
|
| 533 |
+
|
| 534 |
+
return conn
|
| 535 |
+
|
| 536 |
+
def close(self):
|
| 537 |
+
"""Disposes of any internal state.
|
| 538 |
+
|
| 539 |
+
Currently, this closes the PoolManager and any active ProxyManager,
|
| 540 |
+
which closes any pooled connections.
|
| 541 |
+
"""
|
| 542 |
+
self.poolmanager.clear()
|
| 543 |
+
for proxy in self.proxy_manager.values():
|
| 544 |
+
proxy.clear()
|
| 545 |
+
|
| 546 |
+
def request_url(self, request, proxies):
|
| 547 |
+
"""Obtain the url to use when making the final request.
|
| 548 |
+
|
| 549 |
+
If the message is being sent through a HTTP proxy, the full URL has to
|
| 550 |
+
be used. Otherwise, we should only use the path portion of the URL.
|
| 551 |
+
|
| 552 |
+
This should not be called from user code, and is only exposed for use
|
| 553 |
+
when subclassing the
|
| 554 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 555 |
+
|
| 556 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
| 557 |
+
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
|
| 558 |
+
:rtype: str
|
| 559 |
+
"""
|
| 560 |
+
proxy = select_proxy(request.url, proxies)
|
| 561 |
+
scheme = urlparse(request.url).scheme
|
| 562 |
+
|
| 563 |
+
is_proxied_http_request = proxy and scheme != "https"
|
| 564 |
+
using_socks_proxy = False
|
| 565 |
+
if proxy:
|
| 566 |
+
proxy_scheme = urlparse(proxy).scheme.lower()
|
| 567 |
+
using_socks_proxy = proxy_scheme.startswith("socks")
|
| 568 |
+
|
| 569 |
+
url = request.path_url
|
| 570 |
+
if url.startswith("//"): # Don't confuse urllib3
|
| 571 |
+
url = f"/{url.lstrip('/')}"
|
| 572 |
+
|
| 573 |
+
if is_proxied_http_request and not using_socks_proxy:
|
| 574 |
+
url = urldefragauth(request.url)
|
| 575 |
+
|
| 576 |
+
return url
|
| 577 |
+
|
| 578 |
+
def add_headers(self, request, **kwargs):
|
| 579 |
+
"""Add any headers needed by the connection. As of v2.0 this does
|
| 580 |
+
nothing by default, but is left for overriding by users that subclass
|
| 581 |
+
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 582 |
+
|
| 583 |
+
This should not be called from user code, and is only exposed for use
|
| 584 |
+
when subclassing the
|
| 585 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 586 |
+
|
| 587 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
|
| 588 |
+
:param kwargs: The keyword arguments from the call to send().
|
| 589 |
+
"""
|
| 590 |
+
pass
|
| 591 |
+
|
| 592 |
+
def proxy_headers(self, proxy):
|
| 593 |
+
"""Returns a dictionary of the headers to add to any request sent
|
| 594 |
+
through a proxy. This works with urllib3 magic to ensure that they are
|
| 595 |
+
correctly sent to the proxy, rather than in a tunnelled request if
|
| 596 |
+
CONNECT is being used.
|
| 597 |
+
|
| 598 |
+
This should not be called from user code, and is only exposed for use
|
| 599 |
+
when subclassing the
|
| 600 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 601 |
+
|
| 602 |
+
:param proxy: The url of the proxy being used for this request.
|
| 603 |
+
:rtype: dict
|
| 604 |
+
"""
|
| 605 |
+
headers = {}
|
| 606 |
+
username, password = get_auth_from_url(proxy)
|
| 607 |
+
|
| 608 |
+
if username:
|
| 609 |
+
headers["Proxy-Authorization"] = _basic_auth_str(username, password)
|
| 610 |
+
|
| 611 |
+
return headers
|
| 612 |
+
|
| 613 |
+
def send(
|
| 614 |
+
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
|
| 615 |
+
):
|
| 616 |
+
"""Sends PreparedRequest object. Returns Response object.
|
| 617 |
+
|
| 618 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
| 619 |
+
:param stream: (optional) Whether to stream the request content.
|
| 620 |
+
:param timeout: (optional) How long to wait for the server to send
|
| 621 |
+
data before giving up, as a float, or a :ref:`(connect timeout,
|
| 622 |
+
read timeout) <timeouts>` tuple.
|
| 623 |
+
:type timeout: float or tuple or urllib3 Timeout object
|
| 624 |
+
:param verify: (optional) Either a boolean, in which case it controls whether
|
| 625 |
+
we verify the server's TLS certificate, or a string, in which case it
|
| 626 |
+
must be a path to a CA bundle to use
|
| 627 |
+
:param cert: (optional) Any user-provided SSL certificate to be trusted.
|
| 628 |
+
:param proxies: (optional) The proxies dictionary to apply to the request.
|
| 629 |
+
:rtype: requests.Response
|
| 630 |
+
"""
|
| 631 |
+
|
| 632 |
+
try:
|
| 633 |
+
conn = self.get_connection_with_tls_context(
|
| 634 |
+
request, verify, proxies=proxies, cert=cert
|
| 635 |
+
)
|
| 636 |
+
except LocationValueError as e:
|
| 637 |
+
raise InvalidURL(e, request=request)
|
| 638 |
+
|
| 639 |
+
self.cert_verify(conn, request.url, verify, cert)
|
| 640 |
+
url = self.request_url(request, proxies)
|
| 641 |
+
self.add_headers(
|
| 642 |
+
request,
|
| 643 |
+
stream=stream,
|
| 644 |
+
timeout=timeout,
|
| 645 |
+
verify=verify,
|
| 646 |
+
cert=cert,
|
| 647 |
+
proxies=proxies,
|
| 648 |
+
)
|
| 649 |
+
|
| 650 |
+
chunked = not (request.body is None or "Content-Length" in request.headers)
|
| 651 |
+
|
| 652 |
+
if isinstance(timeout, tuple):
|
| 653 |
+
try:
|
| 654 |
+
connect, read = timeout
|
| 655 |
+
timeout = TimeoutSauce(connect=connect, read=read)
|
| 656 |
+
except ValueError:
|
| 657 |
+
raise ValueError(
|
| 658 |
+
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
|
| 659 |
+
f"or a single float to set both timeouts to the same value."
|
| 660 |
+
)
|
| 661 |
+
elif isinstance(timeout, TimeoutSauce):
|
| 662 |
+
pass
|
| 663 |
+
else:
|
| 664 |
+
timeout = TimeoutSauce(connect=timeout, read=timeout)
|
| 665 |
+
|
| 666 |
+
try:
|
| 667 |
+
resp = conn.urlopen(
|
| 668 |
+
method=request.method,
|
| 669 |
+
url=url,
|
| 670 |
+
body=request.body,
|
| 671 |
+
headers=request.headers,
|
| 672 |
+
redirect=False,
|
| 673 |
+
assert_same_host=False,
|
| 674 |
+
preload_content=False,
|
| 675 |
+
decode_content=False,
|
| 676 |
+
retries=self.max_retries,
|
| 677 |
+
timeout=timeout,
|
| 678 |
+
chunked=chunked,
|
| 679 |
+
)
|
| 680 |
+
|
| 681 |
+
except (ProtocolError, OSError) as err:
|
| 682 |
+
raise ConnectionError(err, request=request)
|
| 683 |
+
|
| 684 |
+
except MaxRetryError as e:
|
| 685 |
+
if isinstance(e.reason, ConnectTimeoutError):
|
| 686 |
+
# TODO: Remove this in 3.0.0: see #2811
|
| 687 |
+
if not isinstance(e.reason, NewConnectionError):
|
| 688 |
+
raise ConnectTimeout(e, request=request)
|
| 689 |
+
|
| 690 |
+
if isinstance(e.reason, ResponseError):
|
| 691 |
+
raise RetryError(e, request=request)
|
| 692 |
+
|
| 693 |
+
if isinstance(e.reason, _ProxyError):
|
| 694 |
+
raise ProxyError(e, request=request)
|
| 695 |
+
|
| 696 |
+
if isinstance(e.reason, _SSLError):
|
| 697 |
+
# This branch is for urllib3 v1.22 and later.
|
| 698 |
+
raise SSLError(e, request=request)
|
| 699 |
+
|
| 700 |
+
raise ConnectionError(e, request=request)
|
| 701 |
+
|
| 702 |
+
except ClosedPoolError as e:
|
| 703 |
+
raise ConnectionError(e, request=request)
|
| 704 |
+
|
| 705 |
+
except _ProxyError as e:
|
| 706 |
+
raise ProxyError(e)
|
| 707 |
+
|
| 708 |
+
except (_SSLError, _HTTPError) as e:
|
| 709 |
+
if isinstance(e, _SSLError):
|
| 710 |
+
# This branch is for urllib3 versions earlier than v1.22
|
| 711 |
+
raise SSLError(e, request=request)
|
| 712 |
+
elif isinstance(e, ReadTimeoutError):
|
| 713 |
+
raise ReadTimeout(e, request=request)
|
| 714 |
+
elif isinstance(e, _InvalidHeader):
|
| 715 |
+
raise InvalidHeader(e, request=request)
|
| 716 |
+
else:
|
| 717 |
+
raise
|
| 718 |
+
|
| 719 |
+
return self.build_response(request, resp)
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/compat.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.compat
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module previously handled import compatibility issues
|
| 6 |
+
between Python 2 and Python 3. It remains for backwards
|
| 7 |
+
compatibility until the next major version.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import sys
|
| 11 |
+
|
| 12 |
+
# -------
|
| 13 |
+
# urllib3
|
| 14 |
+
# -------
|
| 15 |
+
from pip._vendor.urllib3 import __version__ as urllib3_version
|
| 16 |
+
|
| 17 |
+
# Detect which major version of urllib3 is being used.
|
| 18 |
+
try:
|
| 19 |
+
is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1
|
| 20 |
+
except (TypeError, AttributeError):
|
| 21 |
+
# If we can't discern a version, prefer old functionality.
|
| 22 |
+
is_urllib3_1 = True
|
| 23 |
+
|
| 24 |
+
# -------------------
|
| 25 |
+
# Character Detection
|
| 26 |
+
# -------------------
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _resolve_char_detection():
|
| 30 |
+
"""Find supported character detection libraries."""
|
| 31 |
+
chardet = None
|
| 32 |
+
return chardet
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
chardet = _resolve_char_detection()
|
| 36 |
+
|
| 37 |
+
# -------
|
| 38 |
+
# Pythons
|
| 39 |
+
# -------
|
| 40 |
+
|
| 41 |
+
# Syntax sugar.
|
| 42 |
+
_ver = sys.version_info
|
| 43 |
+
|
| 44 |
+
#: Python 2.x?
|
| 45 |
+
is_py2 = _ver[0] == 2
|
| 46 |
+
|
| 47 |
+
#: Python 3.x?
|
| 48 |
+
is_py3 = _ver[0] == 3
|
| 49 |
+
|
| 50 |
+
# Note: We've patched out simplejson support in pip because it prevents
|
| 51 |
+
# upgrading simplejson on Windows.
|
| 52 |
+
import json
|
| 53 |
+
from json import JSONDecodeError
|
| 54 |
+
|
| 55 |
+
# Keep OrderedDict for backwards compatibility.
|
| 56 |
+
from collections import OrderedDict
|
| 57 |
+
from collections.abc import Callable, Mapping, MutableMapping
|
| 58 |
+
from http import cookiejar as cookielib
|
| 59 |
+
from http.cookies import Morsel
|
| 60 |
+
from io import StringIO
|
| 61 |
+
|
| 62 |
+
# --------------
|
| 63 |
+
# Legacy Imports
|
| 64 |
+
# --------------
|
| 65 |
+
from urllib.parse import (
|
| 66 |
+
quote,
|
| 67 |
+
quote_plus,
|
| 68 |
+
unquote,
|
| 69 |
+
unquote_plus,
|
| 70 |
+
urldefrag,
|
| 71 |
+
urlencode,
|
| 72 |
+
urljoin,
|
| 73 |
+
urlparse,
|
| 74 |
+
urlsplit,
|
| 75 |
+
urlunparse,
|
| 76 |
+
)
|
| 77 |
+
from urllib.request import (
|
| 78 |
+
getproxies,
|
| 79 |
+
getproxies_environment,
|
| 80 |
+
parse_http_list,
|
| 81 |
+
proxy_bypass,
|
| 82 |
+
proxy_bypass_environment,
|
| 83 |
+
)
|
| 84 |
+
|
| 85 |
+
builtin_str = str
|
| 86 |
+
str = str
|
| 87 |
+
bytes = bytes
|
| 88 |
+
basestring = (str, bytes)
|
| 89 |
+
numeric_types = (int, float)
|
| 90 |
+
integer_types = (int,)
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/cookies.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.cookies
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
|
| 6 |
+
|
| 7 |
+
requests.utils imports from here, so be careful with imports.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import calendar
|
| 11 |
+
import copy
|
| 12 |
+
import time
|
| 13 |
+
|
| 14 |
+
from ._internal_utils import to_native_string
|
| 15 |
+
from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
|
| 16 |
+
|
| 17 |
+
try:
|
| 18 |
+
import threading
|
| 19 |
+
except ImportError:
|
| 20 |
+
import dummy_threading as threading
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class MockRequest:
|
| 24 |
+
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
|
| 25 |
+
|
| 26 |
+
The code in `http.cookiejar.CookieJar` expects this interface in order to correctly
|
| 27 |
+
manage cookie policies, i.e., determine whether a cookie can be set, given the
|
| 28 |
+
domains of the request and the cookie.
|
| 29 |
+
|
| 30 |
+
The original request object is read-only. The client is responsible for collecting
|
| 31 |
+
the new headers via `get_new_headers()` and interpreting them appropriately. You
|
| 32 |
+
probably want `get_cookie_header`, defined below.
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
def __init__(self, request):
|
| 36 |
+
self._r = request
|
| 37 |
+
self._new_headers = {}
|
| 38 |
+
self.type = urlparse(self._r.url).scheme
|
| 39 |
+
|
| 40 |
+
def get_type(self):
|
| 41 |
+
return self.type
|
| 42 |
+
|
| 43 |
+
def get_host(self):
|
| 44 |
+
return urlparse(self._r.url).netloc
|
| 45 |
+
|
| 46 |
+
def get_origin_req_host(self):
|
| 47 |
+
return self.get_host()
|
| 48 |
+
|
| 49 |
+
def get_full_url(self):
|
| 50 |
+
# Only return the response's URL if the user hadn't set the Host
|
| 51 |
+
# header
|
| 52 |
+
if not self._r.headers.get("Host"):
|
| 53 |
+
return self._r.url
|
| 54 |
+
# If they did set it, retrieve it and reconstruct the expected domain
|
| 55 |
+
host = to_native_string(self._r.headers["Host"], encoding="utf-8")
|
| 56 |
+
parsed = urlparse(self._r.url)
|
| 57 |
+
# Reconstruct the URL as we expect it
|
| 58 |
+
return urlunparse(
|
| 59 |
+
[
|
| 60 |
+
parsed.scheme,
|
| 61 |
+
host,
|
| 62 |
+
parsed.path,
|
| 63 |
+
parsed.params,
|
| 64 |
+
parsed.query,
|
| 65 |
+
parsed.fragment,
|
| 66 |
+
]
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
def is_unverifiable(self):
|
| 70 |
+
return True
|
| 71 |
+
|
| 72 |
+
def has_header(self, name):
|
| 73 |
+
return name in self._r.headers or name in self._new_headers
|
| 74 |
+
|
| 75 |
+
def get_header(self, name, default=None):
|
| 76 |
+
return self._r.headers.get(name, self._new_headers.get(name, default))
|
| 77 |
+
|
| 78 |
+
def add_header(self, key, val):
|
| 79 |
+
"""cookiejar has no legitimate use for this method; add it back if you find one."""
|
| 80 |
+
raise NotImplementedError(
|
| 81 |
+
"Cookie headers should be added with add_unredirected_header()"
|
| 82 |
+
)
|
| 83 |
+
|
| 84 |
+
def add_unredirected_header(self, name, value):
|
| 85 |
+
self._new_headers[name] = value
|
| 86 |
+
|
| 87 |
+
def get_new_headers(self):
|
| 88 |
+
return self._new_headers
|
| 89 |
+
|
| 90 |
+
@property
|
| 91 |
+
def unverifiable(self):
|
| 92 |
+
return self.is_unverifiable()
|
| 93 |
+
|
| 94 |
+
@property
|
| 95 |
+
def origin_req_host(self):
|
| 96 |
+
return self.get_origin_req_host()
|
| 97 |
+
|
| 98 |
+
@property
|
| 99 |
+
def host(self):
|
| 100 |
+
return self.get_host()
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class MockResponse:
|
| 104 |
+
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
|
| 105 |
+
|
| 106 |
+
...what? Basically, expose the parsed HTTP headers from the server response
|
| 107 |
+
the way `http.cookiejar` expects to see them.
|
| 108 |
+
"""
|
| 109 |
+
|
| 110 |
+
def __init__(self, headers):
|
| 111 |
+
"""Make a MockResponse for `cookiejar` to read.
|
| 112 |
+
|
| 113 |
+
:param headers: a httplib.HTTPMessage or analogous carrying the headers
|
| 114 |
+
"""
|
| 115 |
+
self._headers = headers
|
| 116 |
+
|
| 117 |
+
def info(self):
|
| 118 |
+
return self._headers
|
| 119 |
+
|
| 120 |
+
def getheaders(self, name):
|
| 121 |
+
self._headers.getheaders(name)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def extract_cookies_to_jar(jar, request, response):
|
| 125 |
+
"""Extract the cookies from the response into a CookieJar.
|
| 126 |
+
|
| 127 |
+
:param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
|
| 128 |
+
:param request: our own requests.Request object
|
| 129 |
+
:param response: urllib3.HTTPResponse object
|
| 130 |
+
"""
|
| 131 |
+
if not (hasattr(response, "_original_response") and response._original_response):
|
| 132 |
+
return
|
| 133 |
+
# the _original_response field is the wrapped httplib.HTTPResponse object,
|
| 134 |
+
req = MockRequest(request)
|
| 135 |
+
# pull out the HTTPMessage with the headers and put it in the mock:
|
| 136 |
+
res = MockResponse(response._original_response.msg)
|
| 137 |
+
jar.extract_cookies(res, req)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def get_cookie_header(jar, request):
|
| 141 |
+
"""
|
| 142 |
+
Produce an appropriate Cookie header string to be sent with `request`, or None.
|
| 143 |
+
|
| 144 |
+
:rtype: str
|
| 145 |
+
"""
|
| 146 |
+
r = MockRequest(request)
|
| 147 |
+
jar.add_cookie_header(r)
|
| 148 |
+
return r.get_new_headers().get("Cookie")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
|
| 152 |
+
"""Unsets a cookie by name, by default over all domains and paths.
|
| 153 |
+
|
| 154 |
+
Wraps CookieJar.clear(), is O(n).
|
| 155 |
+
"""
|
| 156 |
+
clearables = []
|
| 157 |
+
for cookie in cookiejar:
|
| 158 |
+
if cookie.name != name:
|
| 159 |
+
continue
|
| 160 |
+
if domain is not None and domain != cookie.domain:
|
| 161 |
+
continue
|
| 162 |
+
if path is not None and path != cookie.path:
|
| 163 |
+
continue
|
| 164 |
+
clearables.append((cookie.domain, cookie.path, cookie.name))
|
| 165 |
+
|
| 166 |
+
for domain, path, name in clearables:
|
| 167 |
+
cookiejar.clear(domain, path, name)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class CookieConflictError(RuntimeError):
|
| 171 |
+
"""There are two cookies that meet the criteria specified in the cookie jar.
|
| 172 |
+
Use .get and .set and include domain and path args in order to be more specific.
|
| 173 |
+
"""
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
|
| 177 |
+
"""Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
|
| 178 |
+
interface.
|
| 179 |
+
|
| 180 |
+
This is the CookieJar we create by default for requests and sessions that
|
| 181 |
+
don't specify one, since some clients may expect response.cookies and
|
| 182 |
+
session.cookies to support dict operations.
|
| 183 |
+
|
| 184 |
+
Requests does not use the dict interface internally; it's just for
|
| 185 |
+
compatibility with external client code. All requests code should work
|
| 186 |
+
out of the box with externally provided instances of ``CookieJar``, e.g.
|
| 187 |
+
``LWPCookieJar`` and ``FileCookieJar``.
|
| 188 |
+
|
| 189 |
+
Unlike a regular CookieJar, this class is pickleable.
|
| 190 |
+
|
| 191 |
+
.. warning:: dictionary operations that are normally O(1) may be O(n).
|
| 192 |
+
"""
|
| 193 |
+
|
| 194 |
+
def get(self, name, default=None, domain=None, path=None):
|
| 195 |
+
"""Dict-like get() that also supports optional domain and path args in
|
| 196 |
+
order to resolve naming collisions from using one cookie jar over
|
| 197 |
+
multiple domains.
|
| 198 |
+
|
| 199 |
+
.. warning:: operation is O(n), not O(1).
|
| 200 |
+
"""
|
| 201 |
+
try:
|
| 202 |
+
return self._find_no_duplicates(name, domain, path)
|
| 203 |
+
except KeyError:
|
| 204 |
+
return default
|
| 205 |
+
|
| 206 |
+
def set(self, name, value, **kwargs):
|
| 207 |
+
"""Dict-like set() that also supports optional domain and path args in
|
| 208 |
+
order to resolve naming collisions from using one cookie jar over
|
| 209 |
+
multiple domains.
|
| 210 |
+
"""
|
| 211 |
+
# support client code that unsets cookies by assignment of a None value:
|
| 212 |
+
if value is None:
|
| 213 |
+
remove_cookie_by_name(
|
| 214 |
+
self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
|
| 215 |
+
)
|
| 216 |
+
return
|
| 217 |
+
|
| 218 |
+
if isinstance(value, Morsel):
|
| 219 |
+
c = morsel_to_cookie(value)
|
| 220 |
+
else:
|
| 221 |
+
c = create_cookie(name, value, **kwargs)
|
| 222 |
+
self.set_cookie(c)
|
| 223 |
+
return c
|
| 224 |
+
|
| 225 |
+
def iterkeys(self):
|
| 226 |
+
"""Dict-like iterkeys() that returns an iterator of names of cookies
|
| 227 |
+
from the jar.
|
| 228 |
+
|
| 229 |
+
.. seealso:: itervalues() and iteritems().
|
| 230 |
+
"""
|
| 231 |
+
for cookie in iter(self):
|
| 232 |
+
yield cookie.name
|
| 233 |
+
|
| 234 |
+
def keys(self):
|
| 235 |
+
"""Dict-like keys() that returns a list of names of cookies from the
|
| 236 |
+
jar.
|
| 237 |
+
|
| 238 |
+
.. seealso:: values() and items().
|
| 239 |
+
"""
|
| 240 |
+
return list(self.iterkeys())
|
| 241 |
+
|
| 242 |
+
def itervalues(self):
|
| 243 |
+
"""Dict-like itervalues() that returns an iterator of values of cookies
|
| 244 |
+
from the jar.
|
| 245 |
+
|
| 246 |
+
.. seealso:: iterkeys() and iteritems().
|
| 247 |
+
"""
|
| 248 |
+
for cookie in iter(self):
|
| 249 |
+
yield cookie.value
|
| 250 |
+
|
| 251 |
+
def values(self):
|
| 252 |
+
"""Dict-like values() that returns a list of values of cookies from the
|
| 253 |
+
jar.
|
| 254 |
+
|
| 255 |
+
.. seealso:: keys() and items().
|
| 256 |
+
"""
|
| 257 |
+
return list(self.itervalues())
|
| 258 |
+
|
| 259 |
+
def iteritems(self):
|
| 260 |
+
"""Dict-like iteritems() that returns an iterator of name-value tuples
|
| 261 |
+
from the jar.
|
| 262 |
+
|
| 263 |
+
.. seealso:: iterkeys() and itervalues().
|
| 264 |
+
"""
|
| 265 |
+
for cookie in iter(self):
|
| 266 |
+
yield cookie.name, cookie.value
|
| 267 |
+
|
| 268 |
+
def items(self):
|
| 269 |
+
"""Dict-like items() that returns a list of name-value tuples from the
|
| 270 |
+
jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
|
| 271 |
+
vanilla python dict of key value pairs.
|
| 272 |
+
|
| 273 |
+
.. seealso:: keys() and values().
|
| 274 |
+
"""
|
| 275 |
+
return list(self.iteritems())
|
| 276 |
+
|
| 277 |
+
def list_domains(self):
|
| 278 |
+
"""Utility method to list all the domains in the jar."""
|
| 279 |
+
domains = []
|
| 280 |
+
for cookie in iter(self):
|
| 281 |
+
if cookie.domain not in domains:
|
| 282 |
+
domains.append(cookie.domain)
|
| 283 |
+
return domains
|
| 284 |
+
|
| 285 |
+
def list_paths(self):
|
| 286 |
+
"""Utility method to list all the paths in the jar."""
|
| 287 |
+
paths = []
|
| 288 |
+
for cookie in iter(self):
|
| 289 |
+
if cookie.path not in paths:
|
| 290 |
+
paths.append(cookie.path)
|
| 291 |
+
return paths
|
| 292 |
+
|
| 293 |
+
def multiple_domains(self):
|
| 294 |
+
"""Returns True if there are multiple domains in the jar.
|
| 295 |
+
Returns False otherwise.
|
| 296 |
+
|
| 297 |
+
:rtype: bool
|
| 298 |
+
"""
|
| 299 |
+
domains = []
|
| 300 |
+
for cookie in iter(self):
|
| 301 |
+
if cookie.domain is not None and cookie.domain in domains:
|
| 302 |
+
return True
|
| 303 |
+
domains.append(cookie.domain)
|
| 304 |
+
return False # there is only one domain in jar
|
| 305 |
+
|
| 306 |
+
def get_dict(self, domain=None, path=None):
|
| 307 |
+
"""Takes as an argument an optional domain and path and returns a plain
|
| 308 |
+
old Python dict of name-value pairs of cookies that meet the
|
| 309 |
+
requirements.
|
| 310 |
+
|
| 311 |
+
:rtype: dict
|
| 312 |
+
"""
|
| 313 |
+
dictionary = {}
|
| 314 |
+
for cookie in iter(self):
|
| 315 |
+
if (domain is None or cookie.domain == domain) and (
|
| 316 |
+
path is None or cookie.path == path
|
| 317 |
+
):
|
| 318 |
+
dictionary[cookie.name] = cookie.value
|
| 319 |
+
return dictionary
|
| 320 |
+
|
| 321 |
+
def __contains__(self, name):
|
| 322 |
+
try:
|
| 323 |
+
return super().__contains__(name)
|
| 324 |
+
except CookieConflictError:
|
| 325 |
+
return True
|
| 326 |
+
|
| 327 |
+
def __getitem__(self, name):
|
| 328 |
+
"""Dict-like __getitem__() for compatibility with client code. Throws
|
| 329 |
+
exception if there are more than one cookie with name. In that case,
|
| 330 |
+
use the more explicit get() method instead.
|
| 331 |
+
|
| 332 |
+
.. warning:: operation is O(n), not O(1).
|
| 333 |
+
"""
|
| 334 |
+
return self._find_no_duplicates(name)
|
| 335 |
+
|
| 336 |
+
def __setitem__(self, name, value):
|
| 337 |
+
"""Dict-like __setitem__ for compatibility with client code. Throws
|
| 338 |
+
exception if there is already a cookie of that name in the jar. In that
|
| 339 |
+
case, use the more explicit set() method instead.
|
| 340 |
+
"""
|
| 341 |
+
self.set(name, value)
|
| 342 |
+
|
| 343 |
+
def __delitem__(self, name):
|
| 344 |
+
"""Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
|
| 345 |
+
``remove_cookie_by_name()``.
|
| 346 |
+
"""
|
| 347 |
+
remove_cookie_by_name(self, name)
|
| 348 |
+
|
| 349 |
+
def set_cookie(self, cookie, *args, **kwargs):
|
| 350 |
+
if (
|
| 351 |
+
hasattr(cookie.value, "startswith")
|
| 352 |
+
and cookie.value.startswith('"')
|
| 353 |
+
and cookie.value.endswith('"')
|
| 354 |
+
):
|
| 355 |
+
cookie.value = cookie.value.replace('\\"', "")
|
| 356 |
+
return super().set_cookie(cookie, *args, **kwargs)
|
| 357 |
+
|
| 358 |
+
def update(self, other):
|
| 359 |
+
"""Updates this jar with cookies from another CookieJar or dict-like"""
|
| 360 |
+
if isinstance(other, cookielib.CookieJar):
|
| 361 |
+
for cookie in other:
|
| 362 |
+
self.set_cookie(copy.copy(cookie))
|
| 363 |
+
else:
|
| 364 |
+
super().update(other)
|
| 365 |
+
|
| 366 |
+
def _find(self, name, domain=None, path=None):
|
| 367 |
+
"""Requests uses this method internally to get cookie values.
|
| 368 |
+
|
| 369 |
+
If there are conflicting cookies, _find arbitrarily chooses one.
|
| 370 |
+
See _find_no_duplicates if you want an exception thrown if there are
|
| 371 |
+
conflicting cookies.
|
| 372 |
+
|
| 373 |
+
:param name: a string containing name of cookie
|
| 374 |
+
:param domain: (optional) string containing domain of cookie
|
| 375 |
+
:param path: (optional) string containing path of cookie
|
| 376 |
+
:return: cookie.value
|
| 377 |
+
"""
|
| 378 |
+
for cookie in iter(self):
|
| 379 |
+
if cookie.name == name:
|
| 380 |
+
if domain is None or cookie.domain == domain:
|
| 381 |
+
if path is None or cookie.path == path:
|
| 382 |
+
return cookie.value
|
| 383 |
+
|
| 384 |
+
raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
|
| 385 |
+
|
| 386 |
+
def _find_no_duplicates(self, name, domain=None, path=None):
|
| 387 |
+
"""Both ``__get_item__`` and ``get`` call this function: it's never
|
| 388 |
+
used elsewhere in Requests.
|
| 389 |
+
|
| 390 |
+
:param name: a string containing name of cookie
|
| 391 |
+
:param domain: (optional) string containing domain of cookie
|
| 392 |
+
:param path: (optional) string containing path of cookie
|
| 393 |
+
:raises KeyError: if cookie is not found
|
| 394 |
+
:raises CookieConflictError: if there are multiple cookies
|
| 395 |
+
that match name and optionally domain and path
|
| 396 |
+
:return: cookie.value
|
| 397 |
+
"""
|
| 398 |
+
toReturn = None
|
| 399 |
+
for cookie in iter(self):
|
| 400 |
+
if cookie.name == name:
|
| 401 |
+
if domain is None or cookie.domain == domain:
|
| 402 |
+
if path is None or cookie.path == path:
|
| 403 |
+
if toReturn is not None:
|
| 404 |
+
# if there are multiple cookies that meet passed in criteria
|
| 405 |
+
raise CookieConflictError(
|
| 406 |
+
f"There are multiple cookies with name, {name!r}"
|
| 407 |
+
)
|
| 408 |
+
# we will eventually return this as long as no cookie conflict
|
| 409 |
+
toReturn = cookie.value
|
| 410 |
+
|
| 411 |
+
if toReturn:
|
| 412 |
+
return toReturn
|
| 413 |
+
raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
|
| 414 |
+
|
| 415 |
+
def __getstate__(self):
|
| 416 |
+
"""Unlike a normal CookieJar, this class is pickleable."""
|
| 417 |
+
state = self.__dict__.copy()
|
| 418 |
+
# remove the unpickleable RLock object
|
| 419 |
+
state.pop("_cookies_lock")
|
| 420 |
+
return state
|
| 421 |
+
|
| 422 |
+
def __setstate__(self, state):
|
| 423 |
+
"""Unlike a normal CookieJar, this class is pickleable."""
|
| 424 |
+
self.__dict__.update(state)
|
| 425 |
+
if "_cookies_lock" not in self.__dict__:
|
| 426 |
+
self._cookies_lock = threading.RLock()
|
| 427 |
+
|
| 428 |
+
def copy(self):
|
| 429 |
+
"""Return a copy of this RequestsCookieJar."""
|
| 430 |
+
new_cj = RequestsCookieJar()
|
| 431 |
+
new_cj.set_policy(self.get_policy())
|
| 432 |
+
new_cj.update(self)
|
| 433 |
+
return new_cj
|
| 434 |
+
|
| 435 |
+
def get_policy(self):
|
| 436 |
+
"""Return the CookiePolicy instance used."""
|
| 437 |
+
return self._policy
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def _copy_cookie_jar(jar):
|
| 441 |
+
if jar is None:
|
| 442 |
+
return None
|
| 443 |
+
|
| 444 |
+
if hasattr(jar, "copy"):
|
| 445 |
+
# We're dealing with an instance of RequestsCookieJar
|
| 446 |
+
return jar.copy()
|
| 447 |
+
# We're dealing with a generic CookieJar instance
|
| 448 |
+
new_jar = copy.copy(jar)
|
| 449 |
+
new_jar.clear()
|
| 450 |
+
for cookie in jar:
|
| 451 |
+
new_jar.set_cookie(copy.copy(cookie))
|
| 452 |
+
return new_jar
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
def create_cookie(name, value, **kwargs):
|
| 456 |
+
"""Make a cookie from underspecified parameters.
|
| 457 |
+
|
| 458 |
+
By default, the pair of `name` and `value` will be set for the domain ''
|
| 459 |
+
and sent on every request (this is sometimes called a "supercookie").
|
| 460 |
+
"""
|
| 461 |
+
result = {
|
| 462 |
+
"version": 0,
|
| 463 |
+
"name": name,
|
| 464 |
+
"value": value,
|
| 465 |
+
"port": None,
|
| 466 |
+
"domain": "",
|
| 467 |
+
"path": "/",
|
| 468 |
+
"secure": False,
|
| 469 |
+
"expires": None,
|
| 470 |
+
"discard": True,
|
| 471 |
+
"comment": None,
|
| 472 |
+
"comment_url": None,
|
| 473 |
+
"rest": {"HttpOnly": None},
|
| 474 |
+
"rfc2109": False,
|
| 475 |
+
}
|
| 476 |
+
|
| 477 |
+
badargs = set(kwargs) - set(result)
|
| 478 |
+
if badargs:
|
| 479 |
+
raise TypeError(
|
| 480 |
+
f"create_cookie() got unexpected keyword arguments: {list(badargs)}"
|
| 481 |
+
)
|
| 482 |
+
|
| 483 |
+
result.update(kwargs)
|
| 484 |
+
result["port_specified"] = bool(result["port"])
|
| 485 |
+
result["domain_specified"] = bool(result["domain"])
|
| 486 |
+
result["domain_initial_dot"] = result["domain"].startswith(".")
|
| 487 |
+
result["path_specified"] = bool(result["path"])
|
| 488 |
+
|
| 489 |
+
return cookielib.Cookie(**result)
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def morsel_to_cookie(morsel):
|
| 493 |
+
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
|
| 494 |
+
|
| 495 |
+
expires = None
|
| 496 |
+
if morsel["max-age"]:
|
| 497 |
+
try:
|
| 498 |
+
expires = int(time.time() + int(morsel["max-age"]))
|
| 499 |
+
except ValueError:
|
| 500 |
+
raise TypeError(f"max-age: {morsel['max-age']} must be integer")
|
| 501 |
+
elif morsel["expires"]:
|
| 502 |
+
time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
|
| 503 |
+
expires = calendar.timegm(time.strptime(morsel["expires"], time_template))
|
| 504 |
+
return create_cookie(
|
| 505 |
+
comment=morsel["comment"],
|
| 506 |
+
comment_url=bool(morsel["comment"]),
|
| 507 |
+
discard=False,
|
| 508 |
+
domain=morsel["domain"],
|
| 509 |
+
expires=expires,
|
| 510 |
+
name=morsel.key,
|
| 511 |
+
path=morsel["path"],
|
| 512 |
+
port=None,
|
| 513 |
+
rest={"HttpOnly": morsel["httponly"]},
|
| 514 |
+
rfc2109=False,
|
| 515 |
+
secure=bool(morsel["secure"]),
|
| 516 |
+
value=morsel.value,
|
| 517 |
+
version=morsel["version"] or 0,
|
| 518 |
+
)
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
|
| 522 |
+
"""Returns a CookieJar from a key/value dictionary.
|
| 523 |
+
|
| 524 |
+
:param cookie_dict: Dict of key/values to insert into CookieJar.
|
| 525 |
+
:param cookiejar: (optional) A cookiejar to add the cookies to.
|
| 526 |
+
:param overwrite: (optional) If False, will not replace cookies
|
| 527 |
+
already in the jar with new ones.
|
| 528 |
+
:rtype: CookieJar
|
| 529 |
+
"""
|
| 530 |
+
if cookiejar is None:
|
| 531 |
+
cookiejar = RequestsCookieJar()
|
| 532 |
+
|
| 533 |
+
if cookie_dict is not None:
|
| 534 |
+
names_from_jar = [cookie.name for cookie in cookiejar]
|
| 535 |
+
for name in cookie_dict:
|
| 536 |
+
if overwrite or (name not in names_from_jar):
|
| 537 |
+
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
|
| 538 |
+
|
| 539 |
+
return cookiejar
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
def merge_cookies(cookiejar, cookies):
|
| 543 |
+
"""Add cookies to cookiejar and returns a merged CookieJar.
|
| 544 |
+
|
| 545 |
+
:param cookiejar: CookieJar object to add the cookies to.
|
| 546 |
+
:param cookies: Dictionary or CookieJar object to be added.
|
| 547 |
+
:rtype: CookieJar
|
| 548 |
+
"""
|
| 549 |
+
if not isinstance(cookiejar, cookielib.CookieJar):
|
| 550 |
+
raise ValueError("You can only merge into CookieJar")
|
| 551 |
+
|
| 552 |
+
if isinstance(cookies, dict):
|
| 553 |
+
cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)
|
| 554 |
+
elif isinstance(cookies, cookielib.CookieJar):
|
| 555 |
+
try:
|
| 556 |
+
cookiejar.update(cookies)
|
| 557 |
+
except AttributeError:
|
| 558 |
+
for cookie_in_jar in cookies:
|
| 559 |
+
cookiejar.set_cookie(cookie_in_jar)
|
| 560 |
+
|
| 561 |
+
return cookiejar
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/exceptions.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.exceptions
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the set of Requests' exceptions.
|
| 6 |
+
"""
|
| 7 |
+
from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError
|
| 8 |
+
|
| 9 |
+
from .compat import JSONDecodeError as CompatJSONDecodeError
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class RequestException(IOError):
|
| 13 |
+
"""There was an ambiguous exception that occurred while handling your
|
| 14 |
+
request.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
def __init__(self, *args, **kwargs):
|
| 18 |
+
"""Initialize RequestException with `request` and `response` objects."""
|
| 19 |
+
response = kwargs.pop("response", None)
|
| 20 |
+
self.response = response
|
| 21 |
+
self.request = kwargs.pop("request", None)
|
| 22 |
+
if response is not None and not self.request and hasattr(response, "request"):
|
| 23 |
+
self.request = self.response.request
|
| 24 |
+
super().__init__(*args, **kwargs)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class InvalidJSONError(RequestException):
|
| 28 |
+
"""A JSON error occurred."""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
|
| 32 |
+
"""Couldn't decode the text into json"""
|
| 33 |
+
|
| 34 |
+
def __init__(self, *args, **kwargs):
|
| 35 |
+
"""
|
| 36 |
+
Construct the JSONDecodeError instance first with all
|
| 37 |
+
args. Then use it's args to construct the IOError so that
|
| 38 |
+
the json specific args aren't used as IOError specific args
|
| 39 |
+
and the error message from JSONDecodeError is preserved.
|
| 40 |
+
"""
|
| 41 |
+
CompatJSONDecodeError.__init__(self, *args)
|
| 42 |
+
InvalidJSONError.__init__(self, *self.args, **kwargs)
|
| 43 |
+
|
| 44 |
+
def __reduce__(self):
|
| 45 |
+
"""
|
| 46 |
+
The __reduce__ method called when pickling the object must
|
| 47 |
+
be the one from the JSONDecodeError (be it json/simplejson)
|
| 48 |
+
as it expects all the arguments for instantiation, not just
|
| 49 |
+
one like the IOError, and the MRO would by default call the
|
| 50 |
+
__reduce__ method from the IOError due to the inheritance order.
|
| 51 |
+
"""
|
| 52 |
+
return CompatJSONDecodeError.__reduce__(self)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class HTTPError(RequestException):
|
| 56 |
+
"""An HTTP error occurred."""
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class ConnectionError(RequestException):
|
| 60 |
+
"""A Connection error occurred."""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class ProxyError(ConnectionError):
|
| 64 |
+
"""A proxy error occurred."""
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class SSLError(ConnectionError):
|
| 68 |
+
"""An SSL error occurred."""
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class Timeout(RequestException):
|
| 72 |
+
"""The request timed out.
|
| 73 |
+
|
| 74 |
+
Catching this error will catch both
|
| 75 |
+
:exc:`~requests.exceptions.ConnectTimeout` and
|
| 76 |
+
:exc:`~requests.exceptions.ReadTimeout` errors.
|
| 77 |
+
"""
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class ConnectTimeout(ConnectionError, Timeout):
|
| 81 |
+
"""The request timed out while trying to connect to the remote server.
|
| 82 |
+
|
| 83 |
+
Requests that produced this error are safe to retry.
|
| 84 |
+
"""
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class ReadTimeout(Timeout):
|
| 88 |
+
"""The server did not send any data in the allotted amount of time."""
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class URLRequired(RequestException):
|
| 92 |
+
"""A valid URL is required to make a request."""
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class TooManyRedirects(RequestException):
|
| 96 |
+
"""Too many redirects."""
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class MissingSchema(RequestException, ValueError):
|
| 100 |
+
"""The URL scheme (e.g. http or https) is missing."""
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class InvalidSchema(RequestException, ValueError):
|
| 104 |
+
"""The URL scheme provided is either invalid or unsupported."""
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class InvalidURL(RequestException, ValueError):
|
| 108 |
+
"""The URL provided was somehow invalid."""
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class InvalidHeader(RequestException, ValueError):
|
| 112 |
+
"""The header value provided was somehow invalid."""
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class InvalidProxyURL(InvalidURL):
|
| 116 |
+
"""The proxy URL provided is invalid."""
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
class ChunkedEncodingError(RequestException):
|
| 120 |
+
"""The server declared chunked encoding but sent an invalid chunk."""
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class ContentDecodingError(RequestException, BaseHTTPError):
|
| 124 |
+
"""Failed to decode response content."""
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class StreamConsumedError(RequestException, TypeError):
|
| 128 |
+
"""The content for this response was already consumed."""
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class RetryError(RequestException):
|
| 132 |
+
"""Custom retries logic failed"""
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
class UnrewindableBodyError(RequestException):
|
| 136 |
+
"""Requests encountered an error when trying to rewind a body."""
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
# Warnings
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
class RequestsWarning(Warning):
|
| 143 |
+
"""Base warning for Requests."""
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
class FileModeWarning(RequestsWarning, DeprecationWarning):
|
| 147 |
+
"""A file was opened in text mode, but Requests determined its binary length."""
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class RequestsDependencyWarning(RequestsWarning):
|
| 151 |
+
"""An imported dependency doesn't match the expected version range."""
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/help.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module containing bug report helper(s)."""
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import platform
|
| 5 |
+
import ssl
|
| 6 |
+
import sys
|
| 7 |
+
|
| 8 |
+
from pip._vendor import idna
|
| 9 |
+
from pip._vendor import urllib3
|
| 10 |
+
|
| 11 |
+
from . import __version__ as requests_version
|
| 12 |
+
|
| 13 |
+
charset_normalizer = None
|
| 14 |
+
chardet = None
|
| 15 |
+
|
| 16 |
+
try:
|
| 17 |
+
from pip._vendor.urllib3.contrib import pyopenssl
|
| 18 |
+
except ImportError:
|
| 19 |
+
pyopenssl = None
|
| 20 |
+
OpenSSL = None
|
| 21 |
+
cryptography = None
|
| 22 |
+
else:
|
| 23 |
+
import cryptography
|
| 24 |
+
import OpenSSL
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _implementation():
|
| 28 |
+
"""Return a dict with the Python implementation and version.
|
| 29 |
+
|
| 30 |
+
Provide both the name and the version of the Python implementation
|
| 31 |
+
currently running. For example, on CPython 3.10.3 it will return
|
| 32 |
+
{'name': 'CPython', 'version': '3.10.3'}.
|
| 33 |
+
|
| 34 |
+
This function works best on CPython and PyPy: in particular, it probably
|
| 35 |
+
doesn't work for Jython or IronPython. Future investigation should be done
|
| 36 |
+
to work out the correct shape of the code for those platforms.
|
| 37 |
+
"""
|
| 38 |
+
implementation = platform.python_implementation()
|
| 39 |
+
|
| 40 |
+
if implementation == "CPython":
|
| 41 |
+
implementation_version = platform.python_version()
|
| 42 |
+
elif implementation == "PyPy":
|
| 43 |
+
implementation_version = "{}.{}.{}".format(
|
| 44 |
+
sys.pypy_version_info.major,
|
| 45 |
+
sys.pypy_version_info.minor,
|
| 46 |
+
sys.pypy_version_info.micro,
|
| 47 |
+
)
|
| 48 |
+
if sys.pypy_version_info.releaselevel != "final":
|
| 49 |
+
implementation_version = "".join(
|
| 50 |
+
[implementation_version, sys.pypy_version_info.releaselevel]
|
| 51 |
+
)
|
| 52 |
+
elif implementation == "Jython":
|
| 53 |
+
implementation_version = platform.python_version() # Complete Guess
|
| 54 |
+
elif implementation == "IronPython":
|
| 55 |
+
implementation_version = platform.python_version() # Complete Guess
|
| 56 |
+
else:
|
| 57 |
+
implementation_version = "Unknown"
|
| 58 |
+
|
| 59 |
+
return {"name": implementation, "version": implementation_version}
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def info():
|
| 63 |
+
"""Generate information for a bug report."""
|
| 64 |
+
try:
|
| 65 |
+
platform_info = {
|
| 66 |
+
"system": platform.system(),
|
| 67 |
+
"release": platform.release(),
|
| 68 |
+
}
|
| 69 |
+
except OSError:
|
| 70 |
+
platform_info = {
|
| 71 |
+
"system": "Unknown",
|
| 72 |
+
"release": "Unknown",
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
implementation_info = _implementation()
|
| 76 |
+
urllib3_info = {"version": urllib3.__version__}
|
| 77 |
+
charset_normalizer_info = {"version": None}
|
| 78 |
+
chardet_info = {"version": None}
|
| 79 |
+
if charset_normalizer:
|
| 80 |
+
charset_normalizer_info = {"version": charset_normalizer.__version__}
|
| 81 |
+
if chardet:
|
| 82 |
+
chardet_info = {"version": chardet.__version__}
|
| 83 |
+
|
| 84 |
+
pyopenssl_info = {
|
| 85 |
+
"version": None,
|
| 86 |
+
"openssl_version": "",
|
| 87 |
+
}
|
| 88 |
+
if OpenSSL:
|
| 89 |
+
pyopenssl_info = {
|
| 90 |
+
"version": OpenSSL.__version__,
|
| 91 |
+
"openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
|
| 92 |
+
}
|
| 93 |
+
cryptography_info = {
|
| 94 |
+
"version": getattr(cryptography, "__version__", ""),
|
| 95 |
+
}
|
| 96 |
+
idna_info = {
|
| 97 |
+
"version": getattr(idna, "__version__", ""),
|
| 98 |
+
}
|
| 99 |
+
|
| 100 |
+
system_ssl = ssl.OPENSSL_VERSION_NUMBER
|
| 101 |
+
system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}
|
| 102 |
+
|
| 103 |
+
return {
|
| 104 |
+
"platform": platform_info,
|
| 105 |
+
"implementation": implementation_info,
|
| 106 |
+
"system_ssl": system_ssl_info,
|
| 107 |
+
"using_pyopenssl": pyopenssl is not None,
|
| 108 |
+
"using_charset_normalizer": chardet is None,
|
| 109 |
+
"pyOpenSSL": pyopenssl_info,
|
| 110 |
+
"urllib3": urllib3_info,
|
| 111 |
+
"chardet": chardet_info,
|
| 112 |
+
"charset_normalizer": charset_normalizer_info,
|
| 113 |
+
"cryptography": cryptography_info,
|
| 114 |
+
"idna": idna_info,
|
| 115 |
+
"requests": {
|
| 116 |
+
"version": requests_version,
|
| 117 |
+
},
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def main():
|
| 122 |
+
"""Pretty-print the bug information as JSON."""
|
| 123 |
+
print(json.dumps(info(), sort_keys=True, indent=2))
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
if __name__ == "__main__":
|
| 127 |
+
main()
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/hooks.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.hooks
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides the capabilities for the Requests hooks system.
|
| 6 |
+
|
| 7 |
+
Available hooks:
|
| 8 |
+
|
| 9 |
+
``response``:
|
| 10 |
+
The response generated from a Request.
|
| 11 |
+
"""
|
| 12 |
+
HOOKS = ["response"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def default_hooks():
|
| 16 |
+
return {event: [] for event in HOOKS}
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# TODO: response is the only one
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def dispatch_hook(key, hooks, hook_data, **kwargs):
|
| 23 |
+
"""Dispatches a hook dictionary on a given piece of data."""
|
| 24 |
+
hooks = hooks or {}
|
| 25 |
+
hooks = hooks.get(key)
|
| 26 |
+
if hooks:
|
| 27 |
+
if hasattr(hooks, "__call__"):
|
| 28 |
+
hooks = [hooks]
|
| 29 |
+
for hook in hooks:
|
| 30 |
+
_hook_data = hook(hook_data, **kwargs)
|
| 31 |
+
if _hook_data is not None:
|
| 32 |
+
hook_data = _hook_data
|
| 33 |
+
return hook_data
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/models.py
ADDED
|
@@ -0,0 +1,1039 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.models
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the primary objects that power Requests.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import datetime
|
| 9 |
+
|
| 10 |
+
# Import encoding now, to avoid implicit import later.
|
| 11 |
+
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
|
| 12 |
+
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
|
| 13 |
+
import encodings.idna # noqa: F401
|
| 14 |
+
from io import UnsupportedOperation
|
| 15 |
+
|
| 16 |
+
from pip._vendor.urllib3.exceptions import (
|
| 17 |
+
DecodeError,
|
| 18 |
+
LocationParseError,
|
| 19 |
+
ProtocolError,
|
| 20 |
+
ReadTimeoutError,
|
| 21 |
+
SSLError,
|
| 22 |
+
)
|
| 23 |
+
from pip._vendor.urllib3.fields import RequestField
|
| 24 |
+
from pip._vendor.urllib3.filepost import encode_multipart_formdata
|
| 25 |
+
from pip._vendor.urllib3.util import parse_url
|
| 26 |
+
|
| 27 |
+
from ._internal_utils import to_native_string, unicode_is_ascii
|
| 28 |
+
from .auth import HTTPBasicAuth
|
| 29 |
+
from .compat import (
|
| 30 |
+
Callable,
|
| 31 |
+
JSONDecodeError,
|
| 32 |
+
Mapping,
|
| 33 |
+
basestring,
|
| 34 |
+
builtin_str,
|
| 35 |
+
chardet,
|
| 36 |
+
cookielib,
|
| 37 |
+
)
|
| 38 |
+
from .compat import json as complexjson
|
| 39 |
+
from .compat import urlencode, urlsplit, urlunparse
|
| 40 |
+
from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
|
| 41 |
+
from .exceptions import (
|
| 42 |
+
ChunkedEncodingError,
|
| 43 |
+
ConnectionError,
|
| 44 |
+
ContentDecodingError,
|
| 45 |
+
HTTPError,
|
| 46 |
+
InvalidJSONError,
|
| 47 |
+
InvalidURL,
|
| 48 |
+
)
|
| 49 |
+
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
|
| 50 |
+
from .exceptions import MissingSchema
|
| 51 |
+
from .exceptions import SSLError as RequestsSSLError
|
| 52 |
+
from .exceptions import StreamConsumedError
|
| 53 |
+
from .hooks import default_hooks
|
| 54 |
+
from .status_codes import codes
|
| 55 |
+
from .structures import CaseInsensitiveDict
|
| 56 |
+
from .utils import (
|
| 57 |
+
check_header_validity,
|
| 58 |
+
get_auth_from_url,
|
| 59 |
+
guess_filename,
|
| 60 |
+
guess_json_utf,
|
| 61 |
+
iter_slices,
|
| 62 |
+
parse_header_links,
|
| 63 |
+
requote_uri,
|
| 64 |
+
stream_decode_response_unicode,
|
| 65 |
+
super_len,
|
| 66 |
+
to_key_val_list,
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
#: The set of HTTP status codes that indicate an automatically
|
| 70 |
+
#: processable redirect.
|
| 71 |
+
REDIRECT_STATI = (
|
| 72 |
+
codes.moved, # 301
|
| 73 |
+
codes.found, # 302
|
| 74 |
+
codes.other, # 303
|
| 75 |
+
codes.temporary_redirect, # 307
|
| 76 |
+
codes.permanent_redirect, # 308
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
DEFAULT_REDIRECT_LIMIT = 30
|
| 80 |
+
CONTENT_CHUNK_SIZE = 10 * 1024
|
| 81 |
+
ITER_CHUNK_SIZE = 512
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class RequestEncodingMixin:
|
| 85 |
+
@property
|
| 86 |
+
def path_url(self):
|
| 87 |
+
"""Build the path URL to use."""
|
| 88 |
+
|
| 89 |
+
url = []
|
| 90 |
+
|
| 91 |
+
p = urlsplit(self.url)
|
| 92 |
+
|
| 93 |
+
path = p.path
|
| 94 |
+
if not path:
|
| 95 |
+
path = "/"
|
| 96 |
+
|
| 97 |
+
url.append(path)
|
| 98 |
+
|
| 99 |
+
query = p.query
|
| 100 |
+
if query:
|
| 101 |
+
url.append("?")
|
| 102 |
+
url.append(query)
|
| 103 |
+
|
| 104 |
+
return "".join(url)
|
| 105 |
+
|
| 106 |
+
@staticmethod
|
| 107 |
+
def _encode_params(data):
|
| 108 |
+
"""Encode parameters in a piece of data.
|
| 109 |
+
|
| 110 |
+
Will successfully encode parameters when passed as a dict or a list of
|
| 111 |
+
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
|
| 112 |
+
if parameters are supplied as a dict.
|
| 113 |
+
"""
|
| 114 |
+
|
| 115 |
+
if isinstance(data, (str, bytes)):
|
| 116 |
+
return data
|
| 117 |
+
elif hasattr(data, "read"):
|
| 118 |
+
return data
|
| 119 |
+
elif hasattr(data, "__iter__"):
|
| 120 |
+
result = []
|
| 121 |
+
for k, vs in to_key_val_list(data):
|
| 122 |
+
if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
|
| 123 |
+
vs = [vs]
|
| 124 |
+
for v in vs:
|
| 125 |
+
if v is not None:
|
| 126 |
+
result.append(
|
| 127 |
+
(
|
| 128 |
+
k.encode("utf-8") if isinstance(k, str) else k,
|
| 129 |
+
v.encode("utf-8") if isinstance(v, str) else v,
|
| 130 |
+
)
|
| 131 |
+
)
|
| 132 |
+
return urlencode(result, doseq=True)
|
| 133 |
+
else:
|
| 134 |
+
return data
|
| 135 |
+
|
| 136 |
+
@staticmethod
|
| 137 |
+
def _encode_files(files, data):
|
| 138 |
+
"""Build the body for a multipart/form-data request.
|
| 139 |
+
|
| 140 |
+
Will successfully encode files when passed as a dict or a list of
|
| 141 |
+
tuples. Order is retained if data is a list of tuples but arbitrary
|
| 142 |
+
if parameters are supplied as a dict.
|
| 143 |
+
The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
|
| 144 |
+
or 4-tuples (filename, fileobj, contentype, custom_headers).
|
| 145 |
+
"""
|
| 146 |
+
if not files:
|
| 147 |
+
raise ValueError("Files must be provided.")
|
| 148 |
+
elif isinstance(data, basestring):
|
| 149 |
+
raise ValueError("Data must not be a string.")
|
| 150 |
+
|
| 151 |
+
new_fields = []
|
| 152 |
+
fields = to_key_val_list(data or {})
|
| 153 |
+
files = to_key_val_list(files or {})
|
| 154 |
+
|
| 155 |
+
for field, val in fields:
|
| 156 |
+
if isinstance(val, basestring) or not hasattr(val, "__iter__"):
|
| 157 |
+
val = [val]
|
| 158 |
+
for v in val:
|
| 159 |
+
if v is not None:
|
| 160 |
+
# Don't call str() on bytestrings: in Py3 it all goes wrong.
|
| 161 |
+
if not isinstance(v, bytes):
|
| 162 |
+
v = str(v)
|
| 163 |
+
|
| 164 |
+
new_fields.append(
|
| 165 |
+
(
|
| 166 |
+
field.decode("utf-8")
|
| 167 |
+
if isinstance(field, bytes)
|
| 168 |
+
else field,
|
| 169 |
+
v.encode("utf-8") if isinstance(v, str) else v,
|
| 170 |
+
)
|
| 171 |
+
)
|
| 172 |
+
|
| 173 |
+
for k, v in files:
|
| 174 |
+
# support for explicit filename
|
| 175 |
+
ft = None
|
| 176 |
+
fh = None
|
| 177 |
+
if isinstance(v, (tuple, list)):
|
| 178 |
+
if len(v) == 2:
|
| 179 |
+
fn, fp = v
|
| 180 |
+
elif len(v) == 3:
|
| 181 |
+
fn, fp, ft = v
|
| 182 |
+
else:
|
| 183 |
+
fn, fp, ft, fh = v
|
| 184 |
+
else:
|
| 185 |
+
fn = guess_filename(v) or k
|
| 186 |
+
fp = v
|
| 187 |
+
|
| 188 |
+
if isinstance(fp, (str, bytes, bytearray)):
|
| 189 |
+
fdata = fp
|
| 190 |
+
elif hasattr(fp, "read"):
|
| 191 |
+
fdata = fp.read()
|
| 192 |
+
elif fp is None:
|
| 193 |
+
continue
|
| 194 |
+
else:
|
| 195 |
+
fdata = fp
|
| 196 |
+
|
| 197 |
+
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
|
| 198 |
+
rf.make_multipart(content_type=ft)
|
| 199 |
+
new_fields.append(rf)
|
| 200 |
+
|
| 201 |
+
body, content_type = encode_multipart_formdata(new_fields)
|
| 202 |
+
|
| 203 |
+
return body, content_type
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class RequestHooksMixin:
|
| 207 |
+
def register_hook(self, event, hook):
|
| 208 |
+
"""Properly register a hook."""
|
| 209 |
+
|
| 210 |
+
if event not in self.hooks:
|
| 211 |
+
raise ValueError(f'Unsupported event specified, with event name "{event}"')
|
| 212 |
+
|
| 213 |
+
if isinstance(hook, Callable):
|
| 214 |
+
self.hooks[event].append(hook)
|
| 215 |
+
elif hasattr(hook, "__iter__"):
|
| 216 |
+
self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
|
| 217 |
+
|
| 218 |
+
def deregister_hook(self, event, hook):
|
| 219 |
+
"""Deregister a previously registered hook.
|
| 220 |
+
Returns True if the hook existed, False if not.
|
| 221 |
+
"""
|
| 222 |
+
|
| 223 |
+
try:
|
| 224 |
+
self.hooks[event].remove(hook)
|
| 225 |
+
return True
|
| 226 |
+
except ValueError:
|
| 227 |
+
return False
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
class Request(RequestHooksMixin):
|
| 231 |
+
"""A user-created :class:`Request <Request>` object.
|
| 232 |
+
|
| 233 |
+
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
|
| 234 |
+
|
| 235 |
+
:param method: HTTP method to use.
|
| 236 |
+
:param url: URL to send.
|
| 237 |
+
:param headers: dictionary of headers to send.
|
| 238 |
+
:param files: dictionary of {filename: fileobject} files to multipart upload.
|
| 239 |
+
:param data: the body to attach to the request. If a dictionary or
|
| 240 |
+
list of tuples ``[(key, value)]`` is provided, form-encoding will
|
| 241 |
+
take place.
|
| 242 |
+
:param json: json for the body to attach to the request (if files or data is not specified).
|
| 243 |
+
:param params: URL parameters to append to the URL. If a dictionary or
|
| 244 |
+
list of tuples ``[(key, value)]`` is provided, form-encoding will
|
| 245 |
+
take place.
|
| 246 |
+
:param auth: Auth handler or (user, pass) tuple.
|
| 247 |
+
:param cookies: dictionary or CookieJar of cookies to attach to this request.
|
| 248 |
+
:param hooks: dictionary of callback hooks, for internal usage.
|
| 249 |
+
|
| 250 |
+
Usage::
|
| 251 |
+
|
| 252 |
+
>>> import requests
|
| 253 |
+
>>> req = requests.Request('GET', 'https://httpbin.org/get')
|
| 254 |
+
>>> req.prepare()
|
| 255 |
+
<PreparedRequest [GET]>
|
| 256 |
+
"""
|
| 257 |
+
|
| 258 |
+
def __init__(
|
| 259 |
+
self,
|
| 260 |
+
method=None,
|
| 261 |
+
url=None,
|
| 262 |
+
headers=None,
|
| 263 |
+
files=None,
|
| 264 |
+
data=None,
|
| 265 |
+
params=None,
|
| 266 |
+
auth=None,
|
| 267 |
+
cookies=None,
|
| 268 |
+
hooks=None,
|
| 269 |
+
json=None,
|
| 270 |
+
):
|
| 271 |
+
# Default empty dicts for dict params.
|
| 272 |
+
data = [] if data is None else data
|
| 273 |
+
files = [] if files is None else files
|
| 274 |
+
headers = {} if headers is None else headers
|
| 275 |
+
params = {} if params is None else params
|
| 276 |
+
hooks = {} if hooks is None else hooks
|
| 277 |
+
|
| 278 |
+
self.hooks = default_hooks()
|
| 279 |
+
for k, v in list(hooks.items()):
|
| 280 |
+
self.register_hook(event=k, hook=v)
|
| 281 |
+
|
| 282 |
+
self.method = method
|
| 283 |
+
self.url = url
|
| 284 |
+
self.headers = headers
|
| 285 |
+
self.files = files
|
| 286 |
+
self.data = data
|
| 287 |
+
self.json = json
|
| 288 |
+
self.params = params
|
| 289 |
+
self.auth = auth
|
| 290 |
+
self.cookies = cookies
|
| 291 |
+
|
| 292 |
+
def __repr__(self):
|
| 293 |
+
return f"<Request [{self.method}]>"
|
| 294 |
+
|
| 295 |
+
def prepare(self):
|
| 296 |
+
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
|
| 297 |
+
p = PreparedRequest()
|
| 298 |
+
p.prepare(
|
| 299 |
+
method=self.method,
|
| 300 |
+
url=self.url,
|
| 301 |
+
headers=self.headers,
|
| 302 |
+
files=self.files,
|
| 303 |
+
data=self.data,
|
| 304 |
+
json=self.json,
|
| 305 |
+
params=self.params,
|
| 306 |
+
auth=self.auth,
|
| 307 |
+
cookies=self.cookies,
|
| 308 |
+
hooks=self.hooks,
|
| 309 |
+
)
|
| 310 |
+
return p
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
|
| 314 |
+
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
|
| 315 |
+
containing the exact bytes that will be sent to the server.
|
| 316 |
+
|
| 317 |
+
Instances are generated from a :class:`Request <Request>` object, and
|
| 318 |
+
should not be instantiated manually; doing so may produce undesirable
|
| 319 |
+
effects.
|
| 320 |
+
|
| 321 |
+
Usage::
|
| 322 |
+
|
| 323 |
+
>>> import requests
|
| 324 |
+
>>> req = requests.Request('GET', 'https://httpbin.org/get')
|
| 325 |
+
>>> r = req.prepare()
|
| 326 |
+
>>> r
|
| 327 |
+
<PreparedRequest [GET]>
|
| 328 |
+
|
| 329 |
+
>>> s = requests.Session()
|
| 330 |
+
>>> s.send(r)
|
| 331 |
+
<Response [200]>
|
| 332 |
+
"""
|
| 333 |
+
|
| 334 |
+
def __init__(self):
|
| 335 |
+
#: HTTP verb to send to the server.
|
| 336 |
+
self.method = None
|
| 337 |
+
#: HTTP URL to send the request to.
|
| 338 |
+
self.url = None
|
| 339 |
+
#: dictionary of HTTP headers.
|
| 340 |
+
self.headers = None
|
| 341 |
+
# The `CookieJar` used to create the Cookie header will be stored here
|
| 342 |
+
# after prepare_cookies is called
|
| 343 |
+
self._cookies = None
|
| 344 |
+
#: request body to send to the server.
|
| 345 |
+
self.body = None
|
| 346 |
+
#: dictionary of callback hooks, for internal usage.
|
| 347 |
+
self.hooks = default_hooks()
|
| 348 |
+
#: integer denoting starting position of a readable file-like body.
|
| 349 |
+
self._body_position = None
|
| 350 |
+
|
| 351 |
+
def prepare(
|
| 352 |
+
self,
|
| 353 |
+
method=None,
|
| 354 |
+
url=None,
|
| 355 |
+
headers=None,
|
| 356 |
+
files=None,
|
| 357 |
+
data=None,
|
| 358 |
+
params=None,
|
| 359 |
+
auth=None,
|
| 360 |
+
cookies=None,
|
| 361 |
+
hooks=None,
|
| 362 |
+
json=None,
|
| 363 |
+
):
|
| 364 |
+
"""Prepares the entire request with the given parameters."""
|
| 365 |
+
|
| 366 |
+
self.prepare_method(method)
|
| 367 |
+
self.prepare_url(url, params)
|
| 368 |
+
self.prepare_headers(headers)
|
| 369 |
+
self.prepare_cookies(cookies)
|
| 370 |
+
self.prepare_body(data, files, json)
|
| 371 |
+
self.prepare_auth(auth, url)
|
| 372 |
+
|
| 373 |
+
# Note that prepare_auth must be last to enable authentication schemes
|
| 374 |
+
# such as OAuth to work on a fully prepared request.
|
| 375 |
+
|
| 376 |
+
# This MUST go after prepare_auth. Authenticators could add a hook
|
| 377 |
+
self.prepare_hooks(hooks)
|
| 378 |
+
|
| 379 |
+
def __repr__(self):
|
| 380 |
+
return f"<PreparedRequest [{self.method}]>"
|
| 381 |
+
|
| 382 |
+
def copy(self):
|
| 383 |
+
p = PreparedRequest()
|
| 384 |
+
p.method = self.method
|
| 385 |
+
p.url = self.url
|
| 386 |
+
p.headers = self.headers.copy() if self.headers is not None else None
|
| 387 |
+
p._cookies = _copy_cookie_jar(self._cookies)
|
| 388 |
+
p.body = self.body
|
| 389 |
+
p.hooks = self.hooks
|
| 390 |
+
p._body_position = self._body_position
|
| 391 |
+
return p
|
| 392 |
+
|
| 393 |
+
def prepare_method(self, method):
|
| 394 |
+
"""Prepares the given HTTP method."""
|
| 395 |
+
self.method = method
|
| 396 |
+
if self.method is not None:
|
| 397 |
+
self.method = to_native_string(self.method.upper())
|
| 398 |
+
|
| 399 |
+
@staticmethod
|
| 400 |
+
def _get_idna_encoded_host(host):
|
| 401 |
+
from pip._vendor import idna
|
| 402 |
+
|
| 403 |
+
try:
|
| 404 |
+
host = idna.encode(host, uts46=True).decode("utf-8")
|
| 405 |
+
except idna.IDNAError:
|
| 406 |
+
raise UnicodeError
|
| 407 |
+
return host
|
| 408 |
+
|
| 409 |
+
def prepare_url(self, url, params):
|
| 410 |
+
"""Prepares the given HTTP URL."""
|
| 411 |
+
#: Accept objects that have string representations.
|
| 412 |
+
#: We're unable to blindly call unicode/str functions
|
| 413 |
+
#: as this will include the bytestring indicator (b'')
|
| 414 |
+
#: on python 3.x.
|
| 415 |
+
#: https://github.com/psf/requests/pull/2238
|
| 416 |
+
if isinstance(url, bytes):
|
| 417 |
+
url = url.decode("utf8")
|
| 418 |
+
else:
|
| 419 |
+
url = str(url)
|
| 420 |
+
|
| 421 |
+
# Remove leading whitespaces from url
|
| 422 |
+
url = url.lstrip()
|
| 423 |
+
|
| 424 |
+
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
|
| 425 |
+
# `data` etc to work around exceptions from `url_parse`, which
|
| 426 |
+
# handles RFC 3986 only.
|
| 427 |
+
if ":" in url and not url.lower().startswith("http"):
|
| 428 |
+
self.url = url
|
| 429 |
+
return
|
| 430 |
+
|
| 431 |
+
# Support for unicode domain names and paths.
|
| 432 |
+
try:
|
| 433 |
+
scheme, auth, host, port, path, query, fragment = parse_url(url)
|
| 434 |
+
except LocationParseError as e:
|
| 435 |
+
raise InvalidURL(*e.args)
|
| 436 |
+
|
| 437 |
+
if not scheme:
|
| 438 |
+
raise MissingSchema(
|
| 439 |
+
f"Invalid URL {url!r}: No scheme supplied. "
|
| 440 |
+
f"Perhaps you meant https://{url}?"
|
| 441 |
+
)
|
| 442 |
+
|
| 443 |
+
if not host:
|
| 444 |
+
raise InvalidURL(f"Invalid URL {url!r}: No host supplied")
|
| 445 |
+
|
| 446 |
+
# In general, we want to try IDNA encoding the hostname if the string contains
|
| 447 |
+
# non-ASCII characters. This allows users to automatically get the correct IDNA
|
| 448 |
+
# behaviour. For strings containing only ASCII characters, we need to also verify
|
| 449 |
+
# it doesn't start with a wildcard (*), before allowing the unencoded hostname.
|
| 450 |
+
if not unicode_is_ascii(host):
|
| 451 |
+
try:
|
| 452 |
+
host = self._get_idna_encoded_host(host)
|
| 453 |
+
except UnicodeError:
|
| 454 |
+
raise InvalidURL("URL has an invalid label.")
|
| 455 |
+
elif host.startswith(("*", ".")):
|
| 456 |
+
raise InvalidURL("URL has an invalid label.")
|
| 457 |
+
|
| 458 |
+
# Carefully reconstruct the network location
|
| 459 |
+
netloc = auth or ""
|
| 460 |
+
if netloc:
|
| 461 |
+
netloc += "@"
|
| 462 |
+
netloc += host
|
| 463 |
+
if port:
|
| 464 |
+
netloc += f":{port}"
|
| 465 |
+
|
| 466 |
+
# Bare domains aren't valid URLs.
|
| 467 |
+
if not path:
|
| 468 |
+
path = "/"
|
| 469 |
+
|
| 470 |
+
if isinstance(params, (str, bytes)):
|
| 471 |
+
params = to_native_string(params)
|
| 472 |
+
|
| 473 |
+
enc_params = self._encode_params(params)
|
| 474 |
+
if enc_params:
|
| 475 |
+
if query:
|
| 476 |
+
query = f"{query}&{enc_params}"
|
| 477 |
+
else:
|
| 478 |
+
query = enc_params
|
| 479 |
+
|
| 480 |
+
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
|
| 481 |
+
self.url = url
|
| 482 |
+
|
| 483 |
+
def prepare_headers(self, headers):
|
| 484 |
+
"""Prepares the given HTTP headers."""
|
| 485 |
+
|
| 486 |
+
self.headers = CaseInsensitiveDict()
|
| 487 |
+
if headers:
|
| 488 |
+
for header in headers.items():
|
| 489 |
+
# Raise exception on invalid header value.
|
| 490 |
+
check_header_validity(header)
|
| 491 |
+
name, value = header
|
| 492 |
+
self.headers[to_native_string(name)] = value
|
| 493 |
+
|
| 494 |
+
def prepare_body(self, data, files, json=None):
|
| 495 |
+
"""Prepares the given HTTP body data."""
|
| 496 |
+
|
| 497 |
+
# Check if file, fo, generator, iterator.
|
| 498 |
+
# If not, run through normal process.
|
| 499 |
+
|
| 500 |
+
# Nottin' on you.
|
| 501 |
+
body = None
|
| 502 |
+
content_type = None
|
| 503 |
+
|
| 504 |
+
if not data and json is not None:
|
| 505 |
+
# urllib3 requires a bytes-like body. Python 2's json.dumps
|
| 506 |
+
# provides this natively, but Python 3 gives a Unicode string.
|
| 507 |
+
content_type = "application/json"
|
| 508 |
+
|
| 509 |
+
try:
|
| 510 |
+
body = complexjson.dumps(json, allow_nan=False)
|
| 511 |
+
except ValueError as ve:
|
| 512 |
+
raise InvalidJSONError(ve, request=self)
|
| 513 |
+
|
| 514 |
+
if not isinstance(body, bytes):
|
| 515 |
+
body = body.encode("utf-8")
|
| 516 |
+
|
| 517 |
+
is_stream = all(
|
| 518 |
+
[
|
| 519 |
+
hasattr(data, "__iter__"),
|
| 520 |
+
not isinstance(data, (basestring, list, tuple, Mapping)),
|
| 521 |
+
]
|
| 522 |
+
)
|
| 523 |
+
|
| 524 |
+
if is_stream:
|
| 525 |
+
try:
|
| 526 |
+
length = super_len(data)
|
| 527 |
+
except (TypeError, AttributeError, UnsupportedOperation):
|
| 528 |
+
length = None
|
| 529 |
+
|
| 530 |
+
body = data
|
| 531 |
+
|
| 532 |
+
if getattr(body, "tell", None) is not None:
|
| 533 |
+
# Record the current file position before reading.
|
| 534 |
+
# This will allow us to rewind a file in the event
|
| 535 |
+
# of a redirect.
|
| 536 |
+
try:
|
| 537 |
+
self._body_position = body.tell()
|
| 538 |
+
except OSError:
|
| 539 |
+
# This differentiates from None, allowing us to catch
|
| 540 |
+
# a failed `tell()` later when trying to rewind the body
|
| 541 |
+
self._body_position = object()
|
| 542 |
+
|
| 543 |
+
if files:
|
| 544 |
+
raise NotImplementedError(
|
| 545 |
+
"Streamed bodies and files are mutually exclusive."
|
| 546 |
+
)
|
| 547 |
+
|
| 548 |
+
if length:
|
| 549 |
+
self.headers["Content-Length"] = builtin_str(length)
|
| 550 |
+
else:
|
| 551 |
+
self.headers["Transfer-Encoding"] = "chunked"
|
| 552 |
+
else:
|
| 553 |
+
# Multi-part file uploads.
|
| 554 |
+
if files:
|
| 555 |
+
(body, content_type) = self._encode_files(files, data)
|
| 556 |
+
else:
|
| 557 |
+
if data:
|
| 558 |
+
body = self._encode_params(data)
|
| 559 |
+
if isinstance(data, basestring) or hasattr(data, "read"):
|
| 560 |
+
content_type = None
|
| 561 |
+
else:
|
| 562 |
+
content_type = "application/x-www-form-urlencoded"
|
| 563 |
+
|
| 564 |
+
self.prepare_content_length(body)
|
| 565 |
+
|
| 566 |
+
# Add content-type if it wasn't explicitly provided.
|
| 567 |
+
if content_type and ("content-type" not in self.headers):
|
| 568 |
+
self.headers["Content-Type"] = content_type
|
| 569 |
+
|
| 570 |
+
self.body = body
|
| 571 |
+
|
| 572 |
+
def prepare_content_length(self, body):
|
| 573 |
+
"""Prepare Content-Length header based on request method and body"""
|
| 574 |
+
if body is not None:
|
| 575 |
+
length = super_len(body)
|
| 576 |
+
if length:
|
| 577 |
+
# If length exists, set it. Otherwise, we fallback
|
| 578 |
+
# to Transfer-Encoding: chunked.
|
| 579 |
+
self.headers["Content-Length"] = builtin_str(length)
|
| 580 |
+
elif (
|
| 581 |
+
self.method not in ("GET", "HEAD")
|
| 582 |
+
and self.headers.get("Content-Length") is None
|
| 583 |
+
):
|
| 584 |
+
# Set Content-Length to 0 for methods that can have a body
|
| 585 |
+
# but don't provide one. (i.e. not GET or HEAD)
|
| 586 |
+
self.headers["Content-Length"] = "0"
|
| 587 |
+
|
| 588 |
+
def prepare_auth(self, auth, url=""):
|
| 589 |
+
"""Prepares the given HTTP auth data."""
|
| 590 |
+
|
| 591 |
+
# If no Auth is explicitly provided, extract it from the URL first.
|
| 592 |
+
if auth is None:
|
| 593 |
+
url_auth = get_auth_from_url(self.url)
|
| 594 |
+
auth = url_auth if any(url_auth) else None
|
| 595 |
+
|
| 596 |
+
if auth:
|
| 597 |
+
if isinstance(auth, tuple) and len(auth) == 2:
|
| 598 |
+
# special-case basic HTTP auth
|
| 599 |
+
auth = HTTPBasicAuth(*auth)
|
| 600 |
+
|
| 601 |
+
# Allow auth to make its changes.
|
| 602 |
+
r = auth(self)
|
| 603 |
+
|
| 604 |
+
# Update self to reflect the auth changes.
|
| 605 |
+
self.__dict__.update(r.__dict__)
|
| 606 |
+
|
| 607 |
+
# Recompute Content-Length
|
| 608 |
+
self.prepare_content_length(self.body)
|
| 609 |
+
|
| 610 |
+
def prepare_cookies(self, cookies):
|
| 611 |
+
"""Prepares the given HTTP cookie data.
|
| 612 |
+
|
| 613 |
+
This function eventually generates a ``Cookie`` header from the
|
| 614 |
+
given cookies using cookielib. Due to cookielib's design, the header
|
| 615 |
+
will not be regenerated if it already exists, meaning this function
|
| 616 |
+
can only be called once for the life of the
|
| 617 |
+
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
|
| 618 |
+
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
|
| 619 |
+
header is removed beforehand.
|
| 620 |
+
"""
|
| 621 |
+
if isinstance(cookies, cookielib.CookieJar):
|
| 622 |
+
self._cookies = cookies
|
| 623 |
+
else:
|
| 624 |
+
self._cookies = cookiejar_from_dict(cookies)
|
| 625 |
+
|
| 626 |
+
cookie_header = get_cookie_header(self._cookies, self)
|
| 627 |
+
if cookie_header is not None:
|
| 628 |
+
self.headers["Cookie"] = cookie_header
|
| 629 |
+
|
| 630 |
+
def prepare_hooks(self, hooks):
|
| 631 |
+
"""Prepares the given hooks."""
|
| 632 |
+
# hooks can be passed as None to the prepare method and to this
|
| 633 |
+
# method. To prevent iterating over None, simply use an empty list
|
| 634 |
+
# if hooks is False-y
|
| 635 |
+
hooks = hooks or []
|
| 636 |
+
for event in hooks:
|
| 637 |
+
self.register_hook(event, hooks[event])
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
class Response:
|
| 641 |
+
"""The :class:`Response <Response>` object, which contains a
|
| 642 |
+
server's response to an HTTP request.
|
| 643 |
+
"""
|
| 644 |
+
|
| 645 |
+
__attrs__ = [
|
| 646 |
+
"_content",
|
| 647 |
+
"status_code",
|
| 648 |
+
"headers",
|
| 649 |
+
"url",
|
| 650 |
+
"history",
|
| 651 |
+
"encoding",
|
| 652 |
+
"reason",
|
| 653 |
+
"cookies",
|
| 654 |
+
"elapsed",
|
| 655 |
+
"request",
|
| 656 |
+
]
|
| 657 |
+
|
| 658 |
+
def __init__(self):
|
| 659 |
+
self._content = False
|
| 660 |
+
self._content_consumed = False
|
| 661 |
+
self._next = None
|
| 662 |
+
|
| 663 |
+
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
|
| 664 |
+
self.status_code = None
|
| 665 |
+
|
| 666 |
+
#: Case-insensitive Dictionary of Response Headers.
|
| 667 |
+
#: For example, ``headers['content-encoding']`` will return the
|
| 668 |
+
#: value of a ``'Content-Encoding'`` response header.
|
| 669 |
+
self.headers = CaseInsensitiveDict()
|
| 670 |
+
|
| 671 |
+
#: File-like object representation of response (for advanced usage).
|
| 672 |
+
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
|
| 673 |
+
#: This requirement does not apply for use internally to Requests.
|
| 674 |
+
self.raw = None
|
| 675 |
+
|
| 676 |
+
#: Final URL location of Response.
|
| 677 |
+
self.url = None
|
| 678 |
+
|
| 679 |
+
#: Encoding to decode with when accessing r.text.
|
| 680 |
+
self.encoding = None
|
| 681 |
+
|
| 682 |
+
#: A list of :class:`Response <Response>` objects from
|
| 683 |
+
#: the history of the Request. Any redirect responses will end
|
| 684 |
+
#: up here. The list is sorted from the oldest to the most recent request.
|
| 685 |
+
self.history = []
|
| 686 |
+
|
| 687 |
+
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
|
| 688 |
+
self.reason = None
|
| 689 |
+
|
| 690 |
+
#: A CookieJar of Cookies the server sent back.
|
| 691 |
+
self.cookies = cookiejar_from_dict({})
|
| 692 |
+
|
| 693 |
+
#: The amount of time elapsed between sending the request
|
| 694 |
+
#: and the arrival of the response (as a timedelta).
|
| 695 |
+
#: This property specifically measures the time taken between sending
|
| 696 |
+
#: the first byte of the request and finishing parsing the headers. It
|
| 697 |
+
#: is therefore unaffected by consuming the response content or the
|
| 698 |
+
#: value of the ``stream`` keyword argument.
|
| 699 |
+
self.elapsed = datetime.timedelta(0)
|
| 700 |
+
|
| 701 |
+
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
|
| 702 |
+
#: is a response.
|
| 703 |
+
self.request = None
|
| 704 |
+
|
| 705 |
+
def __enter__(self):
|
| 706 |
+
return self
|
| 707 |
+
|
| 708 |
+
def __exit__(self, *args):
|
| 709 |
+
self.close()
|
| 710 |
+
|
| 711 |
+
def __getstate__(self):
|
| 712 |
+
# Consume everything; accessing the content attribute makes
|
| 713 |
+
# sure the content has been fully read.
|
| 714 |
+
if not self._content_consumed:
|
| 715 |
+
self.content
|
| 716 |
+
|
| 717 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 718 |
+
|
| 719 |
+
def __setstate__(self, state):
|
| 720 |
+
for name, value in state.items():
|
| 721 |
+
setattr(self, name, value)
|
| 722 |
+
|
| 723 |
+
# pickled objects do not have .raw
|
| 724 |
+
setattr(self, "_content_consumed", True)
|
| 725 |
+
setattr(self, "raw", None)
|
| 726 |
+
|
| 727 |
+
def __repr__(self):
|
| 728 |
+
return f"<Response [{self.status_code}]>"
|
| 729 |
+
|
| 730 |
+
def __bool__(self):
|
| 731 |
+
"""Returns True if :attr:`status_code` is less than 400.
|
| 732 |
+
|
| 733 |
+
This attribute checks if the status code of the response is between
|
| 734 |
+
400 and 600 to see if there was a client error or a server error. If
|
| 735 |
+
the status code, is between 200 and 400, this will return True. This
|
| 736 |
+
is **not** a check to see if the response code is ``200 OK``.
|
| 737 |
+
"""
|
| 738 |
+
return self.ok
|
| 739 |
+
|
| 740 |
+
def __nonzero__(self):
|
| 741 |
+
"""Returns True if :attr:`status_code` is less than 400.
|
| 742 |
+
|
| 743 |
+
This attribute checks if the status code of the response is between
|
| 744 |
+
400 and 600 to see if there was a client error or a server error. If
|
| 745 |
+
the status code, is between 200 and 400, this will return True. This
|
| 746 |
+
is **not** a check to see if the response code is ``200 OK``.
|
| 747 |
+
"""
|
| 748 |
+
return self.ok
|
| 749 |
+
|
| 750 |
+
def __iter__(self):
|
| 751 |
+
"""Allows you to use a response as an iterator."""
|
| 752 |
+
return self.iter_content(128)
|
| 753 |
+
|
| 754 |
+
@property
|
| 755 |
+
def ok(self):
|
| 756 |
+
"""Returns True if :attr:`status_code` is less than 400, False if not.
|
| 757 |
+
|
| 758 |
+
This attribute checks if the status code of the response is between
|
| 759 |
+
400 and 600 to see if there was a client error or a server error. If
|
| 760 |
+
the status code is between 200 and 400, this will return True. This
|
| 761 |
+
is **not** a check to see if the response code is ``200 OK``.
|
| 762 |
+
"""
|
| 763 |
+
try:
|
| 764 |
+
self.raise_for_status()
|
| 765 |
+
except HTTPError:
|
| 766 |
+
return False
|
| 767 |
+
return True
|
| 768 |
+
|
| 769 |
+
@property
|
| 770 |
+
def is_redirect(self):
|
| 771 |
+
"""True if this Response is a well-formed HTTP redirect that could have
|
| 772 |
+
been processed automatically (by :meth:`Session.resolve_redirects`).
|
| 773 |
+
"""
|
| 774 |
+
return "location" in self.headers and self.status_code in REDIRECT_STATI
|
| 775 |
+
|
| 776 |
+
@property
|
| 777 |
+
def is_permanent_redirect(self):
|
| 778 |
+
"""True if this Response one of the permanent versions of redirect."""
|
| 779 |
+
return "location" in self.headers and self.status_code in (
|
| 780 |
+
codes.moved_permanently,
|
| 781 |
+
codes.permanent_redirect,
|
| 782 |
+
)
|
| 783 |
+
|
| 784 |
+
@property
|
| 785 |
+
def next(self):
|
| 786 |
+
"""Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
|
| 787 |
+
return self._next
|
| 788 |
+
|
| 789 |
+
@property
|
| 790 |
+
def apparent_encoding(self):
|
| 791 |
+
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
|
| 792 |
+
if chardet is not None:
|
| 793 |
+
return chardet.detect(self.content)["encoding"]
|
| 794 |
+
else:
|
| 795 |
+
# If no character detection library is available, we'll fall back
|
| 796 |
+
# to a standard Python utf-8 str.
|
| 797 |
+
return "utf-8"
|
| 798 |
+
|
| 799 |
+
def iter_content(self, chunk_size=1, decode_unicode=False):
|
| 800 |
+
"""Iterates over the response data. When stream=True is set on the
|
| 801 |
+
request, this avoids reading the content at once into memory for
|
| 802 |
+
large responses. The chunk size is the number of bytes it should
|
| 803 |
+
read into memory. This is not necessarily the length of each item
|
| 804 |
+
returned as decoding can take place.
|
| 805 |
+
|
| 806 |
+
chunk_size must be of type int or None. A value of None will
|
| 807 |
+
function differently depending on the value of `stream`.
|
| 808 |
+
stream=True will read data as it arrives in whatever size the
|
| 809 |
+
chunks are received. If stream=False, data is returned as
|
| 810 |
+
a single chunk.
|
| 811 |
+
|
| 812 |
+
If decode_unicode is True, content will be decoded using the best
|
| 813 |
+
available encoding based on the response.
|
| 814 |
+
"""
|
| 815 |
+
|
| 816 |
+
def generate():
|
| 817 |
+
# Special case for urllib3.
|
| 818 |
+
if hasattr(self.raw, "stream"):
|
| 819 |
+
try:
|
| 820 |
+
yield from self.raw.stream(chunk_size, decode_content=True)
|
| 821 |
+
except ProtocolError as e:
|
| 822 |
+
raise ChunkedEncodingError(e)
|
| 823 |
+
except DecodeError as e:
|
| 824 |
+
raise ContentDecodingError(e)
|
| 825 |
+
except ReadTimeoutError as e:
|
| 826 |
+
raise ConnectionError(e)
|
| 827 |
+
except SSLError as e:
|
| 828 |
+
raise RequestsSSLError(e)
|
| 829 |
+
else:
|
| 830 |
+
# Standard file-like object.
|
| 831 |
+
while True:
|
| 832 |
+
chunk = self.raw.read(chunk_size)
|
| 833 |
+
if not chunk:
|
| 834 |
+
break
|
| 835 |
+
yield chunk
|
| 836 |
+
|
| 837 |
+
self._content_consumed = True
|
| 838 |
+
|
| 839 |
+
if self._content_consumed and isinstance(self._content, bool):
|
| 840 |
+
raise StreamConsumedError()
|
| 841 |
+
elif chunk_size is not None and not isinstance(chunk_size, int):
|
| 842 |
+
raise TypeError(
|
| 843 |
+
f"chunk_size must be an int, it is instead a {type(chunk_size)}."
|
| 844 |
+
)
|
| 845 |
+
# simulate reading small chunks of the content
|
| 846 |
+
reused_chunks = iter_slices(self._content, chunk_size)
|
| 847 |
+
|
| 848 |
+
stream_chunks = generate()
|
| 849 |
+
|
| 850 |
+
chunks = reused_chunks if self._content_consumed else stream_chunks
|
| 851 |
+
|
| 852 |
+
if decode_unicode:
|
| 853 |
+
chunks = stream_decode_response_unicode(chunks, self)
|
| 854 |
+
|
| 855 |
+
return chunks
|
| 856 |
+
|
| 857 |
+
def iter_lines(
|
| 858 |
+
self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
|
| 859 |
+
):
|
| 860 |
+
"""Iterates over the response data, one line at a time. When
|
| 861 |
+
stream=True is set on the request, this avoids reading the
|
| 862 |
+
content at once into memory for large responses.
|
| 863 |
+
|
| 864 |
+
.. note:: This method is not reentrant safe.
|
| 865 |
+
"""
|
| 866 |
+
|
| 867 |
+
pending = None
|
| 868 |
+
|
| 869 |
+
for chunk in self.iter_content(
|
| 870 |
+
chunk_size=chunk_size, decode_unicode=decode_unicode
|
| 871 |
+
):
|
| 872 |
+
if pending is not None:
|
| 873 |
+
chunk = pending + chunk
|
| 874 |
+
|
| 875 |
+
if delimiter:
|
| 876 |
+
lines = chunk.split(delimiter)
|
| 877 |
+
else:
|
| 878 |
+
lines = chunk.splitlines()
|
| 879 |
+
|
| 880 |
+
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
|
| 881 |
+
pending = lines.pop()
|
| 882 |
+
else:
|
| 883 |
+
pending = None
|
| 884 |
+
|
| 885 |
+
yield from lines
|
| 886 |
+
|
| 887 |
+
if pending is not None:
|
| 888 |
+
yield pending
|
| 889 |
+
|
| 890 |
+
@property
|
| 891 |
+
def content(self):
|
| 892 |
+
"""Content of the response, in bytes."""
|
| 893 |
+
|
| 894 |
+
if self._content is False:
|
| 895 |
+
# Read the contents.
|
| 896 |
+
if self._content_consumed:
|
| 897 |
+
raise RuntimeError("The content for this response was already consumed")
|
| 898 |
+
|
| 899 |
+
if self.status_code == 0 or self.raw is None:
|
| 900 |
+
self._content = None
|
| 901 |
+
else:
|
| 902 |
+
self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
|
| 903 |
+
|
| 904 |
+
self._content_consumed = True
|
| 905 |
+
# don't need to release the connection; that's been handled by urllib3
|
| 906 |
+
# since we exhausted the data.
|
| 907 |
+
return self._content
|
| 908 |
+
|
| 909 |
+
@property
|
| 910 |
+
def text(self):
|
| 911 |
+
"""Content of the response, in unicode.
|
| 912 |
+
|
| 913 |
+
If Response.encoding is None, encoding will be guessed using
|
| 914 |
+
``charset_normalizer`` or ``chardet``.
|
| 915 |
+
|
| 916 |
+
The encoding of the response content is determined based solely on HTTP
|
| 917 |
+
headers, following RFC 2616 to the letter. If you can take advantage of
|
| 918 |
+
non-HTTP knowledge to make a better guess at the encoding, you should
|
| 919 |
+
set ``r.encoding`` appropriately before accessing this property.
|
| 920 |
+
"""
|
| 921 |
+
|
| 922 |
+
# Try charset from content-type
|
| 923 |
+
content = None
|
| 924 |
+
encoding = self.encoding
|
| 925 |
+
|
| 926 |
+
if not self.content:
|
| 927 |
+
return ""
|
| 928 |
+
|
| 929 |
+
# Fallback to auto-detected encoding.
|
| 930 |
+
if self.encoding is None:
|
| 931 |
+
encoding = self.apparent_encoding
|
| 932 |
+
|
| 933 |
+
# Decode unicode from given encoding.
|
| 934 |
+
try:
|
| 935 |
+
content = str(self.content, encoding, errors="replace")
|
| 936 |
+
except (LookupError, TypeError):
|
| 937 |
+
# A LookupError is raised if the encoding was not found which could
|
| 938 |
+
# indicate a misspelling or similar mistake.
|
| 939 |
+
#
|
| 940 |
+
# A TypeError can be raised if encoding is None
|
| 941 |
+
#
|
| 942 |
+
# So we try blindly encoding.
|
| 943 |
+
content = str(self.content, errors="replace")
|
| 944 |
+
|
| 945 |
+
return content
|
| 946 |
+
|
| 947 |
+
def json(self, **kwargs):
|
| 948 |
+
r"""Decodes the JSON response body (if any) as a Python object.
|
| 949 |
+
|
| 950 |
+
This may return a dictionary, list, etc. depending on what is in the response.
|
| 951 |
+
|
| 952 |
+
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
|
| 953 |
+
:raises requests.exceptions.JSONDecodeError: If the response body does not
|
| 954 |
+
contain valid json.
|
| 955 |
+
"""
|
| 956 |
+
|
| 957 |
+
if not self.encoding and self.content and len(self.content) > 3:
|
| 958 |
+
# No encoding set. JSON RFC 4627 section 3 states we should expect
|
| 959 |
+
# UTF-8, -16 or -32. Detect which one to use; If the detection or
|
| 960 |
+
# decoding fails, fall back to `self.text` (using charset_normalizer to make
|
| 961 |
+
# a best guess).
|
| 962 |
+
encoding = guess_json_utf(self.content)
|
| 963 |
+
if encoding is not None:
|
| 964 |
+
try:
|
| 965 |
+
return complexjson.loads(self.content.decode(encoding), **kwargs)
|
| 966 |
+
except UnicodeDecodeError:
|
| 967 |
+
# Wrong UTF codec detected; usually because it's not UTF-8
|
| 968 |
+
# but some other 8-bit codec. This is an RFC violation,
|
| 969 |
+
# and the server didn't bother to tell us what codec *was*
|
| 970 |
+
# used.
|
| 971 |
+
pass
|
| 972 |
+
except JSONDecodeError as e:
|
| 973 |
+
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
|
| 974 |
+
|
| 975 |
+
try:
|
| 976 |
+
return complexjson.loads(self.text, **kwargs)
|
| 977 |
+
except JSONDecodeError as e:
|
| 978 |
+
# Catch JSON-related errors and raise as requests.JSONDecodeError
|
| 979 |
+
# This aliases json.JSONDecodeError and simplejson.JSONDecodeError
|
| 980 |
+
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
|
| 981 |
+
|
| 982 |
+
@property
|
| 983 |
+
def links(self):
|
| 984 |
+
"""Returns the parsed header links of the response, if any."""
|
| 985 |
+
|
| 986 |
+
header = self.headers.get("link")
|
| 987 |
+
|
| 988 |
+
resolved_links = {}
|
| 989 |
+
|
| 990 |
+
if header:
|
| 991 |
+
links = parse_header_links(header)
|
| 992 |
+
|
| 993 |
+
for link in links:
|
| 994 |
+
key = link.get("rel") or link.get("url")
|
| 995 |
+
resolved_links[key] = link
|
| 996 |
+
|
| 997 |
+
return resolved_links
|
| 998 |
+
|
| 999 |
+
def raise_for_status(self):
|
| 1000 |
+
"""Raises :class:`HTTPError`, if one occurred."""
|
| 1001 |
+
|
| 1002 |
+
http_error_msg = ""
|
| 1003 |
+
if isinstance(self.reason, bytes):
|
| 1004 |
+
# We attempt to decode utf-8 first because some servers
|
| 1005 |
+
# choose to localize their reason strings. If the string
|
| 1006 |
+
# isn't utf-8, we fall back to iso-8859-1 for all other
|
| 1007 |
+
# encodings. (See PR #3538)
|
| 1008 |
+
try:
|
| 1009 |
+
reason = self.reason.decode("utf-8")
|
| 1010 |
+
except UnicodeDecodeError:
|
| 1011 |
+
reason = self.reason.decode("iso-8859-1")
|
| 1012 |
+
else:
|
| 1013 |
+
reason = self.reason
|
| 1014 |
+
|
| 1015 |
+
if 400 <= self.status_code < 500:
|
| 1016 |
+
http_error_msg = (
|
| 1017 |
+
f"{self.status_code} Client Error: {reason} for url: {self.url}"
|
| 1018 |
+
)
|
| 1019 |
+
|
| 1020 |
+
elif 500 <= self.status_code < 600:
|
| 1021 |
+
http_error_msg = (
|
| 1022 |
+
f"{self.status_code} Server Error: {reason} for url: {self.url}"
|
| 1023 |
+
)
|
| 1024 |
+
|
| 1025 |
+
if http_error_msg:
|
| 1026 |
+
raise HTTPError(http_error_msg, response=self)
|
| 1027 |
+
|
| 1028 |
+
def close(self):
|
| 1029 |
+
"""Releases the connection back to the pool. Once this method has been
|
| 1030 |
+
called the underlying ``raw`` object must not be accessed again.
|
| 1031 |
+
|
| 1032 |
+
*Note: Should not normally need to be called explicitly.*
|
| 1033 |
+
"""
|
| 1034 |
+
if not self._content_consumed:
|
| 1035 |
+
self.raw.close()
|
| 1036 |
+
|
| 1037 |
+
release_conn = getattr(self.raw, "release_conn", None)
|
| 1038 |
+
if release_conn is not None:
|
| 1039 |
+
release_conn()
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/packages.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from .compat import chardet
|
| 4 |
+
|
| 5 |
+
# This code exists for backwards compatibility reasons.
|
| 6 |
+
# I don't like it either. Just look the other way. :)
|
| 7 |
+
|
| 8 |
+
for package in ("urllib3", "idna"):
|
| 9 |
+
vendored_package = "pip._vendor." + package
|
| 10 |
+
locals()[package] = __import__(vendored_package)
|
| 11 |
+
# This traversal is apparently necessary such that the identities are
|
| 12 |
+
# preserved (requests.packages.urllib3.* is urllib3.*)
|
| 13 |
+
for mod in list(sys.modules):
|
| 14 |
+
if mod == vendored_package or mod.startswith(vendored_package + '.'):
|
| 15 |
+
unprefixed_mod = mod[len("pip._vendor."):]
|
| 16 |
+
sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]
|
| 17 |
+
|
| 18 |
+
if chardet is not None:
|
| 19 |
+
target = chardet.__name__
|
| 20 |
+
for mod in list(sys.modules):
|
| 21 |
+
if mod == target or mod.startswith(f"{target}."):
|
| 22 |
+
imported_mod = sys.modules[mod]
|
| 23 |
+
sys.modules[f"requests.packages.{mod}"] = imported_mod
|
| 24 |
+
mod = mod.replace(target, "chardet")
|
| 25 |
+
sys.modules[f"requests.packages.{mod}"] = imported_mod
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/requests/sessions.py
ADDED
|
@@ -0,0 +1,831 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.sessions
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides a Session object to manage and persist settings across
|
| 6 |
+
requests (cookies, auth, proxies).
|
| 7 |
+
"""
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
import time
|
| 11 |
+
from collections import OrderedDict
|
| 12 |
+
from datetime import timedelta
|
| 13 |
+
|
| 14 |
+
from ._internal_utils import to_native_string
|
| 15 |
+
from .adapters import HTTPAdapter
|
| 16 |
+
from .auth import _basic_auth_str
|
| 17 |
+
from .compat import Mapping, cookielib, urljoin, urlparse
|
| 18 |
+
from .cookies import (
|
| 19 |
+
RequestsCookieJar,
|
| 20 |
+
cookiejar_from_dict,
|
| 21 |
+
extract_cookies_to_jar,
|
| 22 |
+
merge_cookies,
|
| 23 |
+
)
|
| 24 |
+
from .exceptions import (
|
| 25 |
+
ChunkedEncodingError,
|
| 26 |
+
ContentDecodingError,
|
| 27 |
+
InvalidSchema,
|
| 28 |
+
TooManyRedirects,
|
| 29 |
+
)
|
| 30 |
+
from .hooks import default_hooks, dispatch_hook
|
| 31 |
+
|
| 32 |
+
# formerly defined here, reexposed here for backward compatibility
|
| 33 |
+
from .models import ( # noqa: F401
|
| 34 |
+
DEFAULT_REDIRECT_LIMIT,
|
| 35 |
+
REDIRECT_STATI,
|
| 36 |
+
PreparedRequest,
|
| 37 |
+
Request,
|
| 38 |
+
)
|
| 39 |
+
from .status_codes import codes
|
| 40 |
+
from .structures import CaseInsensitiveDict
|
| 41 |
+
from .utils import ( # noqa: F401
|
| 42 |
+
DEFAULT_PORTS,
|
| 43 |
+
default_headers,
|
| 44 |
+
get_auth_from_url,
|
| 45 |
+
get_environ_proxies,
|
| 46 |
+
get_netrc_auth,
|
| 47 |
+
requote_uri,
|
| 48 |
+
resolve_proxies,
|
| 49 |
+
rewind_body,
|
| 50 |
+
should_bypass_proxies,
|
| 51 |
+
to_key_val_list,
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
# Preferred clock, based on which one is more accurate on a given system.
|
| 55 |
+
if sys.platform == "win32":
|
| 56 |
+
preferred_clock = time.perf_counter
|
| 57 |
+
else:
|
| 58 |
+
preferred_clock = time.time
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Determine the effective value of a setting for one request.

    The per-request value takes precedence over the session value. When
    both sides are mappings they are merged into a fresh ``dict_class``
    (request keys overriding session keys), and any key whose merged
    value is ``None`` is removed entirely — ``None`` acts as a deletion
    marker. Non-mapping settings (e.g. a boolean ``verify``) are taken
    from the request as-is.
    """
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting

    # Only merge when BOTH sides are mappings; otherwise the request
    # value simply wins.
    both_mappings = isinstance(session_setting, Mapping) and isinstance(
        request_setting, Mapping
    )
    if not both_mappings:
        return request_setting

    combined = dict_class(to_key_val_list(session_setting))
    combined.update(to_key_val_list(request_setting))

    # Collect the None-valued keys first so we never mutate the mapping
    # while iterating over it.
    for dead_key in [k for k, v in combined.items() if v is None]:
        del combined[dead_key]

    return combined
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Merge request-level and session-level hooks.

    A hooks dict whose ``'response'`` entry is an empty list is treated
    the same as no hooks at all; without this special case, a request
    carrying the default ``{'response': []}`` would wipe out the
    session's hooks when merged.
    """

    def _effectively_empty(hooks):
        # No dict at all, or the default empty 'response' hook list.
        return hooks is None or hooks.get("response") == []

    if _effectively_empty(session_hooks):
        return request_hooks
    if _effectively_empty(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class SessionRedirectMixin:
    """Redirect-following behavior mixed into :class:`Session`.

    Provides redirect-target extraction, the redirect-resolution loop,
    and the helpers that rebuild auth, proxies, and the HTTP method on
    each hop. Relies on attributes supplied by the host class
    (``self.max_redirects``, ``self.cookies``, ``self.trust_env``,
    ``self.send``).
    """

    def get_redirect_target(self, resp):
        """Receives a Response. Returns a redirect URI or ``None``"""
        # Due to the nature of how requests processes redirects this method will
        # be called at least once upon the original response and at least twice
        # on each subsequent redirect response (if any).
        # If a custom mixin is used to handle this logic, it may be advantageous
        # to cache the redirect location onto the response object as a private
        # attribute.
        if resp.is_redirect:
            location = resp.headers["location"]
            # Currently the underlying http module on py3 decode headers
            # in latin1, but empirical evidence suggests that latin1 is very
            # rarely used with non-ASCII characters in HTTP headers.
            # It is more likely to get UTF8 header rather than latin1.
            # This causes incorrect handling of UTF8 encoded location headers.
            # To solve this, we re-encode the location in latin1.
            location = location.encode("latin1")
            return to_native_string(location, "utf8")
        return None

    def should_strip_auth(self, old_url, new_url):
        """Decide whether Authorization header should be removed when redirecting"""
        old_parsed = urlparse(old_url)
        new_parsed = urlparse(new_url)
        # Different host: always strip credentials.
        if old_parsed.hostname != new_parsed.hostname:
            return True
        # Special case: allow http -> https redirect when using the standard
        # ports. This isn't specified by RFC 7235, but is kept to avoid
        # breaking backwards compatibility with older versions of requests
        # that allowed any redirects on the same host.
        if (
            old_parsed.scheme == "http"
            and old_parsed.port in (80, None)
            and new_parsed.scheme == "https"
            and new_parsed.port in (443, None)
        ):
            return False

        # Handle default port usage corresponding to scheme.
        changed_port = old_parsed.port != new_parsed.port
        changed_scheme = old_parsed.scheme != new_parsed.scheme
        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
        if (
            not changed_scheme
            and old_parsed.port in default_port
            and new_parsed.port in default_port
        ):
            return False

        # Standard case: root URI must match
        return changed_port or changed_scheme

    def resolve_redirects(
        self,
        resp,
        req,
        stream=False,
        timeout=None,
        verify=True,
        cert=None,
        proxies=None,
        yield_requests=False,
        **adapter_kwargs,
    ):
        """Receives a Response. Returns a generator of Responses or Requests.

        When ``yield_requests`` is true, each prepared redirect request is
        yielded (before the final response) instead of being sent here.
        Raises :class:`TooManyRedirects` once ``self.max_redirects`` hops
        have accumulated in the response history.
        """

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        previous_fragment = urlparse(req.url).fragment
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects(
                    f"Exceeded {self.max_redirects} redirects.", response=resp
                )

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith("//"):
                parsed_rurl = urlparse(resp.url)
                url = ":".join([to_native_string(parsed_rurl.scheme), url])

            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
            parsed = urlparse(url)
            if parsed.fragment == "" and previous_fragment:
                parsed = parsed._replace(fragment=previous_fragment)
            elif parsed.fragment:
                previous_fragment = parsed.fragment
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/psf/requests/issues/1084
            if resp.status_code not in (
                codes.temporary_redirect,
                codes.permanent_redirect,
            ):
                # https://github.com/psf/requests/issues/3490
                purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            headers.pop("Cookie", None)

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = prepared_request._body_position is not None and (
                "Content-Length" in headers or "Transfer-Encoding" in headers
            )

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:
                resp = self.send(
                    req,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies,
                    allow_redirects=False,
                    **adapter_kwargs,
                )

                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
            yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if "Authorization" in headers and self.should_strip_auth(
            response.request.url, url
        ):
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            del headers["Authorization"]

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        headers = prepared_request.headers
        scheme = urlparse(prepared_request.url).scheme
        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

        # Always drop any stale Proxy-Authorization before deciding whether
        # to re-add it for the new proxy.
        if "Proxy-Authorization" in headers:
            del headers["Proxy-Authorization"]

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        # urllib3 handles proxy authorization for us in the standard adapter.
        # Avoid appending this to TLS tunneled requests where it may be leaked.
        if not scheme.startswith("https") and username and password:
            headers["Proxy-Authorization"] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # https://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != "HEAD":
            method = "GET"

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != "HEAD":
            method = "GET"

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == "POST":
            method = "GET"

        prepared_request.method = method
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
class Session(SessionRedirectMixin):
|
| 357 |
+
"""A Requests session.
|
| 358 |
+
|
| 359 |
+
Provides cookie persistence, connection-pooling, and configuration.
|
| 360 |
+
|
| 361 |
+
Basic Usage::
|
| 362 |
+
|
| 363 |
+
>>> import requests
|
| 364 |
+
>>> s = requests.Session()
|
| 365 |
+
>>> s.get('https://httpbin.org/get')
|
| 366 |
+
<Response [200]>
|
| 367 |
+
|
| 368 |
+
Or as a context manager::
|
| 369 |
+
|
| 370 |
+
>>> with requests.Session() as s:
|
| 371 |
+
... s.get('https://httpbin.org/get')
|
| 372 |
+
<Response [200]>
|
| 373 |
+
"""
|
| 374 |
+
|
| 375 |
+
__attrs__ = [
|
| 376 |
+
"headers",
|
| 377 |
+
"cookies",
|
| 378 |
+
"auth",
|
| 379 |
+
"proxies",
|
| 380 |
+
"hooks",
|
| 381 |
+
"params",
|
| 382 |
+
"verify",
|
| 383 |
+
"cert",
|
| 384 |
+
"adapters",
|
| 385 |
+
"stream",
|
| 386 |
+
"trust_env",
|
| 387 |
+
"max_redirects",
|
| 388 |
+
]
|
| 389 |
+
|
| 390 |
+
    def __init__(self):
        """Initialize session-wide defaults and mount the HTTP(S) adapters."""
        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        #: Defaults to `True`, requiring requests to verify the TLS certificate at the
        #: remote end.
        #: If verify is set to `False`, requests will accept any TLS certificate
        #: presented by the server, and will ignore hostname mismatches and/or
        #: expired certificates, which will make your application vulnerable to
        #: man-in-the-middle (MitM) attacks.
        #: Only set this to `False` for testing.
        self.verify = True

        #: SSL client certificate default, if String, path to ssl client
        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
        #: 30.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters. Longest-prefix matching is handled by
        # mount(); https is mounted first but ordering is maintained there.
        self.adapters = OrderedDict()
        self.mount("https://", HTTPAdapter())
        self.mount("http://", HTTPAdapter())
|
| 450 |
+
|
| 451 |
+
    def __enter__(self):
        # Context-manager entry: the session itself is the managed object.
        return self
|
| 453 |
+
|
| 454 |
+
    def __exit__(self, *args):
        # Context-manager exit: always release resources via close(),
        # regardless of whether an exception occurred.
        self.close()
|
| 456 |
+
|
| 457 |
+
    def prepare_request(self, request):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies: session cookies first, then the
        # request's own cookies on top (request values win).
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies
        )

        # Set environment's basic authentication if not explicitly set.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(
                request.headers, self.headers, dict_class=CaseInsensitiveDict
            ),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p
|
| 499 |
+
|
| 500 |
+
    def request(
        self,
        method,
        url,
        params=None,
        data=None,
        headers=None,
        cookies=None,
        files=None,
        auth=None,
        timeout=None,
        allow_redirects=True,
        proxies=None,
        hooks=None,
        stream=None,
        verify=None,
        cert=None,
        json=None,
    ):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
            object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param hooks: (optional) Dictionary mapping hook name to one event or
            list of events, event must be callable.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``. When set to
            ``False``, requests will accept any TLS certificate presented by
            the server, and will ignore hostname mismatches and/or expired
            certificates, which will make your application vulnerable to
            man-in-the-middle (MitM) attacks. Setting verify to ``False``
            may be useful during local development or testing.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
        """
        # Create the Request.
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        # Fold in environment-derived settings (proxies, verify, cert, stream)
        # before dispatch.
        settings = self.merge_environment_settings(
            prep.url, proxies, stream, verify, cert
        )

        # Send the request.
        send_kwargs = {
            "timeout": timeout,
            "allow_redirects": allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)

        return resp
|
| 592 |
+
|
| 593 |
+
def get(self, url, **kwargs):
|
| 594 |
+
r"""Sends a GET request. Returns :class:`Response` object.
|
| 595 |
+
|
| 596 |
+
:param url: URL for the new :class:`Request` object.
|
| 597 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 598 |
+
:rtype: requests.Response
|
| 599 |
+
"""
|
| 600 |
+
|
| 601 |
+
kwargs.setdefault("allow_redirects", True)
|
| 602 |
+
return self.request("GET", url, **kwargs)
|
| 603 |
+
|
| 604 |
+
def options(self, url, **kwargs):
|
| 605 |
+
r"""Sends a OPTIONS request. Returns :class:`Response` object.
|
| 606 |
+
|
| 607 |
+
:param url: URL for the new :class:`Request` object.
|
| 608 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 609 |
+
:rtype: requests.Response
|
| 610 |
+
"""
|
| 611 |
+
|
| 612 |
+
kwargs.setdefault("allow_redirects", True)
|
| 613 |
+
return self.request("OPTIONS", url, **kwargs)
|
| 614 |
+
|
| 615 |
+
def head(self, url, **kwargs):
|
| 616 |
+
r"""Sends a HEAD request. Returns :class:`Response` object.
|
| 617 |
+
|
| 618 |
+
:param url: URL for the new :class:`Request` object.
|
| 619 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 620 |
+
:rtype: requests.Response
|
| 621 |
+
"""
|
| 622 |
+
|
| 623 |
+
kwargs.setdefault("allow_redirects", False)
|
| 624 |
+
return self.request("HEAD", url, **kwargs)
|
| 625 |
+
|
| 626 |
+
def post(self, url, data=None, json=None, **kwargs):
|
| 627 |
+
r"""Sends a POST request. Returns :class:`Response` object.
|
| 628 |
+
|
| 629 |
+
:param url: URL for the new :class:`Request` object.
|
| 630 |
+
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
|
| 631 |
+
object to send in the body of the :class:`Request`.
|
| 632 |
+
:param json: (optional) json to send in the body of the :class:`Request`.
|
| 633 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 634 |
+
:rtype: requests.Response
|
| 635 |
+
"""
|
| 636 |
+
|
| 637 |
+
return self.request("POST", url, data=data, json=json, **kwargs)
|
| 638 |
+
|
| 639 |
+
def put(self, url, data=None, **kwargs):
|
| 640 |
+
r"""Sends a PUT request. Returns :class:`Response` object.
|
| 641 |
+
|
| 642 |
+
:param url: URL for the new :class:`Request` object.
|
| 643 |
+
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
|
| 644 |
+
object to send in the body of the :class:`Request`.
|
| 645 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 646 |
+
:rtype: requests.Response
|
| 647 |
+
"""
|
| 648 |
+
|
| 649 |
+
return self.request("PUT", url, data=data, **kwargs)
|
| 650 |
+
|
| 651 |
+
def patch(self, url, data=None, **kwargs):
|
| 652 |
+
r"""Sends a PATCH request. Returns :class:`Response` object.
|
| 653 |
+
|
| 654 |
+
:param url: URL for the new :class:`Request` object.
|
| 655 |
+
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
|
| 656 |
+
object to send in the body of the :class:`Request`.
|
| 657 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 658 |
+
:rtype: requests.Response
|
| 659 |
+
"""
|
| 660 |
+
|
| 661 |
+
return self.request("PATCH", url, data=data, **kwargs)
|
| 662 |
+
|
| 663 |
+
def delete(self, url, **kwargs):
|
| 664 |
+
r"""Sends a DELETE request. Returns :class:`Response` object.
|
| 665 |
+
|
| 666 |
+
:param url: URL for the new :class:`Request` object.
|
| 667 |
+
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
| 668 |
+
:rtype: requests.Response
|
| 669 |
+
"""
|
| 670 |
+
|
| 671 |
+
return self.request("DELETE", url, **kwargs)
|
| 672 |
+
|
| 673 |
+
def send(self, request, **kwargs):
|
| 674 |
+
"""Send a given PreparedRequest.
|
| 675 |
+
|
| 676 |
+
:rtype: requests.Response
|
| 677 |
+
"""
|
| 678 |
+
# Set defaults that the hooks can utilize to ensure they always have
|
| 679 |
+
# the correct parameters to reproduce the previous request.
|
| 680 |
+
kwargs.setdefault("stream", self.stream)
|
| 681 |
+
kwargs.setdefault("verify", self.verify)
|
| 682 |
+
kwargs.setdefault("cert", self.cert)
|
| 683 |
+
if "proxies" not in kwargs:
|
| 684 |
+
kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)
|
| 685 |
+
|
| 686 |
+
# It's possible that users might accidentally send a Request object.
|
| 687 |
+
# Guard against that specific failure case.
|
| 688 |
+
if isinstance(request, Request):
|
| 689 |
+
raise ValueError("You can only send PreparedRequests.")
|
| 690 |
+
|
| 691 |
+
# Set up variables needed for resolve_redirects and dispatching of hooks
|
| 692 |
+
allow_redirects = kwargs.pop("allow_redirects", True)
|
| 693 |
+
stream = kwargs.get("stream")
|
| 694 |
+
hooks = request.hooks
|
| 695 |
+
|
| 696 |
+
# Get the appropriate adapter to use
|
| 697 |
+
adapter = self.get_adapter(url=request.url)
|
| 698 |
+
|
| 699 |
+
# Start time (approximately) of the request
|
| 700 |
+
start = preferred_clock()
|
| 701 |
+
|
| 702 |
+
# Send the request
|
| 703 |
+
r = adapter.send(request, **kwargs)
|
| 704 |
+
|
| 705 |
+
# Total elapsed time of the request (approximately)
|
| 706 |
+
elapsed = preferred_clock() - start
|
| 707 |
+
r.elapsed = timedelta(seconds=elapsed)
|
| 708 |
+
|
| 709 |
+
# Response manipulation hooks
|
| 710 |
+
r = dispatch_hook("response", hooks, r, **kwargs)
|
| 711 |
+
|
| 712 |
+
# Persist cookies
|
| 713 |
+
if r.history:
|
| 714 |
+
# If the hooks create history then we want those cookies too
|
| 715 |
+
for resp in r.history:
|
| 716 |
+
extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
|
| 717 |
+
|
| 718 |
+
extract_cookies_to_jar(self.cookies, request, r.raw)
|
| 719 |
+
|
| 720 |
+
# Resolve redirects if allowed.
|
| 721 |
+
if allow_redirects:
|
| 722 |
+
# Redirect resolving generator.
|
| 723 |
+
gen = self.resolve_redirects(r, request, **kwargs)
|
| 724 |
+
history = [resp for resp in gen]
|
| 725 |
+
else:
|
| 726 |
+
history = []
|
| 727 |
+
|
| 728 |
+
# Shuffle things around if there's history.
|
| 729 |
+
if history:
|
| 730 |
+
# Insert the first (original) request at the start
|
| 731 |
+
history.insert(0, r)
|
| 732 |
+
# Get the last request made
|
| 733 |
+
r = history.pop()
|
| 734 |
+
r.history = history
|
| 735 |
+
|
| 736 |
+
# If redirects aren't being followed, store the response on the Request for Response.next().
|
| 737 |
+
if not allow_redirects:
|
| 738 |
+
try:
|
| 739 |
+
r._next = next(
|
| 740 |
+
self.resolve_redirects(r, request, yield_requests=True, **kwargs)
|
| 741 |
+
)
|
| 742 |
+
except StopIteration:
|
| 743 |
+
pass
|
| 744 |
+
|
| 745 |
+
if not stream:
|
| 746 |
+
r.content
|
| 747 |
+
|
| 748 |
+
return r
|
| 749 |
+
|
| 750 |
+
def merge_environment_settings(self, url, proxies, stream, verify, cert):
|
| 751 |
+
"""
|
| 752 |
+
Check the environment and merge it with some settings.
|
| 753 |
+
|
| 754 |
+
:rtype: dict
|
| 755 |
+
"""
|
| 756 |
+
# Gather clues from the surrounding environment.
|
| 757 |
+
if self.trust_env:
|
| 758 |
+
# Set environment's proxies.
|
| 759 |
+
no_proxy = proxies.get("no_proxy") if proxies is not None else None
|
| 760 |
+
env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
|
| 761 |
+
for k, v in env_proxies.items():
|
| 762 |
+
proxies.setdefault(k, v)
|
| 763 |
+
|
| 764 |
+
# Look for requests environment configuration
|
| 765 |
+
# and be compatible with cURL.
|
| 766 |
+
if verify is True or verify is None:
|
| 767 |
+
verify = (
|
| 768 |
+
os.environ.get("REQUESTS_CA_BUNDLE")
|
| 769 |
+
or os.environ.get("CURL_CA_BUNDLE")
|
| 770 |
+
or verify
|
| 771 |
+
)
|
| 772 |
+
|
| 773 |
+
# Merge all the kwargs.
|
| 774 |
+
proxies = merge_setting(proxies, self.proxies)
|
| 775 |
+
stream = merge_setting(stream, self.stream)
|
| 776 |
+
verify = merge_setting(verify, self.verify)
|
| 777 |
+
cert = merge_setting(cert, self.cert)
|
| 778 |
+
|
| 779 |
+
return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert}
|
| 780 |
+
|
| 781 |
+
def get_adapter(self, url):
|
| 782 |
+
"""
|
| 783 |
+
Returns the appropriate connection adapter for the given URL.
|
| 784 |
+
|
| 785 |
+
:rtype: requests.adapters.BaseAdapter
|
| 786 |
+
"""
|
| 787 |
+
for prefix, adapter in self.adapters.items():
|
| 788 |
+
if url.lower().startswith(prefix.lower()):
|
| 789 |
+
return adapter
|
| 790 |
+
|
| 791 |
+
# Nothing matches :-/
|
| 792 |
+
raise InvalidSchema(f"No connection adapters were found for {url!r}")
|
| 793 |
+
|
| 794 |
+
def close(self):
|
| 795 |
+
"""Closes all adapters and as such the session"""
|
| 796 |
+
for v in self.adapters.values():
|
| 797 |
+
v.close()
|
| 798 |
+
|
| 799 |
+
def mount(self, prefix, adapter):
|
| 800 |
+
"""Registers a connection adapter to a prefix.
|
| 801 |
+
|
| 802 |
+
Adapters are sorted in descending order by prefix length.
|
| 803 |
+
"""
|
| 804 |
+
self.adapters[prefix] = adapter
|
| 805 |
+
keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
|
| 806 |
+
|
| 807 |
+
for key in keys_to_move:
|
| 808 |
+
self.adapters[key] = self.adapters.pop(key)
|
| 809 |
+
|
| 810 |
+
def __getstate__(self):
|
| 811 |
+
state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 812 |
+
return state
|
| 813 |
+
|
| 814 |
+
def __setstate__(self, state):
|
| 815 |
+
for attr, value in state.items():
|
| 816 |
+
setattr(self, attr, value)
|
| 817 |
+
|
| 818 |
+
|
| 819 |
+
def session():
|
| 820 |
+
"""
|
| 821 |
+
Returns a :class:`Session` for context-management.
|
| 822 |
+
|
| 823 |
+
.. deprecated:: 1.0.0
|
| 824 |
+
|
| 825 |
+
This method has been deprecated since version 1.0.0 and is only kept for
|
| 826 |
+
backwards compatibility. New code should use :class:`~requests.sessions.Session`
|
| 827 |
+
to create a session. This may be removed at a future date.
|
| 828 |
+
|
| 829 |
+
:rtype: Session
|
| 830 |
+
"""
|
| 831 |
+
return Session()
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/live_render.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional, Tuple, Literal
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
from ._loop import loop_last
|
| 5 |
+
from .console import Console, ConsoleOptions, RenderableType, RenderResult
|
| 6 |
+
from .control import Control
|
| 7 |
+
from .segment import ControlType, Segment
|
| 8 |
+
from .style import StyleType
|
| 9 |
+
from .text import Text
|
| 10 |
+
|
| 11 |
+
VerticalOverflowMethod = Literal["crop", "ellipsis", "visible"]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class LiveRender:
|
| 15 |
+
"""Creates a renderable that may be updated.
|
| 16 |
+
|
| 17 |
+
Args:
|
| 18 |
+
renderable (RenderableType): Any renderable object.
|
| 19 |
+
style (StyleType, optional): An optional style to apply to the renderable. Defaults to "".
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
def __init__(
|
| 23 |
+
self,
|
| 24 |
+
renderable: RenderableType,
|
| 25 |
+
style: StyleType = "",
|
| 26 |
+
vertical_overflow: VerticalOverflowMethod = "ellipsis",
|
| 27 |
+
) -> None:
|
| 28 |
+
self.renderable = renderable
|
| 29 |
+
self.style = style
|
| 30 |
+
self.vertical_overflow = vertical_overflow
|
| 31 |
+
self._shape: Optional[Tuple[int, int]] = None
|
| 32 |
+
|
| 33 |
+
def set_renderable(self, renderable: RenderableType) -> None:
|
| 34 |
+
"""Set a new renderable.
|
| 35 |
+
|
| 36 |
+
Args:
|
| 37 |
+
renderable (RenderableType): Any renderable object, including str.
|
| 38 |
+
"""
|
| 39 |
+
self.renderable = renderable
|
| 40 |
+
|
| 41 |
+
def position_cursor(self) -> Control:
|
| 42 |
+
"""Get control codes to move cursor to beginning of live render.
|
| 43 |
+
|
| 44 |
+
Returns:
|
| 45 |
+
Control: A control instance that may be printed.
|
| 46 |
+
"""
|
| 47 |
+
if self._shape is not None:
|
| 48 |
+
_, height = self._shape
|
| 49 |
+
return Control(
|
| 50 |
+
ControlType.CARRIAGE_RETURN,
|
| 51 |
+
(ControlType.ERASE_IN_LINE, 2),
|
| 52 |
+
*(
|
| 53 |
+
(
|
| 54 |
+
(ControlType.CURSOR_UP, 1),
|
| 55 |
+
(ControlType.ERASE_IN_LINE, 2),
|
| 56 |
+
)
|
| 57 |
+
* (height - 1)
|
| 58 |
+
)
|
| 59 |
+
)
|
| 60 |
+
return Control()
|
| 61 |
+
|
| 62 |
+
def restore_cursor(self) -> Control:
|
| 63 |
+
"""Get control codes to clear the render and restore the cursor to its previous position.
|
| 64 |
+
|
| 65 |
+
Returns:
|
| 66 |
+
Control: A Control instance that may be printed.
|
| 67 |
+
"""
|
| 68 |
+
if self._shape is not None:
|
| 69 |
+
_, height = self._shape
|
| 70 |
+
return Control(
|
| 71 |
+
ControlType.CARRIAGE_RETURN,
|
| 72 |
+
*((ControlType.CURSOR_UP, 1), (ControlType.ERASE_IN_LINE, 2)) * height
|
| 73 |
+
)
|
| 74 |
+
return Control()
|
| 75 |
+
|
| 76 |
+
def __rich_console__(
|
| 77 |
+
self, console: Console, options: ConsoleOptions
|
| 78 |
+
) -> RenderResult:
|
| 79 |
+
renderable = self.renderable
|
| 80 |
+
style = console.get_style(self.style)
|
| 81 |
+
lines = console.render_lines(renderable, options, style=style, pad=False)
|
| 82 |
+
shape = Segment.get_shape(lines)
|
| 83 |
+
|
| 84 |
+
_, height = shape
|
| 85 |
+
if height > options.size.height:
|
| 86 |
+
if self.vertical_overflow == "crop":
|
| 87 |
+
lines = lines[: options.size.height]
|
| 88 |
+
shape = Segment.get_shape(lines)
|
| 89 |
+
elif self.vertical_overflow == "ellipsis":
|
| 90 |
+
lines = lines[: (options.size.height - 1)]
|
| 91 |
+
overflow_text = Text(
|
| 92 |
+
"...",
|
| 93 |
+
overflow="crop",
|
| 94 |
+
justify="center",
|
| 95 |
+
end="",
|
| 96 |
+
style="live.ellipsis",
|
| 97 |
+
)
|
| 98 |
+
lines.append(list(console.render(overflow_text)))
|
| 99 |
+
shape = Segment.get_shape(lines)
|
| 100 |
+
self._shape = shape
|
| 101 |
+
|
| 102 |
+
new_line = Segment.line()
|
| 103 |
+
for last, line in loop_last(lines):
|
| 104 |
+
yield from line
|
| 105 |
+
if not last:
|
| 106 |
+
yield new_line
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/logging.py
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
from logging import Handler, LogRecord
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from types import ModuleType
|
| 6 |
+
from typing import ClassVar, Iterable, List, Optional, Type, Union
|
| 7 |
+
|
| 8 |
+
from pip._vendor.rich._null_file import NullFile
|
| 9 |
+
|
| 10 |
+
from . import get_console
|
| 11 |
+
from ._log_render import FormatTimeCallable, LogRender
|
| 12 |
+
from .console import Console, ConsoleRenderable
|
| 13 |
+
from .highlighter import Highlighter, ReprHighlighter
|
| 14 |
+
from .text import Text
|
| 15 |
+
from .traceback import Traceback
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class RichHandler(Handler):
|
| 19 |
+
"""A logging handler that renders output with Rich. The time / level / message and file are displayed in columns.
|
| 20 |
+
The level is color coded, and the message is syntax highlighted.
|
| 21 |
+
|
| 22 |
+
Note:
|
| 23 |
+
Be careful when enabling console markup in log messages if you have configured logging for libraries not
|
| 24 |
+
under your control. If a dependency writes messages containing square brackets, it may not produce the intended output.
|
| 25 |
+
|
| 26 |
+
Args:
|
| 27 |
+
level (Union[int, str], optional): Log level. Defaults to logging.NOTSET.
|
| 28 |
+
console (:class:`~rich.console.Console`, optional): Optional console instance to write logs.
|
| 29 |
+
Default will use a global console instance writing to stdout.
|
| 30 |
+
show_time (bool, optional): Show a column for the time. Defaults to True.
|
| 31 |
+
omit_repeated_times (bool, optional): Omit repetition of the same time. Defaults to True.
|
| 32 |
+
show_level (bool, optional): Show a column for the level. Defaults to True.
|
| 33 |
+
show_path (bool, optional): Show the path to the original log call. Defaults to True.
|
| 34 |
+
enable_link_path (bool, optional): Enable terminal link of path column to file. Defaults to True.
|
| 35 |
+
highlighter (Highlighter, optional): Highlighter to style log messages, or None to use ReprHighlighter. Defaults to None.
|
| 36 |
+
markup (bool, optional): Enable console markup in log messages. Defaults to False.
|
| 37 |
+
rich_tracebacks (bool, optional): Enable rich tracebacks with syntax highlighting and formatting. Defaults to False.
|
| 38 |
+
tracebacks_width (Optional[int], optional): Number of characters used to render tracebacks, or None for full width. Defaults to None.
|
| 39 |
+
tracebacks_code_width (int, optional): Number of code characters used to render tracebacks, or None for full width. Defaults to 88.
|
| 40 |
+
tracebacks_extra_lines (int, optional): Additional lines of code to render tracebacks, or None for full width. Defaults to None.
|
| 41 |
+
tracebacks_theme (str, optional): Override pygments theme used in traceback.
|
| 42 |
+
tracebacks_word_wrap (bool, optional): Enable word wrapping of long tracebacks lines. Defaults to True.
|
| 43 |
+
tracebacks_show_locals (bool, optional): Enable display of locals in tracebacks. Defaults to False.
|
| 44 |
+
tracebacks_suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback.
|
| 45 |
+
tracebacks_max_frames (int, optional): Optional maximum number of frames returned by traceback.
|
| 46 |
+
locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
|
| 47 |
+
Defaults to 10.
|
| 48 |
+
locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80.
|
| 49 |
+
log_time_format (Union[str, TimeFormatterCallable], optional): If ``log_time`` is enabled, either string for strftime or callable that formats the time. Defaults to "[%x %X] ".
|
| 50 |
+
keywords (List[str], optional): List of words to highlight instead of ``RichHandler.KEYWORDS``.
|
| 51 |
+
"""
|
| 52 |
+
|
| 53 |
+
KEYWORDS: ClassVar[Optional[List[str]]] = [
|
| 54 |
+
"GET",
|
| 55 |
+
"POST",
|
| 56 |
+
"HEAD",
|
| 57 |
+
"PUT",
|
| 58 |
+
"DELETE",
|
| 59 |
+
"OPTIONS",
|
| 60 |
+
"TRACE",
|
| 61 |
+
"PATCH",
|
| 62 |
+
]
|
| 63 |
+
HIGHLIGHTER_CLASS: ClassVar[Type[Highlighter]] = ReprHighlighter
|
| 64 |
+
|
| 65 |
+
def __init__(
|
| 66 |
+
self,
|
| 67 |
+
level: Union[int, str] = logging.NOTSET,
|
| 68 |
+
console: Optional[Console] = None,
|
| 69 |
+
*,
|
| 70 |
+
show_time: bool = True,
|
| 71 |
+
omit_repeated_times: bool = True,
|
| 72 |
+
show_level: bool = True,
|
| 73 |
+
show_path: bool = True,
|
| 74 |
+
enable_link_path: bool = True,
|
| 75 |
+
highlighter: Optional[Highlighter] = None,
|
| 76 |
+
markup: bool = False,
|
| 77 |
+
rich_tracebacks: bool = False,
|
| 78 |
+
tracebacks_width: Optional[int] = None,
|
| 79 |
+
tracebacks_code_width: Optional[int] = 88,
|
| 80 |
+
tracebacks_extra_lines: int = 3,
|
| 81 |
+
tracebacks_theme: Optional[str] = None,
|
| 82 |
+
tracebacks_word_wrap: bool = True,
|
| 83 |
+
tracebacks_show_locals: bool = False,
|
| 84 |
+
tracebacks_suppress: Iterable[Union[str, ModuleType]] = (),
|
| 85 |
+
tracebacks_max_frames: int = 100,
|
| 86 |
+
locals_max_length: int = 10,
|
| 87 |
+
locals_max_string: int = 80,
|
| 88 |
+
log_time_format: Union[str, FormatTimeCallable] = "[%x %X]",
|
| 89 |
+
keywords: Optional[List[str]] = None,
|
| 90 |
+
) -> None:
|
| 91 |
+
super().__init__(level=level)
|
| 92 |
+
self.console = console or get_console()
|
| 93 |
+
self.highlighter = highlighter or self.HIGHLIGHTER_CLASS()
|
| 94 |
+
self._log_render = LogRender(
|
| 95 |
+
show_time=show_time,
|
| 96 |
+
show_level=show_level,
|
| 97 |
+
show_path=show_path,
|
| 98 |
+
time_format=log_time_format,
|
| 99 |
+
omit_repeated_times=omit_repeated_times,
|
| 100 |
+
level_width=None,
|
| 101 |
+
)
|
| 102 |
+
self.enable_link_path = enable_link_path
|
| 103 |
+
self.markup = markup
|
| 104 |
+
self.rich_tracebacks = rich_tracebacks
|
| 105 |
+
self.tracebacks_width = tracebacks_width
|
| 106 |
+
self.tracebacks_extra_lines = tracebacks_extra_lines
|
| 107 |
+
self.tracebacks_theme = tracebacks_theme
|
| 108 |
+
self.tracebacks_word_wrap = tracebacks_word_wrap
|
| 109 |
+
self.tracebacks_show_locals = tracebacks_show_locals
|
| 110 |
+
self.tracebacks_suppress = tracebacks_suppress
|
| 111 |
+
self.tracebacks_max_frames = tracebacks_max_frames
|
| 112 |
+
self.tracebacks_code_width = tracebacks_code_width
|
| 113 |
+
self.locals_max_length = locals_max_length
|
| 114 |
+
self.locals_max_string = locals_max_string
|
| 115 |
+
self.keywords = keywords
|
| 116 |
+
|
| 117 |
+
def get_level_text(self, record: LogRecord) -> Text:
|
| 118 |
+
"""Get the level name from the record.
|
| 119 |
+
|
| 120 |
+
Args:
|
| 121 |
+
record (LogRecord): LogRecord instance.
|
| 122 |
+
|
| 123 |
+
Returns:
|
| 124 |
+
Text: A tuple of the style and level name.
|
| 125 |
+
"""
|
| 126 |
+
level_name = record.levelname
|
| 127 |
+
level_text = Text.styled(
|
| 128 |
+
level_name.ljust(8), f"logging.level.{level_name.lower()}"
|
| 129 |
+
)
|
| 130 |
+
return level_text
|
| 131 |
+
|
| 132 |
+
def emit(self, record: LogRecord) -> None:
|
| 133 |
+
"""Invoked by logging."""
|
| 134 |
+
message = self.format(record)
|
| 135 |
+
traceback = None
|
| 136 |
+
if (
|
| 137 |
+
self.rich_tracebacks
|
| 138 |
+
and record.exc_info
|
| 139 |
+
and record.exc_info != (None, None, None)
|
| 140 |
+
):
|
| 141 |
+
exc_type, exc_value, exc_traceback = record.exc_info
|
| 142 |
+
assert exc_type is not None
|
| 143 |
+
assert exc_value is not None
|
| 144 |
+
traceback = Traceback.from_exception(
|
| 145 |
+
exc_type,
|
| 146 |
+
exc_value,
|
| 147 |
+
exc_traceback,
|
| 148 |
+
width=self.tracebacks_width,
|
| 149 |
+
code_width=self.tracebacks_code_width,
|
| 150 |
+
extra_lines=self.tracebacks_extra_lines,
|
| 151 |
+
theme=self.tracebacks_theme,
|
| 152 |
+
word_wrap=self.tracebacks_word_wrap,
|
| 153 |
+
show_locals=self.tracebacks_show_locals,
|
| 154 |
+
locals_max_length=self.locals_max_length,
|
| 155 |
+
locals_max_string=self.locals_max_string,
|
| 156 |
+
suppress=self.tracebacks_suppress,
|
| 157 |
+
max_frames=self.tracebacks_max_frames,
|
| 158 |
+
)
|
| 159 |
+
message = record.getMessage()
|
| 160 |
+
if self.formatter:
|
| 161 |
+
record.message = record.getMessage()
|
| 162 |
+
formatter = self.formatter
|
| 163 |
+
if hasattr(formatter, "usesTime") and formatter.usesTime():
|
| 164 |
+
record.asctime = formatter.formatTime(record, formatter.datefmt)
|
| 165 |
+
message = formatter.formatMessage(record)
|
| 166 |
+
|
| 167 |
+
message_renderable = self.render_message(record, message)
|
| 168 |
+
log_renderable = self.render(
|
| 169 |
+
record=record, traceback=traceback, message_renderable=message_renderable
|
| 170 |
+
)
|
| 171 |
+
if isinstance(self.console.file, NullFile):
|
| 172 |
+
# Handles pythonw, where stdout/stderr are null, and we return NullFile
|
| 173 |
+
# instance from Console.file. In this case, we still want to make a log record
|
| 174 |
+
# even though we won't be writing anything to a file.
|
| 175 |
+
self.handleError(record)
|
| 176 |
+
else:
|
| 177 |
+
try:
|
| 178 |
+
self.console.print(log_renderable)
|
| 179 |
+
except Exception:
|
| 180 |
+
self.handleError(record)
|
| 181 |
+
|
| 182 |
+
def render_message(self, record: LogRecord, message: str) -> "ConsoleRenderable":
|
| 183 |
+
"""Render message text in to Text.
|
| 184 |
+
|
| 185 |
+
Args:
|
| 186 |
+
record (LogRecord): logging Record.
|
| 187 |
+
message (str): String containing log message.
|
| 188 |
+
|
| 189 |
+
Returns:
|
| 190 |
+
ConsoleRenderable: Renderable to display log message.
|
| 191 |
+
"""
|
| 192 |
+
use_markup = getattr(record, "markup", self.markup)
|
| 193 |
+
message_text = Text.from_markup(message) if use_markup else Text(message)
|
| 194 |
+
|
| 195 |
+
highlighter = getattr(record, "highlighter", self.highlighter)
|
| 196 |
+
if highlighter:
|
| 197 |
+
message_text = highlighter(message_text)
|
| 198 |
+
|
| 199 |
+
if self.keywords is None:
|
| 200 |
+
self.keywords = self.KEYWORDS
|
| 201 |
+
|
| 202 |
+
if self.keywords:
|
| 203 |
+
message_text.highlight_words(self.keywords, "logging.keyword")
|
| 204 |
+
|
| 205 |
+
return message_text
|
| 206 |
+
|
| 207 |
+
def render(
|
| 208 |
+
self,
|
| 209 |
+
*,
|
| 210 |
+
record: LogRecord,
|
| 211 |
+
traceback: Optional[Traceback],
|
| 212 |
+
message_renderable: "ConsoleRenderable",
|
| 213 |
+
) -> "ConsoleRenderable":
|
| 214 |
+
"""Render log for display.
|
| 215 |
+
|
| 216 |
+
Args:
|
| 217 |
+
record (LogRecord): logging Record.
|
| 218 |
+
traceback (Optional[Traceback]): Traceback instance or None for no Traceback.
|
| 219 |
+
message_renderable (ConsoleRenderable): Renderable (typically Text) containing log message contents.
|
| 220 |
+
|
| 221 |
+
Returns:
|
| 222 |
+
ConsoleRenderable: Renderable to display log.
|
| 223 |
+
"""
|
| 224 |
+
path = Path(record.pathname).name
|
| 225 |
+
level = self.get_level_text(record)
|
| 226 |
+
time_format = None if self.formatter is None else self.formatter.datefmt
|
| 227 |
+
log_time = datetime.fromtimestamp(record.created)
|
| 228 |
+
|
| 229 |
+
log_renderable = self._log_render(
|
| 230 |
+
self.console,
|
| 231 |
+
[message_renderable] if not traceback else [message_renderable, traceback],
|
| 232 |
+
log_time=log_time,
|
| 233 |
+
time_format=time_format,
|
| 234 |
+
level=level,
|
| 235 |
+
path=path,
|
| 236 |
+
line_no=record.lineno,
|
| 237 |
+
link_path=record.pathname if self.enable_link_path else None,
|
| 238 |
+
)
|
| 239 |
+
return log_renderable
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
if __name__ == "__main__": # pragma: no cover
|
| 243 |
+
from time import sleep
|
| 244 |
+
|
| 245 |
+
FORMAT = "%(message)s"
|
| 246 |
+
# FORMAT = "%(asctime)-15s - %(levelname)s - %(message)s"
|
| 247 |
+
logging.basicConfig(
|
| 248 |
+
level="NOTSET",
|
| 249 |
+
format=FORMAT,
|
| 250 |
+
datefmt="[%X]",
|
| 251 |
+
handlers=[RichHandler(rich_tracebacks=True, tracebacks_show_locals=True)],
|
| 252 |
+
)
|
| 253 |
+
log = logging.getLogger("rich")
|
| 254 |
+
|
| 255 |
+
log.info("Server starting...")
|
| 256 |
+
log.info("Listening on http://127.0.0.1:8080")
|
| 257 |
+
sleep(1)
|
| 258 |
+
|
| 259 |
+
log.info("GET /index.html 200 1298")
|
| 260 |
+
log.info("GET /imgs/backgrounds/back1.jpg 200 54386")
|
| 261 |
+
log.info("GET /css/styles.css 200 54386")
|
| 262 |
+
log.warning("GET /favicon.ico 404 242")
|
| 263 |
+
sleep(1)
|
| 264 |
+
|
| 265 |
+
log.debug(
|
| 266 |
+
"JSONRPC request\n--> %r\n<-- %r",
|
| 267 |
+
{
|
| 268 |
+
"version": "1.1",
|
| 269 |
+
"method": "confirmFruitPurchase",
|
| 270 |
+
"params": [["apple", "orange", "mangoes", "pomelo"], 1.123],
|
| 271 |
+
"id": "194521489",
|
| 272 |
+
},
|
| 273 |
+
{"version": "1.1", "result": True, "error": None, "id": "194521489"},
|
| 274 |
+
)
|
| 275 |
+
log.debug(
|
| 276 |
+
"Loading configuration file /adasd/asdasd/qeqwe/qwrqwrqwr/sdgsdgsdg/werwerwer/dfgerert/ertertert/ertetert/werwerwer"
|
| 277 |
+
)
|
| 278 |
+
log.error("Unable to find 'pomelo' in database!")
|
| 279 |
+
log.info("POST /jsonrpc/ 200 65532")
|
| 280 |
+
log.info("POST /admin/ 401 42234")
|
| 281 |
+
log.warning("password was rejected for admin site.")
|
| 282 |
+
|
| 283 |
+
def divide() -> None:
|
| 284 |
+
number = 1
|
| 285 |
+
divisor = 0
|
| 286 |
+
foos = ["foo"] * 100
|
| 287 |
+
log.debug("in divide")
|
| 288 |
+
try:
|
| 289 |
+
number / divisor
|
| 290 |
+
except:
|
| 291 |
+
log.exception("An error of some kind occurred!")
|
| 292 |
+
|
| 293 |
+
divide()
|
| 294 |
+
sleep(1)
|
| 295 |
+
log.critical("Out of memory!")
|
| 296 |
+
log.info("Server exited with code=-1")
|
| 297 |
+
log.info("[bold]EXITING...[/bold]", extra=dict(markup=True))
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/markup.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from ast import literal_eval
|
| 3 |
+
from operator import attrgetter
|
| 4 |
+
from typing import Callable, Iterable, List, Match, NamedTuple, Optional, Tuple, Union
|
| 5 |
+
|
| 6 |
+
from ._emoji_replace import _emoji_replace
|
| 7 |
+
from .emoji import EmojiVariant
|
| 8 |
+
from .errors import MarkupError
|
| 9 |
+
from .style import Style
|
| 10 |
+
from .text import Span, Text
|
| 11 |
+
|
| 12 |
+
RE_TAGS = re.compile(
|
| 13 |
+
r"""((\\*)\[([a-z#/@][^[]*?)])""",
|
| 14 |
+
re.VERBOSE,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
RE_HANDLER = re.compile(r"^([\w.]*?)(\(.*?\))?$")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Tag(NamedTuple):
|
| 21 |
+
"""A tag in console markup."""
|
| 22 |
+
|
| 23 |
+
name: str
|
| 24 |
+
"""The tag name. e.g. 'bold'."""
|
| 25 |
+
parameters: Optional[str]
|
| 26 |
+
"""Any additional parameters after the name."""
|
| 27 |
+
|
| 28 |
+
def __str__(self) -> str:
|
| 29 |
+
return (
|
| 30 |
+
self.name if self.parameters is None else f"{self.name} {self.parameters}"
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
@property
|
| 34 |
+
def markup(self) -> str:
|
| 35 |
+
"""Get the string representation of this tag."""
|
| 36 |
+
return (
|
| 37 |
+
f"[{self.name}]"
|
| 38 |
+
if self.parameters is None
|
| 39 |
+
else f"[{self.name}={self.parameters}]"
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
_ReStringMatch = Match[str] # regex match object
|
| 44 |
+
_ReSubCallable = Callable[[_ReStringMatch], str] # Callable invoked by re.sub
|
| 45 |
+
_EscapeSubMethod = Callable[[_ReSubCallable, str], str] # Sub method of a compiled re
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def escape(
|
| 49 |
+
markup: str,
|
| 50 |
+
_escape: _EscapeSubMethod = re.compile(r"(\\*)(\[[a-z#/@][^[]*?])").sub,
|
| 51 |
+
) -> str:
|
| 52 |
+
"""Escapes text so that it won't be interpreted as markup.
|
| 53 |
+
|
| 54 |
+
Args:
|
| 55 |
+
markup (str): Content to be inserted in to markup.
|
| 56 |
+
|
| 57 |
+
Returns:
|
| 58 |
+
str: Markup with square brackets escaped.
|
| 59 |
+
"""
|
| 60 |
+
|
| 61 |
+
def escape_backslashes(match: Match[str]) -> str:
|
| 62 |
+
"""Called by re.sub replace matches."""
|
| 63 |
+
backslashes, text = match.groups()
|
| 64 |
+
return f"{backslashes}{backslashes}\\{text}"
|
| 65 |
+
|
| 66 |
+
markup = _escape(escape_backslashes, markup)
|
| 67 |
+
if markup.endswith("\\") and not markup.endswith("\\\\"):
|
| 68 |
+
return markup + "\\"
|
| 69 |
+
|
| 70 |
+
return markup
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _parse(markup: str) -> Iterable[Tuple[int, Optional[str], Optional[Tag]]]:
|
| 74 |
+
"""Parse markup in to an iterable of tuples of (position, text, tag).
|
| 75 |
+
|
| 76 |
+
Args:
|
| 77 |
+
markup (str): A string containing console markup
|
| 78 |
+
|
| 79 |
+
"""
|
| 80 |
+
position = 0
|
| 81 |
+
_divmod = divmod
|
| 82 |
+
_Tag = Tag
|
| 83 |
+
for match in RE_TAGS.finditer(markup):
|
| 84 |
+
full_text, escapes, tag_text = match.groups()
|
| 85 |
+
start, end = match.span()
|
| 86 |
+
if start > position:
|
| 87 |
+
yield start, markup[position:start], None
|
| 88 |
+
if escapes:
|
| 89 |
+
backslashes, escaped = _divmod(len(escapes), 2)
|
| 90 |
+
if backslashes:
|
| 91 |
+
# Literal backslashes
|
| 92 |
+
yield start, "\\" * backslashes, None
|
| 93 |
+
start += backslashes * 2
|
| 94 |
+
if escaped:
|
| 95 |
+
# Escape of tag
|
| 96 |
+
yield start, full_text[len(escapes) :], None
|
| 97 |
+
position = end
|
| 98 |
+
continue
|
| 99 |
+
text, equals, parameters = tag_text.partition("=")
|
| 100 |
+
yield start, None, _Tag(text, parameters if equals else None)
|
| 101 |
+
position = end
|
| 102 |
+
if position < len(markup):
|
| 103 |
+
yield position, markup[position:], None
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def render(
|
| 107 |
+
markup: str,
|
| 108 |
+
style: Union[str, Style] = "",
|
| 109 |
+
emoji: bool = True,
|
| 110 |
+
emoji_variant: Optional[EmojiVariant] = None,
|
| 111 |
+
) -> Text:
|
| 112 |
+
"""Render console markup in to a Text instance.
|
| 113 |
+
|
| 114 |
+
Args:
|
| 115 |
+
markup (str): A string containing console markup.
|
| 116 |
+
style: (Union[str, Style]): The style to use.
|
| 117 |
+
emoji (bool, optional): Also render emoji code. Defaults to True.
|
| 118 |
+
emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None.
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
Raises:
|
| 122 |
+
MarkupError: If there is a syntax error in the markup.
|
| 123 |
+
|
| 124 |
+
Returns:
|
| 125 |
+
Text: A test instance.
|
| 126 |
+
"""
|
| 127 |
+
emoji_replace = _emoji_replace
|
| 128 |
+
if "[" not in markup:
|
| 129 |
+
return Text(
|
| 130 |
+
emoji_replace(markup, default_variant=emoji_variant) if emoji else markup,
|
| 131 |
+
style=style,
|
| 132 |
+
)
|
| 133 |
+
text = Text(style=style)
|
| 134 |
+
append = text.append
|
| 135 |
+
normalize = Style.normalize
|
| 136 |
+
|
| 137 |
+
style_stack: List[Tuple[int, Tag]] = []
|
| 138 |
+
pop = style_stack.pop
|
| 139 |
+
|
| 140 |
+
spans: List[Span] = []
|
| 141 |
+
append_span = spans.append
|
| 142 |
+
|
| 143 |
+
_Span = Span
|
| 144 |
+
_Tag = Tag
|
| 145 |
+
|
| 146 |
+
def pop_style(style_name: str) -> Tuple[int, Tag]:
|
| 147 |
+
"""Pop tag matching given style name."""
|
| 148 |
+
for index, (_, tag) in enumerate(reversed(style_stack), 1):
|
| 149 |
+
if tag.name == style_name:
|
| 150 |
+
return pop(-index)
|
| 151 |
+
raise KeyError(style_name)
|
| 152 |
+
|
| 153 |
+
for position, plain_text, tag in _parse(markup):
|
| 154 |
+
if plain_text is not None:
|
| 155 |
+
# Handle open brace escapes, where the brace is not part of a tag.
|
| 156 |
+
plain_text = plain_text.replace("\\[", "[")
|
| 157 |
+
append(emoji_replace(plain_text) if emoji else plain_text)
|
| 158 |
+
elif tag is not None:
|
| 159 |
+
if tag.name.startswith("/"): # Closing tag
|
| 160 |
+
style_name = tag.name[1:].strip()
|
| 161 |
+
|
| 162 |
+
if style_name: # explicit close
|
| 163 |
+
style_name = normalize(style_name)
|
| 164 |
+
try:
|
| 165 |
+
start, open_tag = pop_style(style_name)
|
| 166 |
+
except KeyError:
|
| 167 |
+
raise MarkupError(
|
| 168 |
+
f"closing tag '{tag.markup}' at position {position} doesn't match any open tag"
|
| 169 |
+
) from None
|
| 170 |
+
else: # implicit close
|
| 171 |
+
try:
|
| 172 |
+
start, open_tag = pop()
|
| 173 |
+
except IndexError:
|
| 174 |
+
raise MarkupError(
|
| 175 |
+
f"closing tag '[/]' at position {position} has nothing to close"
|
| 176 |
+
) from None
|
| 177 |
+
|
| 178 |
+
if open_tag.name.startswith("@"):
|
| 179 |
+
if open_tag.parameters:
|
| 180 |
+
handler_name = ""
|
| 181 |
+
parameters = open_tag.parameters.strip()
|
| 182 |
+
handler_match = RE_HANDLER.match(parameters)
|
| 183 |
+
if handler_match is not None:
|
| 184 |
+
handler_name, match_parameters = handler_match.groups()
|
| 185 |
+
parameters = (
|
| 186 |
+
"()" if match_parameters is None else match_parameters
|
| 187 |
+
)
|
| 188 |
+
|
| 189 |
+
try:
|
| 190 |
+
meta_params = literal_eval(parameters)
|
| 191 |
+
except SyntaxError as error:
|
| 192 |
+
raise MarkupError(
|
| 193 |
+
f"error parsing {parameters!r} in {open_tag.parameters!r}; {error.msg}"
|
| 194 |
+
)
|
| 195 |
+
except Exception as error:
|
| 196 |
+
raise MarkupError(
|
| 197 |
+
f"error parsing {open_tag.parameters!r}; {error}"
|
| 198 |
+
) from None
|
| 199 |
+
|
| 200 |
+
if handler_name:
|
| 201 |
+
meta_params = (
|
| 202 |
+
handler_name,
|
| 203 |
+
meta_params
|
| 204 |
+
if isinstance(meta_params, tuple)
|
| 205 |
+
else (meta_params,),
|
| 206 |
+
)
|
| 207 |
+
|
| 208 |
+
else:
|
| 209 |
+
meta_params = ()
|
| 210 |
+
|
| 211 |
+
append_span(
|
| 212 |
+
_Span(
|
| 213 |
+
start, len(text), Style(meta={open_tag.name: meta_params})
|
| 214 |
+
)
|
| 215 |
+
)
|
| 216 |
+
else:
|
| 217 |
+
append_span(_Span(start, len(text), str(open_tag)))
|
| 218 |
+
|
| 219 |
+
else: # Opening tag
|
| 220 |
+
normalized_tag = _Tag(normalize(tag.name), tag.parameters)
|
| 221 |
+
style_stack.append((len(text), normalized_tag))
|
| 222 |
+
|
| 223 |
+
text_length = len(text)
|
| 224 |
+
while style_stack:
|
| 225 |
+
start, tag = style_stack.pop()
|
| 226 |
+
style = str(tag)
|
| 227 |
+
if style:
|
| 228 |
+
append_span(_Span(start, text_length, style))
|
| 229 |
+
|
| 230 |
+
text.spans = sorted(spans[::-1], key=attrgetter("start"))
|
| 231 |
+
return text
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
if __name__ == "__main__": # pragma: no cover
|
| 235 |
+
MARKUP = [
|
| 236 |
+
"[red]Hello World[/red]",
|
| 237 |
+
"[magenta]Hello [b]World[/b]",
|
| 238 |
+
"[bold]Bold[italic] bold and italic [/bold]italic[/italic]",
|
| 239 |
+
"Click [link=https://www.willmcgugan.com]here[/link] to visit my Blog",
|
| 240 |
+
":warning-emoji: [bold red blink] DANGER![/]",
|
| 241 |
+
]
|
| 242 |
+
|
| 243 |
+
from pip._vendor.rich import print
|
| 244 |
+
from pip._vendor.rich.table import Table
|
| 245 |
+
|
| 246 |
+
grid = Table("Markup", "Result", padding=(0, 1))
|
| 247 |
+
|
| 248 |
+
for markup in MARKUP:
|
| 249 |
+
grid.add_row(Text(markup), markup)
|
| 250 |
+
|
| 251 |
+
print(grid)
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/measure.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from operator import itemgetter
|
| 2 |
+
from typing import TYPE_CHECKING, Callable, NamedTuple, Optional, Sequence
|
| 3 |
+
|
| 4 |
+
from . import errors
|
| 5 |
+
from .protocol import is_renderable, rich_cast
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from .console import Console, ConsoleOptions, RenderableType
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class Measurement(NamedTuple):
|
| 12 |
+
"""Stores the minimum and maximum widths (in characters) required to render an object."""
|
| 13 |
+
|
| 14 |
+
minimum: int
|
| 15 |
+
"""Minimum number of cells required to render."""
|
| 16 |
+
maximum: int
|
| 17 |
+
"""Maximum number of cells required to render."""
|
| 18 |
+
|
| 19 |
+
@property
|
| 20 |
+
def span(self) -> int:
|
| 21 |
+
"""Get difference between maximum and minimum."""
|
| 22 |
+
return self.maximum - self.minimum
|
| 23 |
+
|
| 24 |
+
def normalize(self) -> "Measurement":
|
| 25 |
+
"""Get measurement that ensures that minimum <= maximum and minimum >= 0
|
| 26 |
+
|
| 27 |
+
Returns:
|
| 28 |
+
Measurement: A normalized measurement.
|
| 29 |
+
"""
|
| 30 |
+
minimum, maximum = self
|
| 31 |
+
minimum = min(max(0, minimum), maximum)
|
| 32 |
+
return Measurement(max(0, minimum), max(0, max(minimum, maximum)))
|
| 33 |
+
|
| 34 |
+
def with_maximum(self, width: int) -> "Measurement":
|
| 35 |
+
"""Get a RenderableWith where the widths are <= width.
|
| 36 |
+
|
| 37 |
+
Args:
|
| 38 |
+
width (int): Maximum desired width.
|
| 39 |
+
|
| 40 |
+
Returns:
|
| 41 |
+
Measurement: New Measurement object.
|
| 42 |
+
"""
|
| 43 |
+
minimum, maximum = self
|
| 44 |
+
return Measurement(min(minimum, width), min(maximum, width))
|
| 45 |
+
|
| 46 |
+
def with_minimum(self, width: int) -> "Measurement":
|
| 47 |
+
"""Get a RenderableWith where the widths are >= width.
|
| 48 |
+
|
| 49 |
+
Args:
|
| 50 |
+
width (int): Minimum desired width.
|
| 51 |
+
|
| 52 |
+
Returns:
|
| 53 |
+
Measurement: New Measurement object.
|
| 54 |
+
"""
|
| 55 |
+
minimum, maximum = self
|
| 56 |
+
width = max(0, width)
|
| 57 |
+
return Measurement(max(minimum, width), max(maximum, width))
|
| 58 |
+
|
| 59 |
+
def clamp(
|
| 60 |
+
self, min_width: Optional[int] = None, max_width: Optional[int] = None
|
| 61 |
+
) -> "Measurement":
|
| 62 |
+
"""Clamp a measurement within the specified range.
|
| 63 |
+
|
| 64 |
+
Args:
|
| 65 |
+
min_width (int): Minimum desired width, or ``None`` for no minimum. Defaults to None.
|
| 66 |
+
max_width (int): Maximum desired width, or ``None`` for no maximum. Defaults to None.
|
| 67 |
+
|
| 68 |
+
Returns:
|
| 69 |
+
Measurement: New Measurement object.
|
| 70 |
+
"""
|
| 71 |
+
measurement = self
|
| 72 |
+
if min_width is not None:
|
| 73 |
+
measurement = measurement.with_minimum(min_width)
|
| 74 |
+
if max_width is not None:
|
| 75 |
+
measurement = measurement.with_maximum(max_width)
|
| 76 |
+
return measurement
|
| 77 |
+
|
| 78 |
+
@classmethod
|
| 79 |
+
def get(
|
| 80 |
+
cls, console: "Console", options: "ConsoleOptions", renderable: "RenderableType"
|
| 81 |
+
) -> "Measurement":
|
| 82 |
+
"""Get a measurement for a renderable.
|
| 83 |
+
|
| 84 |
+
Args:
|
| 85 |
+
console (~rich.console.Console): Console instance.
|
| 86 |
+
options (~rich.console.ConsoleOptions): Console options.
|
| 87 |
+
renderable (RenderableType): An object that may be rendered with Rich.
|
| 88 |
+
|
| 89 |
+
Raises:
|
| 90 |
+
errors.NotRenderableError: If the object is not renderable.
|
| 91 |
+
|
| 92 |
+
Returns:
|
| 93 |
+
Measurement: Measurement object containing range of character widths required to render the object.
|
| 94 |
+
"""
|
| 95 |
+
_max_width = options.max_width
|
| 96 |
+
if _max_width < 1:
|
| 97 |
+
return Measurement(0, 0)
|
| 98 |
+
if isinstance(renderable, str):
|
| 99 |
+
renderable = console.render_str(
|
| 100 |
+
renderable, markup=options.markup, highlight=False
|
| 101 |
+
)
|
| 102 |
+
renderable = rich_cast(renderable)
|
| 103 |
+
if is_renderable(renderable):
|
| 104 |
+
get_console_width: Optional[
|
| 105 |
+
Callable[["Console", "ConsoleOptions"], "Measurement"]
|
| 106 |
+
] = getattr(renderable, "__rich_measure__", None)
|
| 107 |
+
if get_console_width is not None:
|
| 108 |
+
render_width = (
|
| 109 |
+
get_console_width(console, options)
|
| 110 |
+
.normalize()
|
| 111 |
+
.with_maximum(_max_width)
|
| 112 |
+
)
|
| 113 |
+
if render_width.maximum < 1:
|
| 114 |
+
return Measurement(0, 0)
|
| 115 |
+
return render_width.normalize()
|
| 116 |
+
else:
|
| 117 |
+
return Measurement(0, _max_width)
|
| 118 |
+
else:
|
| 119 |
+
raise errors.NotRenderableError(
|
| 120 |
+
f"Unable to get render width for {renderable!r}; "
|
| 121 |
+
"a str, Segment, or object with __rich_console__ method is required"
|
| 122 |
+
)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def measure_renderables(
|
| 126 |
+
console: "Console",
|
| 127 |
+
options: "ConsoleOptions",
|
| 128 |
+
renderables: Sequence["RenderableType"],
|
| 129 |
+
) -> "Measurement":
|
| 130 |
+
"""Get a measurement that would fit a number of renderables.
|
| 131 |
+
|
| 132 |
+
Args:
|
| 133 |
+
console (~rich.console.Console): Console instance.
|
| 134 |
+
options (~rich.console.ConsoleOptions): Console options.
|
| 135 |
+
renderables (Iterable[RenderableType]): One or more renderable objects.
|
| 136 |
+
|
| 137 |
+
Returns:
|
| 138 |
+
Measurement: Measurement object containing range of character widths required to
|
| 139 |
+
contain all given renderables.
|
| 140 |
+
"""
|
| 141 |
+
if not renderables:
|
| 142 |
+
return Measurement(0, 0)
|
| 143 |
+
get_measurement = Measurement.get
|
| 144 |
+
measurements = [
|
| 145 |
+
get_measurement(console, options, renderable) for renderable in renderables
|
| 146 |
+
]
|
| 147 |
+
measured_width = Measurement(
|
| 148 |
+
max(measurements, key=itemgetter(0)).minimum,
|
| 149 |
+
max(measurements, key=itemgetter(1)).maximum,
|
| 150 |
+
)
|
| 151 |
+
return measured_width
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/padding.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, List, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
if TYPE_CHECKING:
|
| 4 |
+
from .console import (
|
| 5 |
+
Console,
|
| 6 |
+
ConsoleOptions,
|
| 7 |
+
RenderableType,
|
| 8 |
+
RenderResult,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
from .jupyter import JupyterMixin
|
| 12 |
+
from .measure import Measurement
|
| 13 |
+
from .segment import Segment
|
| 14 |
+
from .style import Style
|
| 15 |
+
|
| 16 |
+
PaddingDimensions = Union[int, Tuple[int], Tuple[int, int], Tuple[int, int, int, int]]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class Padding(JupyterMixin):
|
| 20 |
+
"""Draw space around content.
|
| 21 |
+
|
| 22 |
+
Example:
|
| 23 |
+
>>> print(Padding("Hello", (2, 4), style="on blue"))
|
| 24 |
+
|
| 25 |
+
Args:
|
| 26 |
+
renderable (RenderableType): String or other renderable.
|
| 27 |
+
pad (Union[int, Tuple[int]]): Padding for top, right, bottom, and left borders.
|
| 28 |
+
May be specified with 1, 2, or 4 integers (CSS style).
|
| 29 |
+
style (Union[str, Style], optional): Style for padding characters. Defaults to "none".
|
| 30 |
+
expand (bool, optional): Expand padding to fit available width. Defaults to True.
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
def __init__(
|
| 34 |
+
self,
|
| 35 |
+
renderable: "RenderableType",
|
| 36 |
+
pad: "PaddingDimensions" = (0, 0, 0, 0),
|
| 37 |
+
*,
|
| 38 |
+
style: Union[str, Style] = "none",
|
| 39 |
+
expand: bool = True,
|
| 40 |
+
):
|
| 41 |
+
self.renderable = renderable
|
| 42 |
+
self.top, self.right, self.bottom, self.left = self.unpack(pad)
|
| 43 |
+
self.style = style
|
| 44 |
+
self.expand = expand
|
| 45 |
+
|
| 46 |
+
@classmethod
|
| 47 |
+
def indent(cls, renderable: "RenderableType", level: int) -> "Padding":
|
| 48 |
+
"""Make padding instance to render an indent.
|
| 49 |
+
|
| 50 |
+
Args:
|
| 51 |
+
renderable (RenderableType): String or other renderable.
|
| 52 |
+
level (int): Number of characters to indent.
|
| 53 |
+
|
| 54 |
+
Returns:
|
| 55 |
+
Padding: A Padding instance.
|
| 56 |
+
"""
|
| 57 |
+
|
| 58 |
+
return Padding(renderable, pad=(0, 0, 0, level), expand=False)
|
| 59 |
+
|
| 60 |
+
@staticmethod
|
| 61 |
+
def unpack(pad: "PaddingDimensions") -> Tuple[int, int, int, int]:
|
| 62 |
+
"""Unpack padding specified in CSS style."""
|
| 63 |
+
if isinstance(pad, int):
|
| 64 |
+
return (pad, pad, pad, pad)
|
| 65 |
+
if len(pad) == 1:
|
| 66 |
+
_pad = pad[0]
|
| 67 |
+
return (_pad, _pad, _pad, _pad)
|
| 68 |
+
if len(pad) == 2:
|
| 69 |
+
pad_top, pad_right = pad
|
| 70 |
+
return (pad_top, pad_right, pad_top, pad_right)
|
| 71 |
+
if len(pad) == 4:
|
| 72 |
+
top, right, bottom, left = pad
|
| 73 |
+
return (top, right, bottom, left)
|
| 74 |
+
raise ValueError(f"1, 2 or 4 integers required for padding; {len(pad)} given")
|
| 75 |
+
|
| 76 |
+
def __repr__(self) -> str:
|
| 77 |
+
return f"Padding({self.renderable!r}, ({self.top},{self.right},{self.bottom},{self.left}))"
|
| 78 |
+
|
| 79 |
+
def __rich_console__(
|
| 80 |
+
self, console: "Console", options: "ConsoleOptions"
|
| 81 |
+
) -> "RenderResult":
|
| 82 |
+
style = console.get_style(self.style)
|
| 83 |
+
if self.expand:
|
| 84 |
+
width = options.max_width
|
| 85 |
+
else:
|
| 86 |
+
width = min(
|
| 87 |
+
Measurement.get(console, options, self.renderable).maximum
|
| 88 |
+
+ self.left
|
| 89 |
+
+ self.right,
|
| 90 |
+
options.max_width,
|
| 91 |
+
)
|
| 92 |
+
render_options = options.update_width(width - self.left - self.right)
|
| 93 |
+
if render_options.height is not None:
|
| 94 |
+
render_options = render_options.update_height(
|
| 95 |
+
height=render_options.height - self.top - self.bottom
|
| 96 |
+
)
|
| 97 |
+
lines = console.render_lines(
|
| 98 |
+
self.renderable, render_options, style=style, pad=True
|
| 99 |
+
)
|
| 100 |
+
_Segment = Segment
|
| 101 |
+
|
| 102 |
+
left = _Segment(" " * self.left, style) if self.left else None
|
| 103 |
+
right = (
|
| 104 |
+
[_Segment(f'{" " * self.right}', style), _Segment.line()]
|
| 105 |
+
if self.right
|
| 106 |
+
else [_Segment.line()]
|
| 107 |
+
)
|
| 108 |
+
blank_line: Optional[List[Segment]] = None
|
| 109 |
+
if self.top:
|
| 110 |
+
blank_line = [_Segment(f'{" " * width}\n', style)]
|
| 111 |
+
yield from blank_line * self.top
|
| 112 |
+
if left:
|
| 113 |
+
for line in lines:
|
| 114 |
+
yield left
|
| 115 |
+
yield from line
|
| 116 |
+
yield from right
|
| 117 |
+
else:
|
| 118 |
+
for line in lines:
|
| 119 |
+
yield from line
|
| 120 |
+
yield from right
|
| 121 |
+
if self.bottom:
|
| 122 |
+
blank_line = blank_line or [_Segment(f'{" " * width}\n', style)]
|
| 123 |
+
yield from blank_line * self.bottom
|
| 124 |
+
|
| 125 |
+
def __rich_measure__(
|
| 126 |
+
self, console: "Console", options: "ConsoleOptions"
|
| 127 |
+
) -> "Measurement":
|
| 128 |
+
max_width = options.max_width
|
| 129 |
+
extra_width = self.left + self.right
|
| 130 |
+
if max_width - extra_width < 1:
|
| 131 |
+
return Measurement(max_width, max_width)
|
| 132 |
+
measure_min, measure_max = Measurement.get(console, options, self.renderable)
|
| 133 |
+
measurement = Measurement(measure_min + extra_width, measure_max + extra_width)
|
| 134 |
+
measurement = measurement.with_maximum(max_width)
|
| 135 |
+
return measurement
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
if __name__ == "__main__": # pragma: no cover
|
| 139 |
+
from pip._vendor.rich import print
|
| 140 |
+
|
| 141 |
+
print(Padding("Hello, World", (2, 4), style="on blue"))
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/pager.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from abc import ABC, abstractmethod
|
| 2 |
+
from typing import Any
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class Pager(ABC):
|
| 6 |
+
"""Base class for a pager."""
|
| 7 |
+
|
| 8 |
+
@abstractmethod
|
| 9 |
+
def show(self, content: str) -> None:
|
| 10 |
+
"""Show content in pager.
|
| 11 |
+
|
| 12 |
+
Args:
|
| 13 |
+
content (str): Content to be displayed.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class SystemPager(Pager):
|
| 18 |
+
"""Uses the pager installed on the system."""
|
| 19 |
+
|
| 20 |
+
def _pager(self, content: str) -> Any: # pragma: no cover
|
| 21 |
+
return __import__("pydoc").pager(content)
|
| 22 |
+
|
| 23 |
+
def show(self, content: str) -> None:
|
| 24 |
+
"""Use the same pager used by pydoc."""
|
| 25 |
+
self._pager(content)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
if __name__ == "__main__": # pragma: no cover
|
| 29 |
+
from .__main__ import make_test_card
|
| 30 |
+
from .console import Console
|
| 31 |
+
|
| 32 |
+
console = Console()
|
| 33 |
+
with console.pager(styles=True):
|
| 34 |
+
console.print(make_test_card())
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/palette.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from math import sqrt
|
| 2 |
+
from functools import lru_cache
|
| 3 |
+
from typing import Sequence, Tuple, TYPE_CHECKING
|
| 4 |
+
|
| 5 |
+
from .color_triplet import ColorTriplet
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from pip._vendor.rich.table import Table
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class Palette:
|
| 12 |
+
"""A palette of available colors."""
|
| 13 |
+
|
| 14 |
+
def __init__(self, colors: Sequence[Tuple[int, int, int]]):
|
| 15 |
+
self._colors = colors
|
| 16 |
+
|
| 17 |
+
def __getitem__(self, number: int) -> ColorTriplet:
|
| 18 |
+
return ColorTriplet(*self._colors[number])
|
| 19 |
+
|
| 20 |
+
def __rich__(self) -> "Table":
|
| 21 |
+
from pip._vendor.rich.color import Color
|
| 22 |
+
from pip._vendor.rich.style import Style
|
| 23 |
+
from pip._vendor.rich.text import Text
|
| 24 |
+
from pip._vendor.rich.table import Table
|
| 25 |
+
|
| 26 |
+
table = Table(
|
| 27 |
+
"index",
|
| 28 |
+
"RGB",
|
| 29 |
+
"Color",
|
| 30 |
+
title="Palette",
|
| 31 |
+
caption=f"{len(self._colors)} colors",
|
| 32 |
+
highlight=True,
|
| 33 |
+
caption_justify="right",
|
| 34 |
+
)
|
| 35 |
+
for index, color in enumerate(self._colors):
|
| 36 |
+
table.add_row(
|
| 37 |
+
str(index),
|
| 38 |
+
repr(color),
|
| 39 |
+
Text(" " * 16, style=Style(bgcolor=Color.from_rgb(*color))),
|
| 40 |
+
)
|
| 41 |
+
return table
|
| 42 |
+
|
| 43 |
+
# This is somewhat inefficient and needs caching
|
| 44 |
+
@lru_cache(maxsize=1024)
|
| 45 |
+
def match(self, color: Tuple[int, int, int]) -> int:
|
| 46 |
+
"""Find a color from a palette that most closely matches a given color.
|
| 47 |
+
|
| 48 |
+
Args:
|
| 49 |
+
color (Tuple[int, int, int]): RGB components in range 0 > 255.
|
| 50 |
+
|
| 51 |
+
Returns:
|
| 52 |
+
int: Index of closes matching color.
|
| 53 |
+
"""
|
| 54 |
+
red1, green1, blue1 = color
|
| 55 |
+
_sqrt = sqrt
|
| 56 |
+
get_color = self._colors.__getitem__
|
| 57 |
+
|
| 58 |
+
def get_color_distance(index: int) -> float:
|
| 59 |
+
"""Get the distance to a color."""
|
| 60 |
+
red2, green2, blue2 = get_color(index)
|
| 61 |
+
red_mean = (red1 + red2) // 2
|
| 62 |
+
red = red1 - red2
|
| 63 |
+
green = green1 - green2
|
| 64 |
+
blue = blue1 - blue2
|
| 65 |
+
return _sqrt(
|
| 66 |
+
(((512 + red_mean) * red * red) >> 8)
|
| 67 |
+
+ 4 * green * green
|
| 68 |
+
+ (((767 - red_mean) * blue * blue) >> 8)
|
| 69 |
+
)
|
| 70 |
+
|
| 71 |
+
min_index = min(range(len(self._colors)), key=get_color_distance)
|
| 72 |
+
return min_index
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
if __name__ == "__main__": # pragma: no cover
|
| 76 |
+
import colorsys
|
| 77 |
+
from typing import Iterable
|
| 78 |
+
from pip._vendor.rich.color import Color
|
| 79 |
+
from pip._vendor.rich.console import Console, ConsoleOptions
|
| 80 |
+
from pip._vendor.rich.segment import Segment
|
| 81 |
+
from pip._vendor.rich.style import Style
|
| 82 |
+
|
| 83 |
+
class ColorBox:
|
| 84 |
+
def __rich_console__(
|
| 85 |
+
self, console: Console, options: ConsoleOptions
|
| 86 |
+
) -> Iterable[Segment]:
|
| 87 |
+
height = console.size.height - 3
|
| 88 |
+
for y in range(0, height):
|
| 89 |
+
for x in range(options.max_width):
|
| 90 |
+
h = x / options.max_width
|
| 91 |
+
l = y / (height + 1)
|
| 92 |
+
r1, g1, b1 = colorsys.hls_to_rgb(h, l, 1.0)
|
| 93 |
+
r2, g2, b2 = colorsys.hls_to_rgb(h, l + (1 / height / 2), 1.0)
|
| 94 |
+
bgcolor = Color.from_rgb(r1 * 255, g1 * 255, b1 * 255)
|
| 95 |
+
color = Color.from_rgb(r2 * 255, g2 * 255, b2 * 255)
|
| 96 |
+
yield Segment("▄", Style(color=color, bgcolor=bgcolor))
|
| 97 |
+
yield Segment.line()
|
| 98 |
+
|
| 99 |
+
console = Console()
|
| 100 |
+
console.print(ColorBox())
|
external/alphageometry/.venv-ag/Lib/site-packages/pip/_vendor/rich/panel.py
ADDED
|
@@ -0,0 +1,317 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, Optional
|
| 2 |
+
|
| 3 |
+
from .align import AlignMethod
|
| 4 |
+
from .box import ROUNDED, Box
|
| 5 |
+
from .cells import cell_len
|
| 6 |
+
from .jupyter import JupyterMixin
|
| 7 |
+
from .measure import Measurement, measure_renderables
|
| 8 |
+
from .padding import Padding, PaddingDimensions
|
| 9 |
+
from .segment import Segment
|
| 10 |
+
from .style import Style, StyleType
|
| 11 |
+
from .text import Text, TextType
|
| 12 |
+
|
| 13 |
+
if TYPE_CHECKING:
|
| 14 |
+
from .console import Console, ConsoleOptions, RenderableType, RenderResult
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class Panel(JupyterMixin):
|
| 18 |
+
"""A console renderable that draws a border around its contents.
|
| 19 |
+
|
| 20 |
+
Example:
|
| 21 |
+
>>> console.print(Panel("Hello, World!"))
|
| 22 |
+
|
| 23 |
+
Args:
|
| 24 |
+
renderable (RenderableType): A console renderable object.
|
| 25 |
+
box (Box): A Box instance that defines the look of the border (see :ref:`appendix_box`. Defaults to box.ROUNDED.
|
| 26 |
+
title (Optional[TextType], optional): Optional title displayed in panel header. Defaults to None.
|
| 27 |
+
title_align (AlignMethod, optional): Alignment of title. Defaults to "center".
|
| 28 |
+
subtitle (Optional[TextType], optional): Optional subtitle displayed in panel footer. Defaults to None.
|
| 29 |
+
subtitle_align (AlignMethod, optional): Alignment of subtitle. Defaults to "center".
|
| 30 |
+
safe_box (bool, optional): Disable box characters that don't display on windows legacy terminal with *raster* fonts. Defaults to True.
|
| 31 |
+
expand (bool, optional): If True the panel will stretch to fill the console width, otherwise it will be sized to fit the contents. Defaults to True.
|
| 32 |
+
style (str, optional): The style of the panel (border and contents). Defaults to "none".
|
| 33 |
+
border_style (str, optional): The style of the border. Defaults to "none".
|
| 34 |
+
width (Optional[int], optional): Optional width of panel. Defaults to None to auto-detect.
|
| 35 |
+
height (Optional[int], optional): Optional height of panel. Defaults to None to auto-detect.
|
| 36 |
+
padding (Optional[PaddingDimensions]): Optional padding around renderable. Defaults to 0.
|
| 37 |
+
highlight (bool, optional): Enable automatic highlighting of panel title (if str). Defaults to False.
|
| 38 |
+
"""
|
| 39 |
+
|
| 40 |
+
def __init__(
|
| 41 |
+
self,
|
| 42 |
+
renderable: "RenderableType",
|
| 43 |
+
box: Box = ROUNDED,
|
| 44 |
+
*,
|
| 45 |
+
title: Optional[TextType] = None,
|
| 46 |
+
title_align: AlignMethod = "center",
|
| 47 |
+
subtitle: Optional[TextType] = None,
|
| 48 |
+
subtitle_align: AlignMethod = "center",
|
| 49 |
+
safe_box: Optional[bool] = None,
|
| 50 |
+
expand: bool = True,
|
| 51 |
+
style: StyleType = "none",
|
| 52 |
+
border_style: StyleType = "none",
|
| 53 |
+
width: Optional[int] = None,
|
| 54 |
+
height: Optional[int] = None,
|
| 55 |
+
padding: PaddingDimensions = (0, 1),
|
| 56 |
+
highlight: bool = False,
|
| 57 |
+
) -> None:
|
| 58 |
+
self.renderable = renderable
|
| 59 |
+
self.box = box
|
| 60 |
+
self.title = title
|
| 61 |
+
self.title_align: AlignMethod = title_align
|
| 62 |
+
self.subtitle = subtitle
|
| 63 |
+
self.subtitle_align = subtitle_align
|
| 64 |
+
self.safe_box = safe_box
|
| 65 |
+
self.expand = expand
|
| 66 |
+
self.style = style
|
| 67 |
+
self.border_style = border_style
|
| 68 |
+
self.width = width
|
| 69 |
+
self.height = height
|
| 70 |
+
self.padding = padding
|
| 71 |
+
self.highlight = highlight
|
| 72 |
+
|
| 73 |
+
@classmethod
|
| 74 |
+
def fit(
|
| 75 |
+
cls,
|
| 76 |
+
renderable: "RenderableType",
|
| 77 |
+
box: Box = ROUNDED,
|
| 78 |
+
*,
|
| 79 |
+
title: Optional[TextType] = None,
|
| 80 |
+
title_align: AlignMethod = "center",
|
| 81 |
+
subtitle: Optional[TextType] = None,
|
| 82 |
+
subtitle_align: AlignMethod = "center",
|
| 83 |
+
safe_box: Optional[bool] = None,
|
| 84 |
+
style: StyleType = "none",
|
| 85 |
+
border_style: StyleType = "none",
|
| 86 |
+
width: Optional[int] = None,
|
| 87 |
+
height: Optional[int] = None,
|
| 88 |
+
padding: PaddingDimensions = (0, 1),
|
| 89 |
+
highlight: bool = False,
|
| 90 |
+
) -> "Panel":
|
| 91 |
+
"""An alternative constructor that sets expand=False."""
|
| 92 |
+
return cls(
|
| 93 |
+
renderable,
|
| 94 |
+
box,
|
| 95 |
+
title=title,
|
| 96 |
+
title_align=title_align,
|
| 97 |
+
subtitle=subtitle,
|
| 98 |
+
subtitle_align=subtitle_align,
|
| 99 |
+
safe_box=safe_box,
|
| 100 |
+
style=style,
|
| 101 |
+
border_style=border_style,
|
| 102 |
+
width=width,
|
| 103 |
+
height=height,
|
| 104 |
+
padding=padding,
|
| 105 |
+
highlight=highlight,
|
| 106 |
+
expand=False,
|
| 107 |
+
)
|
| 108 |
+
|
| 109 |
+
@property
|
| 110 |
+
def _title(self) -> Optional[Text]:
|
| 111 |
+
if self.title:
|
| 112 |
+
title_text = (
|
| 113 |
+
Text.from_markup(self.title)
|
| 114 |
+
if isinstance(self.title, str)
|
| 115 |
+
else self.title.copy()
|
| 116 |
+
)
|
| 117 |
+
title_text.end = ""
|
| 118 |
+
title_text.plain = title_text.plain.replace("\n", " ")
|
| 119 |
+
title_text.no_wrap = True
|
| 120 |
+
title_text.expand_tabs()
|
| 121 |
+
title_text.pad(1)
|
| 122 |
+
return title_text
|
| 123 |
+
return None
|
| 124 |
+
|
| 125 |
+
@property
|
| 126 |
+
def _subtitle(self) -> Optional[Text]:
|
| 127 |
+
if self.subtitle:
|
| 128 |
+
subtitle_text = (
|
| 129 |
+
Text.from_markup(self.subtitle)
|
| 130 |
+
if isinstance(self.subtitle, str)
|
| 131 |
+
else self.subtitle.copy()
|
| 132 |
+
)
|
| 133 |
+
subtitle_text.end = ""
|
| 134 |
+
subtitle_text.plain = subtitle_text.plain.replace("\n", " ")
|
| 135 |
+
subtitle_text.no_wrap = True
|
| 136 |
+
subtitle_text.expand_tabs()
|
| 137 |
+
subtitle_text.pad(1)
|
| 138 |
+
return subtitle_text
|
| 139 |
+
return None
|
| 140 |
+
|
| 141 |
+
def __rich_console__(
|
| 142 |
+
self, console: "Console", options: "ConsoleOptions"
|
| 143 |
+
) -> "RenderResult":
|
| 144 |
+
_padding = Padding.unpack(self.padding)
|
| 145 |
+
renderable = (
|
| 146 |
+
Padding(self.renderable, _padding) if any(_padding) else self.renderable
|
| 147 |
+
)
|
| 148 |
+
style = console.get_style(self.style)
|
| 149 |
+
border_style = style + console.get_style(self.border_style)
|
| 150 |
+
width = (
|
| 151 |
+
options.max_width
|
| 152 |
+
if self.width is None
|
| 153 |
+
else min(options.max_width, self.width)
|
| 154 |
+
)
|
| 155 |
+
|
| 156 |
+
safe_box: bool = console.safe_box if self.safe_box is None else self.safe_box
|
| 157 |
+
box = self.box.substitute(options, safe=safe_box)
|
| 158 |
+
|
| 159 |
+
def align_text(
|
| 160 |
+
text: Text, width: int, align: str, character: str, style: Style
|
| 161 |
+
) -> Text:
|
| 162 |
+
"""Gets new aligned text.
|
| 163 |
+
|
| 164 |
+
Args:
|
| 165 |
+
text (Text): Title or subtitle text.
|
| 166 |
+
width (int): Desired width.
|
| 167 |
+
align (str): Alignment.
|
| 168 |
+
character (str): Character for alignment.
|
| 169 |
+
style (Style): Border style
|
| 170 |
+
|
| 171 |
+
Returns:
|
| 172 |
+
Text: New text instance
|
| 173 |
+
"""
|
| 174 |
+
text = text.copy()
|
| 175 |
+
text.truncate(width)
|
| 176 |
+
excess_space = width - cell_len(text.plain)
|
| 177 |
+
if text.style:
|
| 178 |
+
text.stylize(console.get_style(text.style))
|
| 179 |
+
|
| 180 |
+
if excess_space:
|
| 181 |
+
if align == "left":
|
| 182 |
+
return Text.assemble(
|
| 183 |
+
text,
|
| 184 |
+
(character * excess_space, style),
|
| 185 |
+
no_wrap=True,
|
| 186 |
+
end="",
|
| 187 |
+
)
|
| 188 |
+
elif align == "center":
|
| 189 |
+
left = excess_space // 2
|
| 190 |
+
return Text.assemble(
|
| 191 |
+
(character * left, style),
|
| 192 |
+
text,
|
| 193 |
+
(character * (excess_space - left), style),
|
| 194 |
+
no_wrap=True,
|
| 195 |
+
end="",
|
| 196 |
+
)
|
| 197 |
+
else:
|
| 198 |
+
return Text.assemble(
|
| 199 |
+
(character * excess_space, style),
|
| 200 |
+
text,
|
| 201 |
+
no_wrap=True,
|
| 202 |
+
end="",
|
| 203 |
+
)
|
| 204 |
+
return text
|
| 205 |
+
|
| 206 |
+
title_text = self._title
|
| 207 |
+
if title_text is not None:
|
| 208 |
+
title_text.stylize_before(border_style)
|
| 209 |
+
|
| 210 |
+
child_width = (
|
| 211 |
+
width - 2
|
| 212 |
+
if self.expand
|
| 213 |
+
else console.measure(
|
| 214 |
+
renderable, options=options.update_width(width - 2)
|
| 215 |
+
).maximum
|
| 216 |
+
)
|
| 217 |
+
child_height = self.height or options.height or None
|
| 218 |
+
if child_height:
|
| 219 |
+
child_height -= 2
|
| 220 |
+
if title_text is not None:
|
| 221 |
+
child_width = min(
|
| 222 |
+
options.max_width - 2, max(child_width, title_text.cell_len + 2)
|
| 223 |
+
)
|
| 224 |
+
|
| 225 |
+
width = child_width + 2
|
| 226 |
+
child_options = options.update(
|
| 227 |
+
width=child_width, height=child_height, highlight=self.highlight
|
| 228 |
+
)
|
| 229 |
+
lines = console.render_lines(renderable, child_options, style=style)
|
| 230 |
+
|
| 231 |
+
line_start = Segment(box.mid_left, border_style)
|
| 232 |
+
line_end = Segment(f"{box.mid_right}", border_style)
|
| 233 |
+
new_line = Segment.line()
|
| 234 |
+
if title_text is None or width <= 4:
|
| 235 |
+
yield Segment(box.get_top([width - 2]), border_style)
|
| 236 |
+
else:
|
| 237 |
+
title_text = align_text(
|
| 238 |
+
title_text,
|
| 239 |
+
width - 4,
|
| 240 |
+
self.title_align,
|
| 241 |
+
box.top,
|
| 242 |
+
border_style,
|
| 243 |
+
)
|
| 244 |
+
yield Segment(box.top_left + box.top, border_style)
|
| 245 |
+
yield from console.render(title_text, child_options.update_width(width - 4))
|
| 246 |
+
yield Segment(box.top + box.top_right, border_style)
|
| 247 |
+
|
| 248 |
+
yield new_line
|
| 249 |
+
for line in lines:
|
| 250 |
+
yield line_start
|
| 251 |
+
yield from line
|
| 252 |
+
yield line_end
|
| 253 |
+
yield new_line
|
| 254 |
+
|
| 255 |
+
subtitle_text = self._subtitle
|
| 256 |
+
if subtitle_text is not None:
|
| 257 |
+
subtitle_text.stylize_before(border_style)
|
| 258 |
+
|
| 259 |
+
if subtitle_text is None or width <= 4:
|
| 260 |
+
yield Segment(box.get_bottom([width - 2]), border_style)
|
| 261 |
+
else:
|
| 262 |
+
subtitle_text = align_text(
|
| 263 |
+
subtitle_text,
|
| 264 |
+
width - 4,
|
| 265 |
+
self.subtitle_align,
|
| 266 |
+
box.bottom,
|
| 267 |
+
border_style,
|
| 268 |
+
)
|
| 269 |
+
yield Segment(box.bottom_left + box.bottom, border_style)
|
| 270 |
+
yield from console.render(
|
| 271 |
+
subtitle_text, child_options.update_width(width - 4)
|
| 272 |
+
)
|
| 273 |
+
yield Segment(box.bottom + box.bottom_right, border_style)
|
| 274 |
+
|
| 275 |
+
yield new_line
|
| 276 |
+
|
| 277 |
+
def __rich_measure__(
|
| 278 |
+
self, console: "Console", options: "ConsoleOptions"
|
| 279 |
+
) -> "Measurement":
|
| 280 |
+
_title = self._title
|
| 281 |
+
_, right, _, left = Padding.unpack(self.padding)
|
| 282 |
+
padding = left + right
|
| 283 |
+
renderables = [self.renderable, _title] if _title else [self.renderable]
|
| 284 |
+
|
| 285 |
+
if self.width is None:
|
| 286 |
+
width = (
|
| 287 |
+
measure_renderables(
|
| 288 |
+
console,
|
| 289 |
+
options.update_width(options.max_width - padding - 2),
|
| 290 |
+
renderables,
|
| 291 |
+
).maximum
|
| 292 |
+
+ padding
|
| 293 |
+
+ 2
|
| 294 |
+
)
|
| 295 |
+
else:
|
| 296 |
+
width = self.width
|
| 297 |
+
return Measurement(width, width)
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
if __name__ == "__main__": # pragma: no cover
|
| 301 |
+
from .console import Console
|
| 302 |
+
|
| 303 |
+
c = Console()
|
| 304 |
+
|
| 305 |
+
from .box import DOUBLE, ROUNDED
|
| 306 |
+
from .padding import Padding
|
| 307 |
+
|
| 308 |
+
p = Panel(
|
| 309 |
+
"Hello, World!",
|
| 310 |
+
title="rich.Panel",
|
| 311 |
+
style="white on blue",
|
| 312 |
+
box=DOUBLE,
|
| 313 |
+
padding=1,
|
| 314 |
+
)
|
| 315 |
+
|
| 316 |
+
c.print()
|
| 317 |
+
c.print(p)
|
hfenv/Lib/site-packages/setuptools/command/build_py.py
ADDED
|
@@ -0,0 +1,368 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from functools import partial
|
| 2 |
+
from glob import glob
|
| 3 |
+
from distutils.util import convert_path
|
| 4 |
+
import distutils.command.build_py as orig
|
| 5 |
+
import os
|
| 6 |
+
import fnmatch
|
| 7 |
+
import textwrap
|
| 8 |
+
import io
|
| 9 |
+
import distutils.errors
|
| 10 |
+
import itertools
|
| 11 |
+
import stat
|
| 12 |
+
import warnings
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from typing import Dict, Iterable, Iterator, List, Optional, Tuple
|
| 15 |
+
|
| 16 |
+
from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
|
| 17 |
+
from setuptools.extern.more_itertools import unique_everseen
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def make_writable(target):
|
| 21 |
+
os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class build_py(orig.build_py):
|
| 25 |
+
"""Enhanced 'build_py' command that includes data files with packages
|
| 26 |
+
|
| 27 |
+
The data files are specified via a 'package_data' argument to 'setup()'.
|
| 28 |
+
See 'setuptools.dist.Distribution' for more details.
|
| 29 |
+
|
| 30 |
+
Also, this version of the 'build_py' command allows you to specify both
|
| 31 |
+
'py_modules' and 'packages' in the same setup operation.
|
| 32 |
+
"""
|
| 33 |
+
editable_mode: bool = False
|
| 34 |
+
existing_egg_info_dir: Optional[str] = None #: Private API, internal use only.
|
| 35 |
+
|
| 36 |
+
def finalize_options(self):
|
| 37 |
+
orig.build_py.finalize_options(self)
|
| 38 |
+
self.package_data = self.distribution.package_data
|
| 39 |
+
self.exclude_package_data = self.distribution.exclude_package_data or {}
|
| 40 |
+
if 'data_files' in self.__dict__:
|
| 41 |
+
del self.__dict__['data_files']
|
| 42 |
+
self.__updated_files = []
|
| 43 |
+
|
| 44 |
+
def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
|
| 45 |
+
link=None, level=1):
|
| 46 |
+
# Overwrite base class to allow using links
|
| 47 |
+
if link:
|
| 48 |
+
infile = str(Path(infile).resolve())
|
| 49 |
+
outfile = str(Path(outfile).resolve())
|
| 50 |
+
return super().copy_file(infile, outfile, preserve_mode, preserve_times,
|
| 51 |
+
link, level)
|
| 52 |
+
|
| 53 |
+
def run(self):
|
| 54 |
+
"""Build modules, packages, and copy data files to build directory"""
|
| 55 |
+
if not (self.py_modules or self.packages) or self.editable_mode:
|
| 56 |
+
return
|
| 57 |
+
|
| 58 |
+
if self.py_modules:
|
| 59 |
+
self.build_modules()
|
| 60 |
+
|
| 61 |
+
if self.packages:
|
| 62 |
+
self.build_packages()
|
| 63 |
+
self.build_package_data()
|
| 64 |
+
|
| 65 |
+
# Only compile actual .py files, using our base class' idea of what our
|
| 66 |
+
# output files are.
|
| 67 |
+
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
|
| 68 |
+
|
| 69 |
+
def __getattr__(self, attr):
|
| 70 |
+
"lazily compute data files"
|
| 71 |
+
if attr == 'data_files':
|
| 72 |
+
self.data_files = self._get_data_files()
|
| 73 |
+
return self.data_files
|
| 74 |
+
return orig.build_py.__getattr__(self, attr)
|
| 75 |
+
|
| 76 |
+
def build_module(self, module, module_file, package):
|
| 77 |
+
outfile, copied = orig.build_py.build_module(self, module, module_file, package)
|
| 78 |
+
if copied:
|
| 79 |
+
self.__updated_files.append(outfile)
|
| 80 |
+
return outfile, copied
|
| 81 |
+
|
| 82 |
+
def _get_data_files(self):
|
| 83 |
+
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
|
| 84 |
+
self.analyze_manifest()
|
| 85 |
+
return list(map(self._get_pkg_data_files, self.packages or ()))
|
| 86 |
+
|
| 87 |
+
def get_data_files_without_manifest(self):
|
| 88 |
+
"""
|
| 89 |
+
Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
|
| 90 |
+
but without triggering any attempt to analyze or build the manifest.
|
| 91 |
+
"""
|
| 92 |
+
# Prevent eventual errors from unset `manifest_files`
|
| 93 |
+
# (that would otherwise be set by `analyze_manifest`)
|
| 94 |
+
self.__dict__.setdefault('manifest_files', {})
|
| 95 |
+
return list(map(self._get_pkg_data_files, self.packages or ()))
|
| 96 |
+
|
| 97 |
+
def _get_pkg_data_files(self, package):
|
| 98 |
+
# Locate package source directory
|
| 99 |
+
src_dir = self.get_package_dir(package)
|
| 100 |
+
|
| 101 |
+
# Compute package build directory
|
| 102 |
+
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
|
| 103 |
+
|
| 104 |
+
# Strip directory from globbed filenames
|
| 105 |
+
filenames = [
|
| 106 |
+
os.path.relpath(file, src_dir)
|
| 107 |
+
for file in self.find_data_files(package, src_dir)
|
| 108 |
+
]
|
| 109 |
+
return package, src_dir, build_dir, filenames
|
| 110 |
+
|
| 111 |
+
def find_data_files(self, package, src_dir):
|
| 112 |
+
"""Return filenames for package's data files in 'src_dir'"""
|
| 113 |
+
patterns = self._get_platform_patterns(
|
| 114 |
+
self.package_data,
|
| 115 |
+
package,
|
| 116 |
+
src_dir,
|
| 117 |
+
)
|
| 118 |
+
globs_expanded = map(partial(glob, recursive=True), patterns)
|
| 119 |
+
# flatten the expanded globs into an iterable of matches
|
| 120 |
+
globs_matches = itertools.chain.from_iterable(globs_expanded)
|
| 121 |
+
glob_files = filter(os.path.isfile, globs_matches)
|
| 122 |
+
files = itertools.chain(
|
| 123 |
+
self.manifest_files.get(package, []),
|
| 124 |
+
glob_files,
|
| 125 |
+
)
|
| 126 |
+
return self.exclude_data_files(package, src_dir, files)
|
| 127 |
+
|
| 128 |
+
def get_outputs(self, include_bytecode=1) -> List[str]:
|
| 129 |
+
"""See :class:`setuptools.commands.build.SubCommand`"""
|
| 130 |
+
if self.editable_mode:
|
| 131 |
+
return list(self.get_output_mapping().keys())
|
| 132 |
+
return super().get_outputs(include_bytecode)
|
| 133 |
+
|
| 134 |
+
def get_output_mapping(self) -> Dict[str, str]:
|
| 135 |
+
"""See :class:`setuptools.commands.build.SubCommand`"""
|
| 136 |
+
mapping = itertools.chain(
|
| 137 |
+
self._get_package_data_output_mapping(),
|
| 138 |
+
self._get_module_mapping(),
|
| 139 |
+
)
|
| 140 |
+
return dict(sorted(mapping, key=lambda x: x[0]))
|
| 141 |
+
|
| 142 |
+
def _get_module_mapping(self) -> Iterator[Tuple[str, str]]:
|
| 143 |
+
"""Iterate over all modules producing (dest, src) pairs."""
|
| 144 |
+
for (package, module, module_file) in self.find_all_modules():
|
| 145 |
+
package = package.split('.')
|
| 146 |
+
filename = self.get_module_outfile(self.build_lib, package, module)
|
| 147 |
+
yield (filename, module_file)
|
| 148 |
+
|
| 149 |
+
def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]:
|
| 150 |
+
"""Iterate over package data producing (dest, src) pairs."""
|
| 151 |
+
for package, src_dir, build_dir, filenames in self.data_files:
|
| 152 |
+
for filename in filenames:
|
| 153 |
+
target = os.path.join(build_dir, filename)
|
| 154 |
+
srcfile = os.path.join(src_dir, filename)
|
| 155 |
+
yield (target, srcfile)
|
| 156 |
+
|
| 157 |
+
def build_package_data(self):
|
| 158 |
+
"""Copy data files into build directory"""
|
| 159 |
+
for target, srcfile in self._get_package_data_output_mapping():
|
| 160 |
+
self.mkpath(os.path.dirname(target))
|
| 161 |
+
_outf, _copied = self.copy_file(srcfile, target)
|
| 162 |
+
make_writable(target)
|
| 163 |
+
|
| 164 |
+
def analyze_manifest(self):
|
| 165 |
+
self.manifest_files = mf = {}
|
| 166 |
+
if not self.distribution.include_package_data:
|
| 167 |
+
return
|
| 168 |
+
src_dirs = {}
|
| 169 |
+
for package in self.packages or ():
|
| 170 |
+
# Locate package source directory
|
| 171 |
+
src_dirs[assert_relative(self.get_package_dir(package))] = package
|
| 172 |
+
|
| 173 |
+
if (
|
| 174 |
+
getattr(self, 'existing_egg_info_dir', None)
|
| 175 |
+
and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
|
| 176 |
+
):
|
| 177 |
+
egg_info_dir = self.existing_egg_info_dir
|
| 178 |
+
manifest = Path(egg_info_dir, "SOURCES.txt")
|
| 179 |
+
files = manifest.read_text(encoding="utf-8").splitlines()
|
| 180 |
+
else:
|
| 181 |
+
self.run_command('egg_info')
|
| 182 |
+
ei_cmd = self.get_finalized_command('egg_info')
|
| 183 |
+
egg_info_dir = ei_cmd.egg_info
|
| 184 |
+
files = ei_cmd.filelist.files
|
| 185 |
+
|
| 186 |
+
check = _IncludePackageDataAbuse()
|
| 187 |
+
for path in self._filter_build_files(files, egg_info_dir):
|
| 188 |
+
d, f = os.path.split(assert_relative(path))
|
| 189 |
+
prev = None
|
| 190 |
+
oldf = f
|
| 191 |
+
while d and d != prev and d not in src_dirs:
|
| 192 |
+
prev = d
|
| 193 |
+
d, df = os.path.split(d)
|
| 194 |
+
f = os.path.join(df, f)
|
| 195 |
+
if d in src_dirs:
|
| 196 |
+
if f == oldf:
|
| 197 |
+
if check.is_module(f):
|
| 198 |
+
continue # it's a module, not data
|
| 199 |
+
else:
|
| 200 |
+
importable = check.importable_subpackage(src_dirs[d], f)
|
| 201 |
+
if importable:
|
| 202 |
+
check.warn(importable)
|
| 203 |
+
mf.setdefault(src_dirs[d], []).append(path)
|
| 204 |
+
|
| 205 |
+
def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[str]:
|
| 206 |
+
"""
|
| 207 |
+
``build_meta`` may try to create egg_info outside of the project directory,
|
| 208 |
+
and this can be problematic for certain plugins (reported in issue #3500).
|
| 209 |
+
|
| 210 |
+
Extensions might also include between their sources files created on the
|
| 211 |
+
``build_lib`` and ``build_temp`` directories.
|
| 212 |
+
|
| 213 |
+
This function should filter this case of invalid files out.
|
| 214 |
+
"""
|
| 215 |
+
build = self.get_finalized_command("build")
|
| 216 |
+
build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base)
|
| 217 |
+
norm_dirs = [os.path.normpath(p) for p in build_dirs if p]
|
| 218 |
+
|
| 219 |
+
for file in files:
|
| 220 |
+
norm_path = os.path.normpath(file)
|
| 221 |
+
if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
|
| 222 |
+
yield file
|
| 223 |
+
|
| 224 |
+
def get_data_files(self):
|
| 225 |
+
pass # Lazily compute data files in _get_data_files() function.
|
| 226 |
+
|
| 227 |
+
def check_package(self, package, package_dir):
|
| 228 |
+
"""Check namespace packages' __init__ for declare_namespace"""
|
| 229 |
+
try:
|
| 230 |
+
return self.packages_checked[package]
|
| 231 |
+
except KeyError:
|
| 232 |
+
pass
|
| 233 |
+
|
| 234 |
+
init_py = orig.build_py.check_package(self, package, package_dir)
|
| 235 |
+
self.packages_checked[package] = init_py
|
| 236 |
+
|
| 237 |
+
if not init_py or not self.distribution.namespace_packages:
|
| 238 |
+
return init_py
|
| 239 |
+
|
| 240 |
+
for pkg in self.distribution.namespace_packages:
|
| 241 |
+
if pkg == package or pkg.startswith(package + '.'):
|
| 242 |
+
break
|
| 243 |
+
else:
|
| 244 |
+
return init_py
|
| 245 |
+
|
| 246 |
+
with io.open(init_py, 'rb') as f:
|
| 247 |
+
contents = f.read()
|
| 248 |
+
if b'declare_namespace' not in contents:
|
| 249 |
+
raise distutils.errors.DistutilsError(
|
| 250 |
+
"Namespace package problem: %s is a namespace package, but "
|
| 251 |
+
"its\n__init__.py does not call declare_namespace()! Please "
|
| 252 |
+
'fix it.\n(See the setuptools manual under '
|
| 253 |
+
'"Namespace Packages" for details.)\n"' % (package,)
|
| 254 |
+
)
|
| 255 |
+
return init_py
|
| 256 |
+
|
| 257 |
+
def initialize_options(self):
|
| 258 |
+
self.packages_checked = {}
|
| 259 |
+
orig.build_py.initialize_options(self)
|
| 260 |
+
self.editable_mode = False
|
| 261 |
+
self.existing_egg_info_dir = None
|
| 262 |
+
|
| 263 |
+
def get_package_dir(self, package):
|
| 264 |
+
res = orig.build_py.get_package_dir(self, package)
|
| 265 |
+
if self.distribution.src_root is not None:
|
| 266 |
+
return os.path.join(self.distribution.src_root, res)
|
| 267 |
+
return res
|
| 268 |
+
|
| 269 |
+
def exclude_data_files(self, package, src_dir, files):
|
| 270 |
+
"""Filter filenames for package's data files in 'src_dir'"""
|
| 271 |
+
files = list(files)
|
| 272 |
+
patterns = self._get_platform_patterns(
|
| 273 |
+
self.exclude_package_data,
|
| 274 |
+
package,
|
| 275 |
+
src_dir,
|
| 276 |
+
)
|
| 277 |
+
match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
|
| 278 |
+
# flatten the groups of matches into an iterable of matches
|
| 279 |
+
matches = itertools.chain.from_iterable(match_groups)
|
| 280 |
+
bad = set(matches)
|
| 281 |
+
keepers = (fn for fn in files if fn not in bad)
|
| 282 |
+
# ditch dupes
|
| 283 |
+
return list(unique_everseen(keepers))
|
| 284 |
+
|
| 285 |
+
@staticmethod
|
| 286 |
+
def _get_platform_patterns(spec, package, src_dir):
|
| 287 |
+
"""
|
| 288 |
+
yield platform-specific path patterns (suitable for glob
|
| 289 |
+
or fn_match) from a glob-based spec (such as
|
| 290 |
+
self.package_data or self.exclude_package_data)
|
| 291 |
+
matching package in src_dir.
|
| 292 |
+
"""
|
| 293 |
+
raw_patterns = itertools.chain(
|
| 294 |
+
spec.get('', []),
|
| 295 |
+
spec.get(package, []),
|
| 296 |
+
)
|
| 297 |
+
return (
|
| 298 |
+
# Each pattern has to be converted to a platform-specific path
|
| 299 |
+
os.path.join(src_dir, convert_path(pattern))
|
| 300 |
+
for pattern in raw_patterns
|
| 301 |
+
)
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def assert_relative(path):
|
| 305 |
+
if not os.path.isabs(path):
|
| 306 |
+
return path
|
| 307 |
+
from distutils.errors import DistutilsSetupError
|
| 308 |
+
|
| 309 |
+
msg = (
|
| 310 |
+
textwrap.dedent(
|
| 311 |
+
"""
|
| 312 |
+
Error: setup script specifies an absolute path:
|
| 313 |
+
|
| 314 |
+
%s
|
| 315 |
+
|
| 316 |
+
setup() arguments must *always* be /-separated paths relative to the
|
| 317 |
+
setup.py directory, *never* absolute paths.
|
| 318 |
+
"""
|
| 319 |
+
).lstrip()
|
| 320 |
+
% path
|
| 321 |
+
)
|
| 322 |
+
raise DistutilsSetupError(msg)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
class _IncludePackageDataAbuse:
|
| 326 |
+
"""Inform users that package or module is included as 'data file'"""
|
| 327 |
+
|
| 328 |
+
MESSAGE = """\
|
| 329 |
+
Installing {importable!r} as data is deprecated, please list it in `packages`.
|
| 330 |
+
!!\n\n
|
| 331 |
+
############################
|
| 332 |
+
# Package would be ignored #
|
| 333 |
+
############################
|
| 334 |
+
Python recognizes {importable!r} as an importable package,
|
| 335 |
+
but it is not listed in the `packages` configuration of setuptools.
|
| 336 |
+
|
| 337 |
+
{importable!r} has been automatically added to the distribution only
|
| 338 |
+
because it may contain data files, but this behavior is likely to change
|
| 339 |
+
in future versions of setuptools (and therefore is considered deprecated).
|
| 340 |
+
|
| 341 |
+
Please make sure that {importable!r} is included as a package by using
|
| 342 |
+
the `packages` configuration field or the proper discovery methods
|
| 343 |
+
(for example by using `find_namespace_packages(...)`/`find_namespace:`
|
| 344 |
+
instead of `find_packages(...)`/`find:`).
|
| 345 |
+
|
| 346 |
+
You can read more about "package discovery" and "data files" on setuptools
|
| 347 |
+
documentation page.
|
| 348 |
+
\n\n!!
|
| 349 |
+
"""
|
| 350 |
+
|
| 351 |
+
def __init__(self):
|
| 352 |
+
self._already_warned = set()
|
| 353 |
+
|
| 354 |
+
def is_module(self, file):
|
| 355 |
+
return file.endswith(".py") and file[:-len(".py")].isidentifier()
|
| 356 |
+
|
| 357 |
+
def importable_subpackage(self, parent, file):
|
| 358 |
+
pkg = Path(file).parent
|
| 359 |
+
parts = list(itertools.takewhile(str.isidentifier, pkg.parts))
|
| 360 |
+
if parts:
|
| 361 |
+
return ".".join([parent, *parts])
|
| 362 |
+
return None
|
| 363 |
+
|
| 364 |
+
def warn(self, importable):
|
| 365 |
+
if importable not in self._already_warned:
|
| 366 |
+
msg = textwrap.dedent(self.MESSAGE).format(importable=importable)
|
| 367 |
+
warnings.warn(msg, SetuptoolsDeprecationWarning, stacklevel=2)
|
| 368 |
+
self._already_warned.add(importable)
|
hfenv/Lib/site-packages/setuptools/command/develop.py
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils.util import convert_path
|
| 2 |
+
from distutils import log
|
| 3 |
+
from distutils.errors import DistutilsError, DistutilsOptionError
|
| 4 |
+
import os
|
| 5 |
+
import glob
|
| 6 |
+
import io
|
| 7 |
+
|
| 8 |
+
import pkg_resources
|
| 9 |
+
from setuptools.command.easy_install import easy_install
|
| 10 |
+
from setuptools import namespaces
|
| 11 |
+
import setuptools
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class develop(namespaces.DevelopInstaller, easy_install):
|
| 15 |
+
"""Set up package for development"""
|
| 16 |
+
|
| 17 |
+
description = "install package in 'development mode'"
|
| 18 |
+
|
| 19 |
+
user_options = easy_install.user_options + [
|
| 20 |
+
("uninstall", "u", "Uninstall this source package"),
|
| 21 |
+
("egg-path=", None, "Set the path to be used in the .egg-link file"),
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
boolean_options = easy_install.boolean_options + ['uninstall']
|
| 25 |
+
|
| 26 |
+
command_consumes_arguments = False # override base
|
| 27 |
+
|
| 28 |
+
def run(self):
|
| 29 |
+
if self.uninstall:
|
| 30 |
+
self.multi_version = True
|
| 31 |
+
self.uninstall_link()
|
| 32 |
+
self.uninstall_namespaces()
|
| 33 |
+
else:
|
| 34 |
+
self.install_for_development()
|
| 35 |
+
self.warn_deprecated_options()
|
| 36 |
+
|
| 37 |
+
def initialize_options(self):
|
| 38 |
+
self.uninstall = None
|
| 39 |
+
self.egg_path = None
|
| 40 |
+
easy_install.initialize_options(self)
|
| 41 |
+
self.setup_path = None
|
| 42 |
+
self.always_copy_from = '.' # always copy eggs installed in curdir
|
| 43 |
+
|
| 44 |
+
def finalize_options(self):
|
| 45 |
+
ei = self.get_finalized_command("egg_info")
|
| 46 |
+
if ei.broken_egg_info:
|
| 47 |
+
template = "Please rename %r to %r before using 'develop'"
|
| 48 |
+
args = ei.egg_info, ei.broken_egg_info
|
| 49 |
+
raise DistutilsError(template % args)
|
| 50 |
+
self.args = [ei.egg_name]
|
| 51 |
+
|
| 52 |
+
easy_install.finalize_options(self)
|
| 53 |
+
self.expand_basedirs()
|
| 54 |
+
self.expand_dirs()
|
| 55 |
+
# pick up setup-dir .egg files only: no .egg-info
|
| 56 |
+
self.package_index.scan(glob.glob('*.egg'))
|
| 57 |
+
|
| 58 |
+
egg_link_fn = ei.egg_name + '.egg-link'
|
| 59 |
+
self.egg_link = os.path.join(self.install_dir, egg_link_fn)
|
| 60 |
+
self.egg_base = ei.egg_base
|
| 61 |
+
if self.egg_path is None:
|
| 62 |
+
self.egg_path = os.path.abspath(ei.egg_base)
|
| 63 |
+
|
| 64 |
+
target = pkg_resources.normalize_path(self.egg_base)
|
| 65 |
+
egg_path = pkg_resources.normalize_path(
|
| 66 |
+
os.path.join(self.install_dir, self.egg_path)
|
| 67 |
+
)
|
| 68 |
+
if egg_path != target:
|
| 69 |
+
raise DistutilsOptionError(
|
| 70 |
+
"--egg-path must be a relative path from the install"
|
| 71 |
+
" directory to " + target
|
| 72 |
+
)
|
| 73 |
+
|
| 74 |
+
# Make a distribution for the package's source
|
| 75 |
+
self.dist = pkg_resources.Distribution(
|
| 76 |
+
target,
|
| 77 |
+
pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
|
| 78 |
+
project_name=ei.egg_name,
|
| 79 |
+
)
|
| 80 |
+
|
| 81 |
+
self.setup_path = self._resolve_setup_path(
|
| 82 |
+
self.egg_base,
|
| 83 |
+
self.install_dir,
|
| 84 |
+
self.egg_path,
|
| 85 |
+
)
|
| 86 |
+
|
| 87 |
+
@staticmethod
|
| 88 |
+
def _resolve_setup_path(egg_base, install_dir, egg_path):
|
| 89 |
+
"""
|
| 90 |
+
Generate a path from egg_base back to '.' where the
|
| 91 |
+
setup script resides and ensure that path points to the
|
| 92 |
+
setup path from $install_dir/$egg_path.
|
| 93 |
+
"""
|
| 94 |
+
path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
|
| 95 |
+
if path_to_setup != os.curdir:
|
| 96 |
+
path_to_setup = '../' * (path_to_setup.count('/') + 1)
|
| 97 |
+
resolved = pkg_resources.normalize_path(
|
| 98 |
+
os.path.join(install_dir, egg_path, path_to_setup)
|
| 99 |
+
)
|
| 100 |
+
if resolved != pkg_resources.normalize_path(os.curdir):
|
| 101 |
+
raise DistutilsOptionError(
|
| 102 |
+
"Can't get a consistent path to setup script from"
|
| 103 |
+
" installation directory",
|
| 104 |
+
resolved,
|
| 105 |
+
pkg_resources.normalize_path(os.curdir),
|
| 106 |
+
)
|
| 107 |
+
return path_to_setup
|
| 108 |
+
|
| 109 |
+
    def install_for_development(self):
        """Install the project in "development" (editable) mode.

        Generates metadata and in-place extension builds, then writes an
        ``.egg-link`` file into the installation directory instead of
        copying the project's code there.
        """
        self.run_command('egg_info')

        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')

        # Install the pending bootstrap source, if any, then clear the
        # flag so it is only handled once.
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        self.install_namespaces()

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            # The link file holds the egg path and, on a second line, the
            # relative path back to the setup script.
            with open(self.egg_link, "w") as f:
                f.write(self.egg_path + "\n" + self.setup_path)
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)
|
| 130 |
+
|
| 131 |
+
def uninstall_link(self):
|
| 132 |
+
if os.path.exists(self.egg_link):
|
| 133 |
+
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
|
| 134 |
+
egg_link_file = open(self.egg_link)
|
| 135 |
+
contents = [line.rstrip() for line in egg_link_file]
|
| 136 |
+
egg_link_file.close()
|
| 137 |
+
if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
|
| 138 |
+
log.warn("Link points to %s: uninstall aborted", contents)
|
| 139 |
+
return
|
| 140 |
+
if not self.dry_run:
|
| 141 |
+
os.unlink(self.egg_link)
|
| 142 |
+
if not self.dry_run:
|
| 143 |
+
self.update_pth(self.dist) # remove any .pth link to us
|
| 144 |
+
if self.distribution.scripts:
|
| 145 |
+
# XXX should also check for entry point scripts!
|
| 146 |
+
log.warn("Note: you must uninstall or replace scripts manually!")
|
| 147 |
+
|
| 148 |
+
def install_egg_scripts(self, dist):
|
| 149 |
+
if dist is not self.dist:
|
| 150 |
+
# Installing a dependency, so fall back to normal behavior
|
| 151 |
+
return easy_install.install_egg_scripts(self, dist)
|
| 152 |
+
|
| 153 |
+
# create wrapper scripts in the script dir, pointing to dist.scripts
|
| 154 |
+
|
| 155 |
+
# new-style...
|
| 156 |
+
self.install_wrapper_scripts(dist)
|
| 157 |
+
|
| 158 |
+
# ...and old-style
|
| 159 |
+
for script_name in self.distribution.scripts or []:
|
| 160 |
+
script_path = os.path.abspath(convert_path(script_name))
|
| 161 |
+
script_name = os.path.basename(script_path)
|
| 162 |
+
with io.open(script_path) as strm:
|
| 163 |
+
script_text = strm.read()
|
| 164 |
+
self.install_script(dist, script_name, script_text, script_path)
|
| 165 |
+
|
| 166 |
+
def install_wrapper_scripts(self, dist):
|
| 167 |
+
dist = VersionlessRequirement(dist)
|
| 168 |
+
return easy_install.install_wrapper_scripts(self, dist)
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class VersionlessRequirement:
    """
    Adapt a pkg_resources.Distribution to simply return the project
    name as the 'requirement' so that scripts will work across
    multiple versions.

    >>> from pkg_resources import Distribution
    >>> dist = Distribution(project_name='foo', version='1.0')
    >>> str(dist.as_requirement())
    'foo==1.0'
    >>> adapted_dist = VersionlessRequirement(dist)
    >>> str(adapted_dist.as_requirement())
    'foo'
    """

    def __init__(self, dist):
        # Keep the wrapped distribution; every attribute not defined on
        # this adapter is delegated to it via __getattr__.
        self._dist = dist

    def __getattr__(self, name):
        return getattr(self._dist, name)

    def as_requirement(self):
        # The bare project name acts as a version-free requirement.
        return self.project_name
|
hfenv/Lib/site-packages/setuptools/command/dist_info.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a dist_info directory
|
| 3 |
+
As defined in the wheel specification
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import re
|
| 8 |
+
import shutil
|
| 9 |
+
import sys
|
| 10 |
+
import warnings
|
| 11 |
+
from contextlib import contextmanager
|
| 12 |
+
from inspect import cleandoc
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
|
| 15 |
+
from distutils.core import Command
|
| 16 |
+
from distutils import log
|
| 17 |
+
from setuptools.extern import packaging
|
| 18 |
+
from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class dist_info(Command):
    """setuptools command that creates a ``*.dist-info`` directory,
    as defined by the wheel specification."""

    description = 'create a .dist-info directory'

    # (long option, short option, help text)
    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"
                           " DEPRECATED: use --output-dir."),
        # NOTE: the original adjacent string literals concatenated to
        # "will becreated"; a leading space restores the help text.
        ('output-dir=', 'o', "directory inside of which the .dist-info will be"
                             " created (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
        ('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"),
    ]

    boolean_options = ['tag-date', 'keep-egg-info']
    negative_opt = {'no-date': 'tag-date'}

    def initialize_options(self):
        """Start every option unset; finalize_options supplies defaults."""
        self.egg_base = None
        self.output_dir = None
        self.name = None
        self.dist_info_dir = None
        self.tag_date = None
        self.tag_build = None
        self.keep_egg_info = False

    def finalize_options(self):
        """Resolve output paths and configure the egg_info sub-command."""
        if self.egg_base:
            msg = "--egg-base is deprecated for dist_info command. Use --output-dir."
            warnings.warn(msg, SetuptoolsDeprecationWarning)
            self.output_dir = self.egg_base or self.output_dir

        dist = self.distribution
        project_dir = dist.src_root or os.curdir
        self.output_dir = Path(self.output_dir or project_dir)

        # Reuse the egg_info command to produce metadata in our output dir.
        egg_info = self.reinitialize_command("egg_info")
        egg_info.egg_base = str(self.output_dir)

        # Propagate version tags in whichever direction they were set.
        if self.tag_date:
            egg_info.tag_date = self.tag_date
        else:
            self.tag_date = egg_info.tag_date

        if self.tag_build:
            egg_info.tag_build = self.tag_build
        else:
            self.tag_build = egg_info.tag_build

        egg_info.finalize_options()
        self.egg_info = egg_info

        # Compose the "<name>-<version>.dist-info" directory name per the
        # wheel naming/escaping rules.
        name = _safe(dist.get_name())
        version = _version(dist.get_version())
        self.name = f"{name}-{version}"
        self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info")

    @contextmanager
    def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
        """Back up *dir_path* for the duration of the context when
        *requires_bkp* is true, restoring it afterwards (even on error)."""
        if requires_bkp:
            bkp_name = f"{dir_path}.__bkp__"
            _rm(bkp_name, ignore_errors=True)
            _copy(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True)
            try:
                yield
            finally:
                _rm(dir_path, ignore_errors=True)
                shutil.move(bkp_name, dir_path)
        else:
            yield

    def run(self):
        """Generate .egg-info metadata, then convert it to .dist-info
        via bdist_wheel's egg2dist."""
        self.output_dir.mkdir(parents=True, exist_ok=True)
        self.egg_info.run()
        egg_info_dir = self.egg_info.egg_info
        assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"

        log.info("creating '{}'".format(os.path.abspath(self.dist_info_dir)))
        bdist_wheel = self.get_finalized_command('bdist_wheel')

        # TODO: if bdist_wheel if merged into setuptools, just add "keep_egg_info" there
        with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
            bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def _safe(component: str) -> str:
|
| 109 |
+
"""Escape a component used to form a wheel name according to PEP 491"""
|
| 110 |
+
return re.sub(r"[^\w\d.]+", "_", component)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _version(version: str) -> str:
    """Convert an arbitrary string to a version string.

    Normalizes *version* via ``packaging`` when it is PEP 440-compliant;
    otherwise warns and falls back to a best-effort filename-safe escape.
    """
    v = version.replace(' ', '.')
    try:
        # Normalize, then swap "-" for "_" so the result is safe in a
        # dist-info directory name.
        return str(packaging.version.Version(v)).replace("-", "_")
    except packaging.version.InvalidVersion:
        # Fixed grammar of the original warning ("make sure specify").
        msg = f"""Invalid version: {version!r}.
        !!\n\n
        ###################
        # Invalid version #
        ###################
        {version!r} is not valid according to PEP 440.\n
        Please make sure to specify a valid version for your package.
        Also note that future releases of setuptools may halt the build process
        if an invalid version is given.
        \n\n!!
        """
        warnings.warn(cleandoc(msg))
        return _safe(v).strip("_")
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def _rm(dir_name, **opts):
|
| 135 |
+
if os.path.isdir(dir_name):
|
| 136 |
+
shutil.rmtree(dir_name, **opts)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def _copy(src, dst, **opts):
|
| 140 |
+
if sys.version_info < (3, 8):
|
| 141 |
+
opts.pop("dirs_exist_ok", None)
|
| 142 |
+
shutil.copytree(src, dst, **opts)
|