ZTWHHH committed on
Commit
dc1bcb2
·
verified ·
1 Parent(s): f7bd521

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. valley/lib/python3.10/site-packages/httpcore/_api.py +92 -0
  3. valley/lib/python3.10/site-packages/httpcore/_backends/trio.py +161 -0
  4. valley/lib/python3.10/site-packages/httpcore/_models.py +492 -0
  5. valley/lib/python3.10/site-packages/httpcore/_ssl.py +9 -0
  6. valley/lib/python3.10/site-packages/httpcore/_sync/__init__.py +39 -0
  7. valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/connection.cpython-310.pyc +0 -0
  8. valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/http11.cpython-310.pyc +0 -0
  9. valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/http2.cpython-310.pyc +0 -0
  10. valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-310.pyc +0 -0
  11. valley/lib/python3.10/site-packages/httpcore/_sync/connection.py +220 -0
  12. valley/lib/python3.10/site-packages/httpcore/_sync/http11.py +386 -0
  13. valley/lib/python3.10/site-packages/httpcore/_sync/http2.py +589 -0
  14. valley/lib/python3.10/site-packages/httpcore/_sync/http_proxy.py +368 -0
  15. valley/lib/python3.10/site-packages/httpcore/_sync/socks_proxy.py +342 -0
  16. valley/lib/python3.10/site-packages/httpcore/_synchronization.py +317 -0
  17. valley/lib/python3.10/site-packages/httpcore/py.typed +0 -0
  18. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__init__.py +10 -0
  19. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/__init__.cpython-310.pyc +0 -0
  20. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/anchored_artists.cpython-310.pyc +0 -0
  21. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_divider.cpython-310.pyc +0 -0
  22. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_grid.cpython-310.pyc +0 -0
  23. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_rgb.cpython-310.pyc +0 -0
  24. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_size.cpython-310.pyc +0 -0
  25. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/inset_locator.cpython-310.pyc +0 -0
  26. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/mpl_axes.cpython-310.pyc +0 -0
  27. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/parasite_axes.cpython-310.pyc +0 -0
  28. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/anchored_artists.py +462 -0
  29. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_divider.py +694 -0
  30. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_grid.py +563 -0
  31. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_rgb.py +157 -0
  32. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_size.py +248 -0
  33. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/inset_locator.py +561 -0
  34. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/mpl_axes.py +128 -0
  35. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/parasite_axes.py +257 -0
  36. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__init__.py +10 -0
  37. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  38. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__pycache__/conftest.cpython-310.pyc +0 -0
  39. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__pycache__/test_axes_grid1.cpython-310.pyc +0 -0
  40. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/conftest.py +2 -0
  41. valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/test_axes_grid1.py +792 -0
  42. valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/__init__.cpython-310.pyc +0 -0
  43. valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/axes_divider.cpython-310.pyc +0 -0
  44. valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/axis_artist.cpython-310.pyc +0 -0
  45. valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/axisline_style.cpython-310.pyc +0 -0
  46. valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/grid_helper_curvelinear.cpython-310.pyc +0 -0
  47. valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/parasite_axes.cpython-310.pyc +0 -0
  48. valley/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__init__.py +3 -0
  49. valley/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/__init__.cpython-310.pyc +0 -0
  50. valley/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/art3d.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -598,3 +598,4 @@ wemm/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
598
  valley/lib/python3.10/site-packages/triton/backends/nvidia/bin/nvdisasm filter=lfs diff=lfs merge=lfs -text
599
  valley/lib/python3.10/site-packages/triton/_C/libproton.so filter=lfs diff=lfs merge=lfs -text
600
  valley/lib/python3.10/site-packages/sympy/core/__pycache__/expr.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
598
  valley/lib/python3.10/site-packages/triton/backends/nvidia/bin/nvdisasm filter=lfs diff=lfs merge=lfs -text
599
  valley/lib/python3.10/site-packages/triton/_C/libproton.so filter=lfs diff=lfs merge=lfs -text
600
  valley/lib/python3.10/site-packages/sympy/core/__pycache__/expr.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
601
+ videollama2/lib/python3.10/site-packages/bitsandbytes/libbitsandbytes_cuda121.so filter=lfs diff=lfs merge=lfs -text
valley/lib/python3.10/site-packages/httpcore/_api.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import contextmanager
2
+ from typing import Iterator, Optional, Union
3
+
4
+ from ._models import URL, Extensions, HeaderTypes, Response
5
+ from ._sync.connection_pool import ConnectionPool
6
+
7
+
8
def request(
    method: Union[bytes, str],
    url: Union[URL, bytes, str],
    *,
    headers: HeaderTypes = None,
    content: Union[bytes, Iterator[bytes], None] = None,
    extensions: Optional[Extensions] = None,
) -> Response:
    """
    Send an HTTP request and return the response.

    ```
    response = httpcore.request("GET", "https://www.example.com/")
    ```

    Arguments:
        method: The HTTP method for the request. Typically one of `"GET"`,
            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
            or as str/bytes.
        headers: The HTTP request headers. Either as a dictionary of str/bytes,
            or as a list of two-tuples of str/bytes.
        content: The content of the request body. Either as bytes,
            or as a bytes iterator.
        extensions: A dictionary of optional extra information included on the request.
            Possible keys include `"timeout"`.

    Returns:
        An instance of `httpcore.Response`.
    """
    # A throwaway connection pool scoped to this single call; the `with`
    # block guarantees the underlying connection is closed before we return.
    pool = ConnectionPool()
    with pool:
        response = pool.request(
            method=method,
            url=url,
            headers=headers,
            content=content,
            extensions=extensions,
        )
    return response
46
+
47
+
48
@contextmanager
def stream(
    method: Union[bytes, str],
    url: Union[URL, bytes, str],
    *,
    headers: HeaderTypes = None,
    content: Union[bytes, Iterator[bytes], None] = None,
    extensions: Optional[Extensions] = None,
) -> Iterator[Response]:
    """
    Sends an HTTP request, returning the response within a context manager.

    ```
    with httpcore.stream("GET", "https://www.example.com/") as response:
        ...
    ```

    When using the `stream()` function, the body of the response will not be
    automatically read. If you want to access the response body you should
    either use `content = response.read()`, or `for chunk in response.iter_content()`.

    Arguments:
        method: The HTTP method for the request. Typically one of `"GET"`,
            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
            or as str/bytes.
        headers: The HTTP request headers. Either as a dictionary of str/bytes,
            or as a list of two-tuples of str/bytes.
        content: The content of the request body. Either as bytes,
            or as a bytes iterator.
        extensions: A dictionary of optional extra information included on the request.
            Possible keys include `"timeout"`.

    Returns:
        An instance of `httpcore.Response`.
    """
    # The pool is kept open for as long as the caller's `with` block is
    # active, since the response body may still be streaming from it.
    with ConnectionPool() as pool:
        with pool.stream(
            method=method,
            url=url,
            headers=headers,
            content=content,
            extensions=extensions,
        ) as response:
            yield response
valley/lib/python3.10/site-packages/httpcore/_backends/trio.py ADDED
@@ -0,0 +1,161 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import ssl
2
+ import typing
3
+
4
+ import trio
5
+
6
+ from .._exceptions import (
7
+ ConnectError,
8
+ ConnectTimeout,
9
+ ExceptionMapping,
10
+ ReadError,
11
+ ReadTimeout,
12
+ WriteError,
13
+ WriteTimeout,
14
+ map_exceptions,
15
+ )
16
+ from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
17
+
18
+
19
class TrioStream(AsyncNetworkStream):
    """
    An `AsyncNetworkStream` backed by a `trio.abc.Stream`.

    Translates trio's timeout and resource exceptions into httpcore's
    exception hierarchy via `map_exceptions`.
    """

    def __init__(self, stream: trio.abc.Stream) -> None:
        self._stream = stream

    async def read(
        self, max_bytes: int, timeout: typing.Optional[float] = None
    ) -> bytes:
        """
        Read up to `max_bytes` from the stream.

        A `timeout` of `None` means wait indefinitely (`fail_after(inf)`).
        Raises `ReadTimeout` on timeout and `ReadError` on a broken or
        closed resource.
        """
        timeout_or_inf = float("inf") if timeout is None else timeout
        exc_map: ExceptionMapping = {
            trio.TooSlowError: ReadTimeout,
            trio.BrokenResourceError: ReadError,
            trio.ClosedResourceError: ReadError,
        }
        with map_exceptions(exc_map):
            with trio.fail_after(timeout_or_inf):
                data: bytes = await self._stream.receive_some(max_bytes=max_bytes)
                return data

    async def write(
        self, buffer: bytes, timeout: typing.Optional[float] = None
    ) -> None:
        """
        Write the entire `buffer` to the stream, raising `WriteTimeout` /
        `WriteError` on failure. An empty buffer is a no-op.
        """
        if not buffer:
            return

        timeout_or_inf = float("inf") if timeout is None else timeout
        exc_map: ExceptionMapping = {
            trio.TooSlowError: WriteTimeout,
            trio.BrokenResourceError: WriteError,
            trio.ClosedResourceError: WriteError,
        }
        with map_exceptions(exc_map):
            with trio.fail_after(timeout_or_inf):
                await self._stream.send_all(data=buffer)

    async def aclose(self) -> None:
        """Close the underlying trio stream."""
        await self._stream.aclose()

    async def start_tls(
        self,
        ssl_context: ssl.SSLContext,
        server_hostname: typing.Optional[str] = None,
        timeout: typing.Optional[float] = None,
    ) -> AsyncNetworkStream:
        """
        Upgrade this stream to TLS, returning a new `TrioStream` wrapping
        the `trio.SSLStream`. On any handshake failure the underlying
        stream is closed before the exception propagates.
        """
        timeout_or_inf = float("inf") if timeout is None else timeout
        exc_map: ExceptionMapping = {
            trio.TooSlowError: ConnectTimeout,
            trio.BrokenResourceError: ConnectError,
        }
        ssl_stream = trio.SSLStream(
            self._stream,
            ssl_context=ssl_context,
            server_hostname=server_hostname,
            https_compatible=True,
            server_side=False,
        )
        with map_exceptions(exc_map):
            try:
                with trio.fail_after(timeout_or_inf):
                    await ssl_stream.do_handshake()
            except Exception as exc:  # pragma: nocover
                await self.aclose()
                raise exc
        return TrioStream(ssl_stream)

    def get_extra_info(self, info: str) -> typing.Any:
        """
        Expose connection details by name: "ssl_object", "client_addr",
        "server_addr", "socket", or "is_readable". Returns `None` for any
        unrecognised key.
        """
        if info == "ssl_object" and isinstance(self._stream, trio.SSLStream):
            # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__.
            # Tracked at https://github.com/python-trio/trio/issues/542
            return self._stream._ssl_object  # type: ignore[attr-defined]
        if info == "client_addr":
            return self._get_socket_stream().socket.getsockname()
        if info == "server_addr":
            return self._get_socket_stream().socket.getpeername()
        if info == "socket":
            # Unwrap any (possibly nested) SSL layers to reach the raw socket.
            stream = self._stream
            while isinstance(stream, trio.SSLStream):
                stream = stream.transport_stream
            assert isinstance(stream, trio.SocketStream)
            return stream.socket
        if info == "is_readable":
            socket = self.get_extra_info("socket")
            return socket.is_readable()
        return None

    def _get_socket_stream(self) -> trio.SocketStream:
        # Peel off SSL wrappers until the plain socket stream is reached.
        stream = self._stream
        while isinstance(stream, trio.SSLStream):
            stream = stream.transport_stream
        assert isinstance(stream, trio.SocketStream)
        return stream
109
+
110
+
111
class TrioBackend(AsyncNetworkBackend):
    """
    An `AsyncNetworkBackend` implemented with trio's networking primitives.
    """

    async def connect_tcp(
        self,
        host: str,
        port: int,
        timeout: typing.Optional[float] = None,
        local_address: typing.Optional[str] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:
        """
        Open a TCP connection, returning a `TrioStream`.

        Connection failures and timeouts are mapped to `ConnectError` /
        `ConnectTimeout`; a `timeout` of `None` waits indefinitely.
        """
        # By default for TCP sockets, trio enables TCP_NODELAY.
        # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream
        if socket_options is None:
            socket_options = []  # pragma: no cover
        timeout_or_inf = float("inf") if timeout is None else timeout
        exc_map: ExceptionMapping = {
            trio.TooSlowError: ConnectTimeout,
            trio.BrokenResourceError: ConnectError,
            OSError: ConnectError,
        }
        with map_exceptions(exc_map):
            with trio.fail_after(timeout_or_inf):
                stream: trio.abc.Stream = await trio.open_tcp_stream(
                    host=host, port=port, local_address=local_address
                )
                for option in socket_options:
                    stream.setsockopt(*option)  # type: ignore[attr-defined] # pragma: no cover
                return TrioStream(stream)

    async def connect_unix_socket(
        self,
        path: str,
        timeout: typing.Optional[float] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:  # pragma: nocover
        """
        Open a Unix domain socket connection, returning a `TrioStream`.
        Error mapping mirrors `connect_tcp`.
        """
        if socket_options is None:
            socket_options = []
        timeout_or_inf = float("inf") if timeout is None else timeout
        exc_map: ExceptionMapping = {
            trio.TooSlowError: ConnectTimeout,
            trio.BrokenResourceError: ConnectError,
            OSError: ConnectError,
        }
        with map_exceptions(exc_map):
            with trio.fail_after(timeout_or_inf):
                stream: trio.abc.Stream = await trio.open_unix_socket(path)
                for option in socket_options:
                    stream.setsockopt(*option)  # type: ignore[attr-defined] # pragma: no cover
                return TrioStream(stream)

    async def sleep(self, seconds: float) -> None:
        """Sleep for `seconds` without blocking the trio event loop."""
        await trio.sleep(seconds)  # pragma: nocover
valley/lib/python3.10/site-packages/httpcore/_models.py ADDED
@@ -0,0 +1,492 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import (
2
+ Any,
3
+ AsyncIterable,
4
+ AsyncIterator,
5
+ Iterable,
6
+ Iterator,
7
+ List,
8
+ Mapping,
9
+ MutableMapping,
10
+ Optional,
11
+ Sequence,
12
+ Tuple,
13
+ Union,
14
+ )
15
+ from urllib.parse import urlparse
16
+
17
+ # Functions for typechecking...
18
+
19
+
20
+ HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
21
+ HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
22
+ HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None]
23
+
24
+ Extensions = MutableMapping[str, Any]
25
+
26
+
27
def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes:
    """
    Normalise a bytes-or-str argument to bytes.

    Any arguments that are ultimately represented as bytes can be specified
    either as bytes or as strings. String arguments must be strictly limited
    to the plain ASCII range, chr(0)...chr(127); for anything outside that
    range callers must pass bytes explicitly. Raises `TypeError` for
    non-ASCII strings and for any other type.
    """
    if isinstance(value, bytes):
        return value
    if isinstance(value, str):
        try:
            return value.encode("ascii")
        except UnicodeEncodeError:
            raise TypeError(f"{name} strings may not include unicode characters.")
    raise TypeError(f"{name} must be bytes or str, but got {type(value).__name__}.")
46
+
47
+
48
def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL":
    """
    Type check for URL parameters: pass `URL` instances through unchanged,
    promote bytes/str to `URL`, and reject anything else with `TypeError`.
    """
    if isinstance(value, URL):
        return value
    if isinstance(value, (bytes, str)):
        return URL(value)
    raise TypeError(f"{name} must be a URL, bytes, or str, but got {type(value).__name__}.")
59
+
60
+
61
def enforce_headers(
    value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str
) -> List[Tuple[bytes, bytes]]:
    """
    Convenience function that ensures all items in request or response headers
    are either bytes or strings in the plain ASCII range, returned as a list
    of `(name, value)` byte tuples. `None` yields an empty list.
    """
    if value is None:
        return []
    if isinstance(value, (Mapping, Sequence)):
        pairs = value.items() if isinstance(value, Mapping) else value
        return [
            (
                enforce_bytes(k, name="header name"),
                enforce_bytes(v, name="header value"),
            )
            for k, v in pairs
        ]
    raise TypeError(
        f"{name} must be a mapping or sequence of two-tuples, but got {type(value).__name__}."
    )
91
+
92
+
93
def enforce_stream(
    value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str
) -> Union[Iterable[bytes], AsyncIterable[bytes]]:
    """
    Normalise request/response body content to a byte stream: raw bytes and
    `None` are wrapped in a `ByteStream`; existing iterables pass through.
    """
    if isinstance(value, bytes):
        return ByteStream(value)
    if value is None:
        return ByteStream(b"")
    return value
101
+
102
+
103
+ # * https://tools.ietf.org/html/rfc3986#section-3.2.3
104
+ # * https://url.spec.whatwg.org/#url-miscellaneous
105
+ # * https://url.spec.whatwg.org/#scheme-state
106
+ DEFAULT_PORTS = {
107
+ b"ftp": 21,
108
+ b"http": 80,
109
+ b"https": 443,
110
+ b"ws": 80,
111
+ b"wss": 443,
112
+ }
113
+
114
+
115
def include_request_headers(
    headers: List[Tuple[bytes, bytes]],
    *,
    url: "URL",
    content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]],
) -> List[Tuple[bytes, bytes]]:
    """
    Return `headers` augmented with any required framing headers.

    Adds a `Host` header derived from `url` when missing (omitting the port
    when it is the scheme's default), and — when a body is present and the
    caller supplied neither `Content-Length` nor `Transfer-Encoding` — adds
    `Content-Length` for bytes bodies or `Transfer-Encoding: chunked` for
    streaming bodies.
    """
    # Case-insensitive view of the caller-supplied header names.
    present = {key.lower() for key, _ in headers}

    if b"host" not in present:
        default_port = DEFAULT_PORTS.get(url.scheme)
        if url.port is None or url.port == default_port:
            host_header = url.host
        else:
            host_header = b"%b:%d" % (url.host, url.port)
        headers = [(b"Host", host_header)] + headers

    needs_framing = (
        content is not None
        and b"content-length" not in present
        and b"transfer-encoding" not in present
    )
    if needs_framing:
        if isinstance(content, bytes):
            headers = headers + [
                (b"Content-Length", str(len(content)).encode("ascii"))
            ]
        else:
            # Streaming bodies of unknown length use chunked framing.
            headers = headers + [(b"Transfer-Encoding", b"chunked")]  # pragma: nocover

    return headers
143
+
144
+
145
+ # Interfaces for byte streams...
146
+
147
+
148
class ByteStream:
    """
    A container for non-streaming content, and that supports both sync and
    async stream iteration: either protocol yields the content as a single
    chunk.
    """

    def __init__(self, content: bytes) -> None:
        self._content = content

    def __iter__(self) -> Iterator[bytes]:
        # A fresh one-shot iterator over the single chunk on every call.
        return iter((self._content,))

    async def __aiter__(self) -> AsyncIterator[bytes]:
        yield self._content

    def __repr__(self) -> str:
        return f"<{type(self).__name__} [{len(self._content)} bytes]>"
165
+
166
+
167
class Origin:
    """
    The (scheme, host, port) triple identifying a connection target.
    Two origins are equal when all three components match.
    """

    def __init__(self, scheme: bytes, host: bytes, port: int) -> None:
        self.scheme = scheme
        self.host = host
        self.port = port

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Origin):
            return False
        return (self.scheme, self.host, self.port) == (
            other.scheme,
            other.host,
            other.port,
        )

    def __str__(self) -> str:
        return "{}://{}:{}".format(
            self.scheme.decode("ascii"), self.host.decode("ascii"), self.port
        )
186
+
187
+
188
class URL:
    """
    Represents the URL against which an HTTP request may be made.

    The URL may either be specified as a plain string, for convenience:

    ```python
    url = httpcore.URL("https://www.example.com/")
    ```

    Or be constructed with explicitly pre-parsed components:

    ```python
    url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/')
    ```

    Using this second more explicit style allows integrations that are using
    `httpcore` to pass through URLs that have already been parsed in order to use
    libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures
    that URL parsing is treated identically at both the networking level and at any
    higher layers of abstraction.

    The four components are important here, as they allow the URL to be precisely
    specified in a pre-parsed format. They also allow certain types of request to
    be created that could not otherwise be expressed.

    For example, an HTTP request to `http://www.example.com/` forwarded via a proxy
    at `http://localhost:8080`...

    ```python
    # Constructs an HTTP request with a complete URL as the target:
    # GET https://www.example.com/ HTTP/1.1
    url = httpcore.URL(
        scheme=b'http',
        host=b'localhost',
        port=8080,
        target=b'https://www.example.com/'
    )
    request = httpcore.Request(
        method="GET",
        url=url
    )
    ```

    Another example is constructing an `OPTIONS *` request...

    ```python
    # Constructs an 'OPTIONS *' HTTP request:
    # OPTIONS * HTTP/1.1
    url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*')
    request = httpcore.Request(method="OPTIONS", url=url)
    ```

    This kind of request is not possible to formulate with a URL string,
    because the `/` delimiter is always used to demarcate the target from the
    host/port portion of the URL.

    For convenience, string-like arguments may be specified either as strings or
    as bytes. However, once a request is being issued over-the-wire, the URL
    components are always ultimately required to be a bytewise representation.

    In order to avoid any ambiguity over character encodings, when strings are used
    as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`.
    If you require a bytewise representation that is outside this range you must
    handle the character encoding directly, and pass a bytes instance.
    """

    def __init__(
        self,
        url: Union[bytes, str] = "",
        *,
        scheme: Union[bytes, str] = b"",
        host: Union[bytes, str] = b"",
        port: Optional[int] = None,
        target: Union[bytes, str] = b"",
    ) -> None:
        """
        Parameters:
            url: The complete URL as a string or bytes.
            scheme: The URL scheme as a string or bytes.
                Typically either `"http"` or `"https"`.
            host: The URL host as a string or bytes. Such as `"www.example.com"`.
            port: The port to connect to. Either an integer or `None`.
            target: The target of the HTTP request. Such as `"/items?search=red"`.
        """
        if url:
            # `urlparse` on a bytes input returns bytes components.
            # NOTE(review): `parsed.port` raises ValueError for a malformed
            # port — that exception propagates to the caller here.
            parsed = urlparse(enforce_bytes(url, name="url"))
            self.scheme = parsed.scheme
            self.host = parsed.hostname or b""
            self.port = parsed.port
            # An empty path is normalised to b"/"; the query string, when
            # present, is re-attached to form the request target.
            self.target = (parsed.path or b"/") + (
                b"?" + parsed.query if parsed.query else b""
            )
        else:
            # The explicit, pre-parsed component form.
            self.scheme = enforce_bytes(scheme, name="scheme")
            self.host = enforce_bytes(host, name="host")
            self.port = port
            self.target = enforce_bytes(target, name="target")

    @property
    def origin(self) -> Origin:
        # Falls back to the scheme's well-known port when none is given.
        # Raises KeyError for schemes outside this table.
        default_port = {
            b"http": 80,
            b"https": 443,
            b"ws": 80,
            b"wss": 443,
            b"socks5": 1080,
        }[self.scheme]
        return Origin(
            scheme=self.scheme, host=self.host, port=self.port or default_port
        )

    def __eq__(self, other: Any) -> bool:
        return (
            isinstance(other, URL)
            and other.scheme == self.scheme
            and other.host == self.host
            and other.port == self.port
            and other.target == self.target
        )

    def __bytes__(self) -> bytes:
        if self.port is None:
            return b"%b://%b%b" % (self.scheme, self.host, self.target)
        return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target)

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}(scheme={self.scheme!r}, "
            f"host={self.host!r}, port={self.port!r}, target={self.target!r})"
        )
319
+
320
+
321
class Request:
    """
    An HTTP request.
    """

    def __init__(
        self,
        method: Union[bytes, str],
        url: Union[URL, bytes, str],
        *,
        headers: HeaderTypes = None,
        content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None,
        extensions: Optional[Extensions] = None,
    ) -> None:
        """
        Parameters:
            method: The HTTP request method, either as a string or bytes.
                For example: `GET`.
            url: The request URL, either as a `URL` instance, or as a string or bytes.
                For example: `"https://www.example.com"`.
            headers: The HTTP request headers.
            content: The content of the request body.
            extensions: A dictionary of optional extra information included on
                the request. Possible keys include `"timeout"`, and `"trace"`.
        """
        # All string-like inputs are normalised to bytes up front.
        self.method: bytes = enforce_bytes(method, name="method")
        self.url: URL = enforce_url(url, name="url")
        self.headers: List[Tuple[bytes, bytes]] = enforce_headers(
            headers, name="headers"
        )
        self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream(
            content, name="content"
        )
        self.extensions = {} if extensions is None else extensions

        # A "target" extension overrides the target parsed from the URL,
        # rebuilding the URL with the other components unchanged.
        if "target" in self.extensions:
            self.url = URL(
                scheme=self.url.scheme,
                host=self.url.host,
                port=self.url.port,
                target=self.extensions["target"],
            )

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.method!r}]>"
366
+
367
+
368
class Response:
    """
    An HTTP response.

    The body may be consumed through either the sync interface
    (`read()` / `iter_stream()` / `close()`) or the async interface
    (`aread()` / `aiter_stream()` / `aclose()`), matching whether the
    underlying stream is a sync or async iterable.
    """

    def __init__(
        self,
        status: int,
        *,
        headers: HeaderTypes = None,
        content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None,
        extensions: Optional[Extensions] = None,
    ) -> None:
        """
        Parameters:
            status: The HTTP status code of the response. For example `200`.
            headers: The HTTP response headers.
            content: The content of the response body.
            extensions: A dictionary of optional extra information included on
                the response. Possible keys include `"http_version"`,
                `"reason_phrase"`, and `"network_stream"`.
        """
        self.status: int = status
        self.headers: List[Tuple[bytes, bytes]] = enforce_headers(
            headers, name="headers"
        )
        self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream(
            content, name="content"
        )
        self.extensions = {} if extensions is None else extensions

        # Guards against iterating the body more than once.
        self._stream_consumed = False

    @property
    def content(self) -> bytes:
        # `_content` is only set once the body has been fully read via
        # `read()` or `aread()`.
        if not hasattr(self, "_content"):
            if isinstance(self.stream, Iterable):
                raise RuntimeError(
                    "Attempted to access 'response.content' on a streaming response. "
                    "Call 'response.read()' first."
                )
            else:
                raise RuntimeError(
                    "Attempted to access 'response.content' on a streaming response. "
                    "Call 'await response.aread()' first."
                )
        return self._content

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.status}]>"

    # Sync interface...

    def read(self) -> bytes:
        """
        Read and cache the entire body. Only valid for sync streams.
        """
        if not isinstance(self.stream, Iterable):  # pragma: nocover
            raise RuntimeError(
                "Attempted to read an asynchronous response using 'response.read()'. "
                "You should use 'await response.aread()' instead."
            )
        if not hasattr(self, "_content"):
            self._content = b"".join([part for part in self.iter_stream()])
        return self._content

    def iter_stream(self) -> Iterator[bytes]:
        """
        Iterate the body chunks. May only be consumed once.
        """
        if not isinstance(self.stream, Iterable):  # pragma: nocover
            raise RuntimeError(
                "Attempted to stream an asynchronous response using 'for ... in "
                "response.iter_stream()'. "
                "You should use 'async for ... in response.aiter_stream()' instead."
            )
        if self._stream_consumed:
            raise RuntimeError(
                "Attempted to call 'for ... in response.iter_stream()' more than once."
            )
        self._stream_consumed = True
        for chunk in self.stream:
            yield chunk

    def close(self) -> None:
        """
        Close the underlying stream, if it supports closing.
        """
        if not isinstance(self.stream, Iterable):  # pragma: nocover
            raise RuntimeError(
                "Attempted to close an asynchronous response using 'response.close()'. "
                "You should use 'await response.aclose()' instead."
            )
        if hasattr(self.stream, "close"):
            self.stream.close()

    # Async interface...

    async def aread(self) -> bytes:
        """
        Read and cache the entire body. Only valid for async streams.
        """
        if not isinstance(self.stream, AsyncIterable):  # pragma: nocover
            raise RuntimeError(
                "Attempted to read an synchronous response using "
                "'await response.aread()'. "
                "You should use 'response.read()' instead."
            )
        if not hasattr(self, "_content"):
            self._content = b"".join([part async for part in self.aiter_stream()])
        return self._content

    async def aiter_stream(self) -> AsyncIterator[bytes]:
        """
        Asynchronously iterate the body chunks. May only be consumed once.
        """
        if not isinstance(self.stream, AsyncIterable):  # pragma: nocover
            raise RuntimeError(
                "Attempted to stream an synchronous response using 'async for ... in "
                "response.aiter_stream()'. "
                "You should use 'for ... in response.iter_stream()' instead."
            )
        if self._stream_consumed:
            raise RuntimeError(
                "Attempted to call 'async for ... in response.aiter_stream()' "
                "more than once."
            )
        self._stream_consumed = True
        async for chunk in self.stream:
            yield chunk

    async def aclose(self) -> None:
        """
        Close the underlying stream, if it supports async closing.
        """
        if not isinstance(self.stream, AsyncIterable):  # pragma: nocover
            raise RuntimeError(
                "Attempted to close a synchronous response using "
                "'await response.aclose()'. "
                "You should use 'response.close()' instead."
            )
        if hasattr(self.stream, "aclose"):
            await self.stream.aclose()
valley/lib/python3.10/site-packages/httpcore/_ssl.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ import ssl
2
+
3
+ import certifi
4
+
5
+
6
+ def default_ssl_context() -> ssl.SSLContext:
7
+ context = ssl.create_default_context()
8
+ context.load_verify_locations(certifi.where())
9
+ return context
valley/lib/python3.10/site-packages/httpcore/_sync/__init__.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .connection import HTTPConnection
2
+ from .connection_pool import ConnectionPool
3
+ from .http11 import HTTP11Connection
4
+ from .http_proxy import HTTPProxy
5
+ from .interfaces import ConnectionInterface
6
+
7
+ try:
8
+ from .http2 import HTTP2Connection
9
+ except ImportError: # pragma: nocover
10
+
11
+ class HTTP2Connection: # type: ignore
12
+ def __init__(self, *args, **kwargs) -> None: # type: ignore
13
+ raise RuntimeError(
14
+ "Attempted to use http2 support, but the `h2` package is not "
15
+ "installed. Use 'pip install httpcore[http2]'."
16
+ )
17
+
18
+
19
+ try:
20
+ from .socks_proxy import SOCKSProxy
21
+ except ImportError: # pragma: nocover
22
+
23
+ class SOCKSProxy: # type: ignore
24
+ def __init__(self, *args, **kwargs) -> None: # type: ignore
25
+ raise RuntimeError(
26
+ "Attempted to use SOCKS support, but the `socksio` package is not "
27
+ "installed. Use 'pip install httpcore[socks]'."
28
+ )
29
+
30
+
31
+ __all__ = [
32
+ "HTTPConnection",
33
+ "ConnectionPool",
34
+ "HTTPProxy",
35
+ "HTTP11Connection",
36
+ "HTTP2Connection",
37
+ "ConnectionInterface",
38
+ "SOCKSProxy",
39
+ ]
valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/connection.cpython-310.pyc ADDED
Binary file (6.35 kB). View file
 
valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/http11.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/http2.cpython-310.pyc ADDED
Binary file (15.6 kB). View file
 
valley/lib/python3.10/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-310.pyc ADDED
Binary file (4.27 kB). View file
 
valley/lib/python3.10/site-packages/httpcore/_sync/connection.py ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+ import logging
3
+ import ssl
4
+ from types import TracebackType
5
+ from typing import Iterable, Iterator, Optional, Type
6
+
7
+ from .._backends.sync import SyncBackend
8
+ from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream
9
+ from .._exceptions import ConnectError, ConnectTimeout
10
+ from .._models import Origin, Request, Response
11
+ from .._ssl import default_ssl_context
12
+ from .._synchronization import Lock
13
+ from .._trace import Trace
14
+ from .http11 import HTTP11Connection
15
+ from .interfaces import ConnectionInterface
16
+
17
+ RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc.
18
+
19
+
20
+ logger = logging.getLogger("httpcore.connection")
21
+
22
+
23
+ def exponential_backoff(factor: float) -> Iterator[float]:
24
+ """
25
+ Generate a geometric sequence that has a ratio of 2 and starts with 0.
26
+
27
+ For example:
28
+ - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...`
29
+ - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...`
30
+ """
31
+ yield 0
32
+ for n in itertools.count():
33
+ yield factor * 2**n
34
+
35
+
36
+ class HTTPConnection(ConnectionInterface):
37
+ def __init__(
38
+ self,
39
+ origin: Origin,
40
+ ssl_context: Optional[ssl.SSLContext] = None,
41
+ keepalive_expiry: Optional[float] = None,
42
+ http1: bool = True,
43
+ http2: bool = False,
44
+ retries: int = 0,
45
+ local_address: Optional[str] = None,
46
+ uds: Optional[str] = None,
47
+ network_backend: Optional[NetworkBackend] = None,
48
+ socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
49
+ ) -> None:
50
+ self._origin = origin
51
+ self._ssl_context = ssl_context
52
+ self._keepalive_expiry = keepalive_expiry
53
+ self._http1 = http1
54
+ self._http2 = http2
55
+ self._retries = retries
56
+ self._local_address = local_address
57
+ self._uds = uds
58
+
59
+ self._network_backend: NetworkBackend = (
60
+ SyncBackend() if network_backend is None else network_backend
61
+ )
62
+ self._connection: Optional[ConnectionInterface] = None
63
+ self._connect_failed: bool = False
64
+ self._request_lock = Lock()
65
+ self._socket_options = socket_options
66
+
67
+ def handle_request(self, request: Request) -> Response:
68
+ if not self.can_handle_request(request.url.origin):
69
+ raise RuntimeError(
70
+ f"Attempted to send request to {request.url.origin} on connection to {self._origin}"
71
+ )
72
+
73
+ try:
74
+ with self._request_lock:
75
+ if self._connection is None:
76
+ stream = self._connect(request)
77
+
78
+ ssl_object = stream.get_extra_info("ssl_object")
79
+ http2_negotiated = (
80
+ ssl_object is not None
81
+ and ssl_object.selected_alpn_protocol() == "h2"
82
+ )
83
+ if http2_negotiated or (self._http2 and not self._http1):
84
+ from .http2 import HTTP2Connection
85
+
86
+ self._connection = HTTP2Connection(
87
+ origin=self._origin,
88
+ stream=stream,
89
+ keepalive_expiry=self._keepalive_expiry,
90
+ )
91
+ else:
92
+ self._connection = HTTP11Connection(
93
+ origin=self._origin,
94
+ stream=stream,
95
+ keepalive_expiry=self._keepalive_expiry,
96
+ )
97
+ except BaseException as exc:
98
+ self._connect_failed = True
99
+ raise exc
100
+
101
+ return self._connection.handle_request(request)
102
+
103
+ def _connect(self, request: Request) -> NetworkStream:
104
+ timeouts = request.extensions.get("timeout", {})
105
+ sni_hostname = request.extensions.get("sni_hostname", None)
106
+ timeout = timeouts.get("connect", None)
107
+
108
+ retries_left = self._retries
109
+ delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR)
110
+
111
+ while True:
112
+ try:
113
+ if self._uds is None:
114
+ kwargs = {
115
+ "host": self._origin.host.decode("ascii"),
116
+ "port": self._origin.port,
117
+ "local_address": self._local_address,
118
+ "timeout": timeout,
119
+ "socket_options": self._socket_options,
120
+ }
121
+ with Trace("connect_tcp", logger, request, kwargs) as trace:
122
+ stream = self._network_backend.connect_tcp(**kwargs)
123
+ trace.return_value = stream
124
+ else:
125
+ kwargs = {
126
+ "path": self._uds,
127
+ "timeout": timeout,
128
+ "socket_options": self._socket_options,
129
+ }
130
+ with Trace(
131
+ "connect_unix_socket", logger, request, kwargs
132
+ ) as trace:
133
+ stream = self._network_backend.connect_unix_socket(
134
+ **kwargs
135
+ )
136
+ trace.return_value = stream
137
+
138
+ if self._origin.scheme in (b"https", b"wss"):
139
+ ssl_context = (
140
+ default_ssl_context()
141
+ if self._ssl_context is None
142
+ else self._ssl_context
143
+ )
144
+ alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
145
+ ssl_context.set_alpn_protocols(alpn_protocols)
146
+
147
+ kwargs = {
148
+ "ssl_context": ssl_context,
149
+ "server_hostname": sni_hostname
150
+ or self._origin.host.decode("ascii"),
151
+ "timeout": timeout,
152
+ }
153
+ with Trace("start_tls", logger, request, kwargs) as trace:
154
+ stream = stream.start_tls(**kwargs)
155
+ trace.return_value = stream
156
+ return stream
157
+ except (ConnectError, ConnectTimeout):
158
+ if retries_left <= 0:
159
+ raise
160
+ retries_left -= 1
161
+ delay = next(delays)
162
+ with Trace("retry", logger, request, kwargs) as trace:
163
+ self._network_backend.sleep(delay)
164
+
165
+ def can_handle_request(self, origin: Origin) -> bool:
166
+ return origin == self._origin
167
+
168
+ def close(self) -> None:
169
+ if self._connection is not None:
170
+ with Trace("close", logger, None, {}):
171
+ self._connection.close()
172
+
173
+ def is_available(self) -> bool:
174
+ if self._connection is None:
175
+ # If HTTP/2 support is enabled, and the resulting connection could
176
+ # end up as HTTP/2 then we should indicate the connection as being
177
+ # available to service multiple requests.
178
+ return (
179
+ self._http2
180
+ and (self._origin.scheme == b"https" or not self._http1)
181
+ and not self._connect_failed
182
+ )
183
+ return self._connection.is_available()
184
+
185
+ def has_expired(self) -> bool:
186
+ if self._connection is None:
187
+ return self._connect_failed
188
+ return self._connection.has_expired()
189
+
190
+ def is_idle(self) -> bool:
191
+ if self._connection is None:
192
+ return self._connect_failed
193
+ return self._connection.is_idle()
194
+
195
+ def is_closed(self) -> bool:
196
+ if self._connection is None:
197
+ return self._connect_failed
198
+ return self._connection.is_closed()
199
+
200
+ def info(self) -> str:
201
+ if self._connection is None:
202
+ return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
203
+ return self._connection.info()
204
+
205
+ def __repr__(self) -> str:
206
+ return f"<{self.__class__.__name__} [{self.info()}]>"
207
+
208
+ # These context managers are not used in the standard flow, but are
209
+ # useful for testing or working with connection instances directly.
210
+
211
+ def __enter__(self) -> "HTTPConnection":
212
+ return self
213
+
214
+ def __exit__(
215
+ self,
216
+ exc_type: Optional[Type[BaseException]] = None,
217
+ exc_value: Optional[BaseException] = None,
218
+ traceback: Optional[TracebackType] = None,
219
+ ) -> None:
220
+ self.close()
valley/lib/python3.10/site-packages/httpcore/_sync/http11.py ADDED
@@ -0,0 +1,386 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import enum
2
+ import logging
3
+ import ssl
4
+ import time
5
+ from types import TracebackType
6
+ from typing import (
7
+ Any,
8
+ Iterable,
9
+ Iterator,
10
+ List,
11
+ Optional,
12
+ Tuple,
13
+ Type,
14
+ Union,
15
+ )
16
+
17
+ import h11
18
+
19
+ from .._backends.base import NetworkStream
20
+ from .._exceptions import (
21
+ ConnectionNotAvailable,
22
+ LocalProtocolError,
23
+ RemoteProtocolError,
24
+ WriteError,
25
+ map_exceptions,
26
+ )
27
+ from .._models import Origin, Request, Response
28
+ from .._synchronization import Lock, ShieldCancellation
29
+ from .._trace import Trace
30
+ from .interfaces import ConnectionInterface
31
+
32
+ logger = logging.getLogger("httpcore.http11")
33
+
34
+
35
+ # A subset of `h11.Event` types supported by `_send_event`
36
+ H11SendEvent = Union[
37
+ h11.Request,
38
+ h11.Data,
39
+ h11.EndOfMessage,
40
+ ]
41
+
42
+
43
+ class HTTPConnectionState(enum.IntEnum):
44
+ NEW = 0
45
+ ACTIVE = 1
46
+ IDLE = 2
47
+ CLOSED = 3
48
+
49
+
50
+ class HTTP11Connection(ConnectionInterface):
51
+ READ_NUM_BYTES = 64 * 1024
52
+ MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024
53
+
54
+ def __init__(
55
+ self,
56
+ origin: Origin,
57
+ stream: NetworkStream,
58
+ keepalive_expiry: Optional[float] = None,
59
+ ) -> None:
60
+ self._origin = origin
61
+ self._network_stream = stream
62
+ self._keepalive_expiry: Optional[float] = keepalive_expiry
63
+ self._expire_at: Optional[float] = None
64
+ self._state = HTTPConnectionState.NEW
65
+ self._state_lock = Lock()
66
+ self._request_count = 0
67
+ self._h11_state = h11.Connection(
68
+ our_role=h11.CLIENT,
69
+ max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
70
+ )
71
+
72
+ def handle_request(self, request: Request) -> Response:
73
+ if not self.can_handle_request(request.url.origin):
74
+ raise RuntimeError(
75
+ f"Attempted to send request to {request.url.origin} on connection "
76
+ f"to {self._origin}"
77
+ )
78
+
79
+ with self._state_lock:
80
+ if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
81
+ self._request_count += 1
82
+ self._state = HTTPConnectionState.ACTIVE
83
+ self._expire_at = None
84
+ else:
85
+ raise ConnectionNotAvailable()
86
+
87
+ try:
88
+ kwargs = {"request": request}
89
+ try:
90
+ with Trace(
91
+ "send_request_headers", logger, request, kwargs
92
+ ) as trace:
93
+ self._send_request_headers(**kwargs)
94
+ with Trace("send_request_body", logger, request, kwargs) as trace:
95
+ self._send_request_body(**kwargs)
96
+ except WriteError:
97
+ # If we get a write error while we're writing the request,
98
+ # then we supress this error and move on to attempting to
99
+ # read the response. Servers can sometimes close the request
100
+ # pre-emptively and then respond with a well formed HTTP
101
+ # error response.
102
+ pass
103
+
104
+ with Trace(
105
+ "receive_response_headers", logger, request, kwargs
106
+ ) as trace:
107
+ (
108
+ http_version,
109
+ status,
110
+ reason_phrase,
111
+ headers,
112
+ trailing_data,
113
+ ) = self._receive_response_headers(**kwargs)
114
+ trace.return_value = (
115
+ http_version,
116
+ status,
117
+ reason_phrase,
118
+ headers,
119
+ )
120
+
121
+ network_stream = self._network_stream
122
+
123
+ # CONNECT or Upgrade request
124
+ if (status == 101) or (
125
+ (request.method == b"CONNECT") and (200 <= status < 300)
126
+ ):
127
+ network_stream = HTTP11UpgradeStream(network_stream, trailing_data)
128
+
129
+ return Response(
130
+ status=status,
131
+ headers=headers,
132
+ content=HTTP11ConnectionByteStream(self, request),
133
+ extensions={
134
+ "http_version": http_version,
135
+ "reason_phrase": reason_phrase,
136
+ "network_stream": network_stream,
137
+ },
138
+ )
139
+ except BaseException as exc:
140
+ with ShieldCancellation():
141
+ with Trace("response_closed", logger, request) as trace:
142
+ self._response_closed()
143
+ raise exc
144
+
145
+ # Sending the request...
146
+
147
+ def _send_request_headers(self, request: Request) -> None:
148
+ timeouts = request.extensions.get("timeout", {})
149
+ timeout = timeouts.get("write", None)
150
+
151
+ with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
152
+ event = h11.Request(
153
+ method=request.method,
154
+ target=request.url.target,
155
+ headers=request.headers,
156
+ )
157
+ self._send_event(event, timeout=timeout)
158
+
159
+ def _send_request_body(self, request: Request) -> None:
160
+ timeouts = request.extensions.get("timeout", {})
161
+ timeout = timeouts.get("write", None)
162
+
163
+ assert isinstance(request.stream, Iterable)
164
+ for chunk in request.stream:
165
+ event = h11.Data(data=chunk)
166
+ self._send_event(event, timeout=timeout)
167
+
168
+ self._send_event(h11.EndOfMessage(), timeout=timeout)
169
+
170
+ def _send_event(
171
+ self, event: h11.Event, timeout: Optional[float] = None
172
+ ) -> None:
173
+ bytes_to_send = self._h11_state.send(event)
174
+ if bytes_to_send is not None:
175
+ self._network_stream.write(bytes_to_send, timeout=timeout)
176
+
177
+ # Receiving the response...
178
+
179
+ def _receive_response_headers(
180
+ self, request: Request
181
+ ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]:
182
+ timeouts = request.extensions.get("timeout", {})
183
+ timeout = timeouts.get("read", None)
184
+
185
+ while True:
186
+ event = self._receive_event(timeout=timeout)
187
+ if isinstance(event, h11.Response):
188
+ break
189
+ if (
190
+ isinstance(event, h11.InformationalResponse)
191
+ and event.status_code == 101
192
+ ):
193
+ break
194
+
195
+ http_version = b"HTTP/" + event.http_version
196
+
197
+ # h11 version 0.11+ supports a `raw_items` interface to get the
198
+ # raw header casing, rather than the enforced lowercase headers.
199
+ headers = event.headers.raw_items()
200
+
201
+ trailing_data, _ = self._h11_state.trailing_data
202
+
203
+ return http_version, event.status_code, event.reason, headers, trailing_data
204
+
205
+ def _receive_response_body(self, request: Request) -> Iterator[bytes]:
206
+ timeouts = request.extensions.get("timeout", {})
207
+ timeout = timeouts.get("read", None)
208
+
209
+ while True:
210
+ event = self._receive_event(timeout=timeout)
211
+ if isinstance(event, h11.Data):
212
+ yield bytes(event.data)
213
+ elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
214
+ break
215
+
216
+ def _receive_event(
217
+ self, timeout: Optional[float] = None
218
+ ) -> Union[h11.Event, Type[h11.PAUSED]]:
219
+ while True:
220
+ with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
221
+ event = self._h11_state.next_event()
222
+
223
+ if event is h11.NEED_DATA:
224
+ data = self._network_stream.read(
225
+ self.READ_NUM_BYTES, timeout=timeout
226
+ )
227
+
228
+ # If we feed this case through h11 we'll raise an exception like:
229
+ #
230
+ # httpcore.RemoteProtocolError: can't handle event type
231
+ # ConnectionClosed when role=SERVER and state=SEND_RESPONSE
232
+ #
233
+ # Which is accurate, but not very informative from an end-user
234
+ # perspective. Instead we handle this case distinctly and treat
235
+ # it as a ConnectError.
236
+ if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
237
+ msg = "Server disconnected without sending a response."
238
+ raise RemoteProtocolError(msg)
239
+
240
+ self._h11_state.receive_data(data)
241
+ else:
242
+ # mypy fails to narrow the type in the above if statement above
243
+ return event # type: ignore[return-value]
244
+
245
+ def _response_closed(self) -> None:
246
+ with self._state_lock:
247
+ if (
248
+ self._h11_state.our_state is h11.DONE
249
+ and self._h11_state.their_state is h11.DONE
250
+ ):
251
+ self._state = HTTPConnectionState.IDLE
252
+ self._h11_state.start_next_cycle()
253
+ if self._keepalive_expiry is not None:
254
+ now = time.monotonic()
255
+ self._expire_at = now + self._keepalive_expiry
256
+ else:
257
+ self.close()
258
+
259
+ # Once the connection is no longer required...
260
+
261
+ def close(self) -> None:
262
+ # Note that this method unilaterally closes the connection, and does
263
+ # not have any kind of locking in place around it.
264
+ self._state = HTTPConnectionState.CLOSED
265
+ self._network_stream.close()
266
+
267
+ # The ConnectionInterface methods provide information about the state of
268
+ # the connection, allowing for a connection pooling implementation to
269
+ # determine when to reuse and when to close the connection...
270
+
271
+ def can_handle_request(self, origin: Origin) -> bool:
272
+ return origin == self._origin
273
+
274
+ def is_available(self) -> bool:
275
+ # Note that HTTP/1.1 connections in the "NEW" state are not treated as
276
+ # being "available". The control flow which created the connection will
277
+ # be able to send an outgoing request, but the connection will not be
278
+ # acquired from the connection pool for any other request.
279
+ return self._state == HTTPConnectionState.IDLE
280
+
281
+ def has_expired(self) -> bool:
282
+ now = time.monotonic()
283
+ keepalive_expired = self._expire_at is not None and now > self._expire_at
284
+
285
+ # If the HTTP connection is idle but the socket is readable, then the
286
+ # only valid state is that the socket is about to return b"", indicating
287
+ # a server-initiated disconnect.
288
+ server_disconnected = (
289
+ self._state == HTTPConnectionState.IDLE
290
+ and self._network_stream.get_extra_info("is_readable")
291
+ )
292
+
293
+ return keepalive_expired or server_disconnected
294
+
295
+ def is_idle(self) -> bool:
296
+ return self._state == HTTPConnectionState.IDLE
297
+
298
+ def is_closed(self) -> bool:
299
+ return self._state == HTTPConnectionState.CLOSED
300
+
301
+ def info(self) -> str:
302
+ origin = str(self._origin)
303
+ return (
304
+ f"{origin!r}, HTTP/1.1, {self._state.name}, "
305
+ f"Request Count: {self._request_count}"
306
+ )
307
+
308
+ def __repr__(self) -> str:
309
+ class_name = self.__class__.__name__
310
+ origin = str(self._origin)
311
+ return (
312
+ f"<{class_name} [{origin!r}, {self._state.name}, "
313
+ f"Request Count: {self._request_count}]>"
314
+ )
315
+
316
+ # These context managers are not used in the standard flow, but are
317
+ # useful for testing or working with connection instances directly.
318
+
319
+ def __enter__(self) -> "HTTP11Connection":
320
+ return self
321
+
322
+ def __exit__(
323
+ self,
324
+ exc_type: Optional[Type[BaseException]] = None,
325
+ exc_value: Optional[BaseException] = None,
326
+ traceback: Optional[TracebackType] = None,
327
+ ) -> None:
328
+ self.close()
329
+
330
+
331
+ class HTTP11ConnectionByteStream:
332
+ def __init__(self, connection: HTTP11Connection, request: Request) -> None:
333
+ self._connection = connection
334
+ self._request = request
335
+ self._closed = False
336
+
337
+ def __iter__(self) -> Iterator[bytes]:
338
+ kwargs = {"request": self._request}
339
+ try:
340
+ with Trace("receive_response_body", logger, self._request, kwargs):
341
+ for chunk in self._connection._receive_response_body(**kwargs):
342
+ yield chunk
343
+ except BaseException as exc:
344
+ # If we get an exception while streaming the response,
345
+ # we want to close the response (and possibly the connection)
346
+ # before raising that exception.
347
+ with ShieldCancellation():
348
+ self.close()
349
+ raise exc
350
+
351
+ def close(self) -> None:
352
+ if not self._closed:
353
+ self._closed = True
354
+ with Trace("response_closed", logger, self._request):
355
+ self._connection._response_closed()
356
+
357
+
358
+ class HTTP11UpgradeStream(NetworkStream):
359
+ def __init__(self, stream: NetworkStream, leading_data: bytes) -> None:
360
+ self._stream = stream
361
+ self._leading_data = leading_data
362
+
363
+ def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
364
+ if self._leading_data:
365
+ buffer = self._leading_data[:max_bytes]
366
+ self._leading_data = self._leading_data[max_bytes:]
367
+ return buffer
368
+ else:
369
+ return self._stream.read(max_bytes, timeout)
370
+
371
+ def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
372
+ self._stream.write(buffer, timeout)
373
+
374
+ def close(self) -> None:
375
+ self._stream.close()
376
+
377
+ def start_tls(
378
+ self,
379
+ ssl_context: ssl.SSLContext,
380
+ server_hostname: Optional[str] = None,
381
+ timeout: Optional[float] = None,
382
+ ) -> NetworkStream:
383
+ return self._stream.start_tls(ssl_context, server_hostname, timeout)
384
+
385
+ def get_extra_info(self, info: str) -> Any:
386
+ return self._stream.get_extra_info(info)
valley/lib/python3.10/site-packages/httpcore/_sync/http2.py ADDED
@@ -0,0 +1,589 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import enum
2
+ import logging
3
+ import time
4
+ import types
5
+ import typing
6
+
7
+ import h2.config
8
+ import h2.connection
9
+ import h2.events
10
+ import h2.exceptions
11
+ import h2.settings
12
+
13
+ from .._backends.base import NetworkStream
14
+ from .._exceptions import (
15
+ ConnectionNotAvailable,
16
+ LocalProtocolError,
17
+ RemoteProtocolError,
18
+ )
19
+ from .._models import Origin, Request, Response
20
+ from .._synchronization import Lock, Semaphore, ShieldCancellation
21
+ from .._trace import Trace
22
+ from .interfaces import ConnectionInterface
23
+
24
+ logger = logging.getLogger("httpcore.http2")
25
+
26
+
27
+ def has_body_headers(request: Request) -> bool:
28
+ return any(
29
+ k.lower() == b"content-length" or k.lower() == b"transfer-encoding"
30
+ for k, v in request.headers
31
+ )
32
+
33
+
34
+ class HTTPConnectionState(enum.IntEnum):
35
+ ACTIVE = 1
36
+ IDLE = 2
37
+ CLOSED = 3
38
+
39
+
40
+ class HTTP2Connection(ConnectionInterface):
41
+ READ_NUM_BYTES = 64 * 1024
42
+ CONFIG = h2.config.H2Configuration(validate_inbound_headers=False)
43
+
44
+ def __init__(
45
+ self,
46
+ origin: Origin,
47
+ stream: NetworkStream,
48
+ keepalive_expiry: typing.Optional[float] = None,
49
+ ):
50
+ self._origin = origin
51
+ self._network_stream = stream
52
+ self._keepalive_expiry: typing.Optional[float] = keepalive_expiry
53
+ self._h2_state = h2.connection.H2Connection(config=self.CONFIG)
54
+ self._state = HTTPConnectionState.IDLE
55
+ self._expire_at: typing.Optional[float] = None
56
+ self._request_count = 0
57
+ self._init_lock = Lock()
58
+ self._state_lock = Lock()
59
+ self._read_lock = Lock()
60
+ self._write_lock = Lock()
61
+ self._sent_connection_init = False
62
+ self._used_all_stream_ids = False
63
+ self._connection_error = False
64
+
65
+ # Mapping from stream ID to response stream events.
66
+ self._events: typing.Dict[
67
+ int,
68
+ typing.Union[
69
+ h2.events.ResponseReceived,
70
+ h2.events.DataReceived,
71
+ h2.events.StreamEnded,
72
+ h2.events.StreamReset,
73
+ ],
74
+ ] = {}
75
+
76
+ # Connection terminated events are stored as state since
77
+ # we need to handle them for all streams.
78
+ self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = (
79
+ None
80
+ )
81
+
82
+ self._read_exception: typing.Optional[Exception] = None
83
+ self._write_exception: typing.Optional[Exception] = None
84
+
85
+ def handle_request(self, request: Request) -> Response:
86
+ if not self.can_handle_request(request.url.origin):
87
+ # This cannot occur in normal operation, since the connection pool
88
+ # will only send requests on connections that handle them.
89
+ # It's in place simply for resilience as a guard against incorrect
90
+ # usage, for anyone working directly with httpcore connections.
91
+ raise RuntimeError(
92
+ f"Attempted to send request to {request.url.origin} on connection "
93
+ f"to {self._origin}"
94
+ )
95
+
96
+ with self._state_lock:
97
+ if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE):
98
+ self._request_count += 1
99
+ self._expire_at = None
100
+ self._state = HTTPConnectionState.ACTIVE
101
+ else:
102
+ raise ConnectionNotAvailable()
103
+
104
+ with self._init_lock:
105
+ if not self._sent_connection_init:
106
+ try:
107
+ kwargs = {"request": request}
108
+ with Trace("send_connection_init", logger, request, kwargs):
109
+ self._send_connection_init(**kwargs)
110
+ except BaseException as exc:
111
+ with ShieldCancellation():
112
+ self.close()
113
+ raise exc
114
+
115
+ self._sent_connection_init = True
116
+
117
+ # Initially start with just 1 until the remote server provides
118
+ # its max_concurrent_streams value
119
+ self._max_streams = 1
120
+
121
+ local_settings_max_streams = (
122
+ self._h2_state.local_settings.max_concurrent_streams
123
+ )
124
+ self._max_streams_semaphore = Semaphore(local_settings_max_streams)
125
+
126
+ for _ in range(local_settings_max_streams - self._max_streams):
127
+ self._max_streams_semaphore.acquire()
128
+
129
+ self._max_streams_semaphore.acquire()
130
+
131
+ try:
132
+ stream_id = self._h2_state.get_next_available_stream_id()
133
+ self._events[stream_id] = []
134
+ except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover
135
+ self._used_all_stream_ids = True
136
+ self._request_count -= 1
137
+ raise ConnectionNotAvailable()
138
+
139
+ try:
140
+ kwargs = {"request": request, "stream_id": stream_id}
141
+ with Trace("send_request_headers", logger, request, kwargs):
142
+ self._send_request_headers(request=request, stream_id=stream_id)
143
+ with Trace("send_request_body", logger, request, kwargs):
144
+ self._send_request_body(request=request, stream_id=stream_id)
145
+ with Trace(
146
+ "receive_response_headers", logger, request, kwargs
147
+ ) as trace:
148
+ status, headers = self._receive_response(
149
+ request=request, stream_id=stream_id
150
+ )
151
+ trace.return_value = (status, headers)
152
+
153
+ return Response(
154
+ status=status,
155
+ headers=headers,
156
+ content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id),
157
+ extensions={
158
+ "http_version": b"HTTP/2",
159
+ "network_stream": self._network_stream,
160
+ "stream_id": stream_id,
161
+ },
162
+ )
163
+ except BaseException as exc: # noqa: PIE786
164
+ with ShieldCancellation():
165
+ kwargs = {"stream_id": stream_id}
166
+ with Trace("response_closed", logger, request, kwargs):
167
+ self._response_closed(stream_id=stream_id)
168
+
169
+ if isinstance(exc, h2.exceptions.ProtocolError):
170
+ # One case where h2 can raise a protocol error is when a
171
+ # closed frame has been seen by the state machine.
172
+ #
173
+ # This happens when one stream is reading, and encounters
174
+ # a GOAWAY event. Other flows of control may then raise
175
+ # a protocol error at any point they interact with the 'h2_state'.
176
+ #
177
+ # In this case we'll have stored the event, and should raise
178
+ # it as a RemoteProtocolError.
179
+ if self._connection_terminated: # pragma: nocover
180
+ raise RemoteProtocolError(self._connection_terminated)
181
+ # If h2 raises a protocol error in some other state then we
182
+ # must somehow have made a protocol violation.
183
+ raise LocalProtocolError(exc) # pragma: nocover
184
+
185
+ raise exc
186
+
187
    def _send_connection_init(self, request: Request) -> None:
        """
        The HTTP/2 connection requires some initial setup before we can start
        using individual request/response streams on it.

        Sends our SETTINGS frame and a connection-level WINDOW_UPDATE, then
        flushes the outgoing data to the network.
        """
        # Need to set these manually here instead of manipulating via
        # __setitem__() otherwise the H2Connection will emit SettingsUpdate
        # frames in addition to sending the undesired defaults.
        self._h2_state.local_settings = h2.settings.Settings(
            client=True,
            initial_values={
                # Disable PUSH_PROMISE frames from the server since we don't do anything
                # with them for now. Maybe when we support caching?
                h2.settings.SettingCodes.ENABLE_PUSH: 0,
                # These two are taken from h2 for safe defaults
                h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
                h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
            },
        )

        # Some websites (*cough* Yahoo *cough*) balk at this setting being
        # present in the initial handshake since it's not defined in the original
        # RFC despite the RFC mandating ignoring settings you don't know about.
        del self._h2_state.local_settings[
            h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
        ]

        self._h2_state.initiate_connection()
        # Grow the connection-level receive window (to 2**24 bytes) so large
        # response bodies aren't throttled by the small default window.
        self._h2_state.increment_flow_control_window(2**24)
        self._write_outgoing_data(request)
217
+
218
+ # Sending the request...
219
+
220
    def _send_request_headers(self, request: Request, stream_id: int) -> None:
        """
        Send the request headers to a given stream ID.
        """
        # If the request has no body headers we can set END_STREAM on the
        # HEADERS frame and skip sending DATA frames entirely.
        end_stream = not has_body_headers(request)

        # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
        # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
        # HTTP/1.1 style headers, and map them appropriately if we end up on
        # an HTTP/2 connection.
        authority = [v for k, v in request.headers if k.lower() == b"host"][0]

        headers = [
            (b":method", request.method),
            (b":authority", authority),
            (b":scheme", request.url.scheme),
            (b":path", request.url.target),
        ] + [
            (k.lower(), v)
            for k, v in request.headers
            if k.lower()
            not in (
                # 'host' is replaced by ':authority' above, and
                # 'transfer-encoding' is dropped (HTTP/2 has its own framing).
                b"host",
                b"transfer-encoding",
            )
        ]

        self._h2_state.send_headers(stream_id, headers, end_stream=end_stream)
        # Grow this stream's receive window (to 2**24 bytes) so the response
        # body isn't throttled by the small default flow-control window.
        self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
        self._write_outgoing_data(request)
250
+
251
    def _send_request_body(self, request: Request, stream_id: int) -> None:
        """
        Iterate over the request body sending it to a given stream ID.
        """
        # If there are no body headers, END_STREAM was already sent with the
        # request headers and there is nothing to do here.
        if not has_body_headers(request):
            return

        assert isinstance(request.stream, typing.Iterable)
        for data in request.stream:
            self._send_stream_data(request, stream_id, data)
        self._send_end_stream(request, stream_id)
262
+
263
    def _send_stream_data(
        self, request: Request, stream_id: int, data: bytes
    ) -> None:
        """
        Send a single chunk of data in one or more data frames.
        """
        while data:
            # Block until flow control allows sending at least one byte, then
            # send no more than the current window / max frame size permits.
            max_flow = self._wait_for_outgoing_flow(request, stream_id)
            chunk_size = min(len(data), max_flow)
            chunk, data = data[:chunk_size], data[chunk_size:]
            self._h2_state.send_data(stream_id, chunk)
            self._write_outgoing_data(request)
275
+
276
    def _send_end_stream(self, request: Request, stream_id: int) -> None:
        """
        Send an empty data frame on a given stream ID with the END_STREAM flag set.
        """
        self._h2_state.end_stream(stream_id)
        self._write_outgoing_data(request)
282
+
283
+ # Receiving the response...
284
+
285
    def _receive_response(
        self, request: Request, stream_id: int
    ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
        """
        Return the response status code and headers for a given stream ID.
        """
        # Consume stream events until the response HEADERS arrive.
        while True:
            event = self._receive_stream_event(request, stream_id)
            if isinstance(event, h2.events.ResponseReceived):
                break

        status_code = 200
        headers = []
        for k, v in event.headers:
            if k == b":status":
                status_code = int(v.decode("ascii", errors="ignore"))
            elif not k.startswith(b":"):
                # Regular headers pass through; other pseudo-headers are dropped.
                headers.append((k, v))

        return (status_code, headers)
305
+
306
    def _receive_response_body(
        self, request: Request, stream_id: int
    ) -> typing.Iterator[bytes]:
        """
        Iterator that returns the bytes of the response body for a given stream ID.
        """
        while True:
            event = self._receive_stream_event(request, stream_id)
            if isinstance(event, h2.events.DataReceived):
                # Acknowledge the received data straight away so the server's
                # flow-control window re-opens and it can keep sending.
                amount = event.flow_controlled_length
                self._h2_state.acknowledge_received_data(amount, stream_id)
                self._write_outgoing_data(request)
                yield event.data
            elif isinstance(event, h2.events.StreamEnded):
                break
321
+
322
    def _receive_stream_event(
        self, request: Request, stream_id: int
    ) -> typing.Union[
        h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
    ]:
        """
        Return the next available event for a given stream ID.

        Will read more data from the network if required.
        """
        while not self._events.get(stream_id):
            self._receive_events(request, stream_id)
        event = self._events[stream_id].pop(0)
        # A stream reset by the server is surfaced as a protocol error.
        if isinstance(event, h2.events.StreamReset):
            raise RemoteProtocolError(event)
        return event
338
+
339
    def _receive_events(
        self, request: Request, stream_id: typing.Optional[int] = None
    ) -> None:
        """
        Read some data from the network until we see one or more events
        for a given stream ID.

        Events for other streams are stashed in `self._events`; connection
        level events update connection state directly.
        """
        with self._read_lock:
            if self._connection_terminated is not None:
                last_stream_id = self._connection_terminated.last_stream_id
                if stream_id and last_stream_id and stream_id > last_stream_id:
                    # This stream was above the GOAWAY cut-off, so it was never
                    # processed by the server; signal that a retry on a fresh
                    # connection is possible.
                    self._request_count -= 1
                    raise ConnectionNotAvailable()
                raise RemoteProtocolError(self._connection_terminated)

            # This conditional is a bit icky. We don't want to block reading if we've
            # actually got an event to return for a given stream. We need to do that
            # check *within* the atomic read lock. Though it also need to be optional,
            # because when we call it from `_wait_for_outgoing_flow` we *do* want to
            # block until we've available flow control, event when we have events
            # pending for the stream ID we're attempting to send on.
            if stream_id is None or not self._events.get(stream_id):
                events = self._read_incoming_data(request)
                for event in events:
                    if isinstance(event, h2.events.RemoteSettingsChanged):
                        with Trace(
                            "receive_remote_settings", logger, request
                        ) as trace:
                            self._receive_remote_settings_change(event)
                            trace.return_value = event

                    elif isinstance(
                        event,
                        (
                            h2.events.ResponseReceived,
                            h2.events.DataReceived,
                            h2.events.StreamEnded,
                            h2.events.StreamReset,
                        ),
                    ):
                        # Only stash events for streams we're still tracking.
                        if event.stream_id in self._events:
                            self._events[event.stream_id].append(event)

                    elif isinstance(event, h2.events.ConnectionTerminated):
                        self._connection_terminated = event

        # Flush any frames (e.g. SETTINGS acks, WINDOW_UPDATEs) the state
        # machine queued while processing the incoming events.
        self._write_outgoing_data(request)
386
+
387
    def _receive_remote_settings_change(self, event: h2.events.Event) -> None:
        """
        React to a server SETTINGS change, resizing the stream-limiting
        semaphore when MAX_CONCURRENT_STREAMS changes.
        """
        max_concurrent_streams = event.changed_settings.get(
            h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS
        )
        if max_concurrent_streams:
            # Never exceed our own local limit, whatever the server allows.
            new_max_streams = min(
                max_concurrent_streams.new_value,
                self._h2_state.local_settings.max_concurrent_streams,
            )
            if new_max_streams and new_max_streams != self._max_streams:
                while new_max_streams > self._max_streams:
                    # Limit increased: release permits to allow more streams.
                    self._max_streams_semaphore.release()
                    self._max_streams += 1
                while new_max_streams < self._max_streams:
                    # Limit decreased: absorb permits to allow fewer streams.
                    self._max_streams_semaphore.acquire()
                    self._max_streams -= 1
403
+
404
    def _response_closed(self, stream_id: int) -> None:
        """
        Called when a response stream is fully consumed or closed: frees the
        stream's semaphore slot and updates overall connection state.
        """
        self._max_streams_semaphore.release()
        del self._events[stream_id]
        with self._state_lock:
            if self._connection_terminated and not self._events:
                # Server sent GOAWAY and no streams remain: tear down.
                self.close()

            elif self._state == HTTPConnectionState.ACTIVE and not self._events:
                self._state = HTTPConnectionState.IDLE
                if self._keepalive_expiry is not None:
                    now = time.monotonic()
                    self._expire_at = now + self._keepalive_expiry
                if self._used_all_stream_ids:  # pragma: nocover
                    self.close()
418
+
419
    def close(self) -> None:
        """
        Close the h2 state machine and the underlying network stream, and
        mark the connection CLOSED.
        """
        # Note that this method unilaterally closes the connection, and does
        # not have any kind of locking in place around it.
        self._h2_state.close_connection()
        self._state = HTTPConnectionState.CLOSED
        self._network_stream.close()
425
+
426
+ # Wrappers around network read/write operations...
427
+
428
    def _read_incoming_data(
        self, request: Request
    ) -> typing.List[h2.events.Event]:
        """
        Read a chunk of data from the network, feed it to the h2 state
        machine, and return any events it produces.
        """
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("read", None)

        if self._read_exception is not None:
            raise self._read_exception  # pragma: nocover

        try:
            data = self._network_stream.read(self.READ_NUM_BYTES, timeout)
            if data == b"":
                raise RemoteProtocolError("Server disconnected")
        except Exception as exc:
            # If we get a network error we should:
            #
            # 1. Save the exception and just raise it immediately on any future reads.
            #    (For example, this means that a single read timeout or disconnect will
            #    immediately close all pending streams. Without requiring multiple
            #    sequential timeouts.)
            # 2. Mark the connection as errored, so that we don't accept any other
            #    incoming requests.
            self._read_exception = exc
            self._connection_error = True
            raise exc

        events: typing.List[h2.events.Event] = self._h2_state.receive_data(data)

        return events
457
+
458
    def _write_outgoing_data(self, request: Request) -> None:
        """
        Flush any data the h2 state machine has queued to the network stream.
        """
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("write", None)

        # The lock serializes writers so frames from concurrent streams
        # aren't interleaved mid-write.
        with self._write_lock:
            data_to_send = self._h2_state.data_to_send()

            if self._write_exception is not None:
                raise self._write_exception  # pragma: nocover

            try:
                self._network_stream.write(data_to_send, timeout)
            except Exception as exc:  # pragma: nocover
                # If we get a network error we should:
                #
                # 1. Save the exception and just raise it immediately on any future write.
                #    (For example, this means that a single write timeout or disconnect will
                #    immediately close all pending streams. Without requiring multiple
                #    sequential timeouts.)
                # 2. Mark the connection as errored, so that we don't accept any other
                #    incoming requests.
                self._write_exception = exc
                self._connection_error = True
                raise exc
482
+
483
+ # Flow control...
484
+
485
    def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int:
        """
        Returns the maximum allowable outgoing flow for a given stream.

        If the allowable flow is zero, then waits on the network until
        WindowUpdated frames have increased the flow rate.
        https://tools.ietf.org/html/rfc7540#section-6.9
        """
        local_flow: int = self._h2_state.local_flow_control_window(stream_id)
        max_frame_size: int = self._h2_state.max_outbound_frame_size
        flow = min(local_flow, max_frame_size)
        while flow == 0:
            # Reading events may process WINDOW_UPDATE frames, re-opening
            # the window; recompute and retry until flow is available.
            self._receive_events(request)
            local_flow = self._h2_state.local_flow_control_window(stream_id)
            max_frame_size = self._h2_state.max_outbound_frame_size
            flow = min(local_flow, max_frame_size)
        return flow
502
+
503
+ # Interface for connection pooling...
504
+
505
    def can_handle_request(self, origin: Origin) -> bool:
        # A connection only serves requests for the origin it was opened to.
        return origin == self._origin
507
+
508
    def is_available(self) -> bool:
        # The connection can accept a new request as long as it isn't closed
        # or errored, hasn't used up its stream IDs, and the underlying h2
        # state machine hasn't entered its CLOSED state.
        return (
            self._state != HTTPConnectionState.CLOSED
            and not self._connection_error
            and not self._used_all_stream_ids
            and not (
                self._h2_state.state_machine.state
                == h2.connection.ConnectionState.CLOSED
            )
        )
518
+
519
+ def has_expired(self) -> bool:
520
+ now = time.monotonic()
521
+ return self._expire_at is not None and now > self._expire_at
522
+
523
    def is_idle(self) -> bool:
        # True when the connection is open but has no in-flight streams.
        return self._state == HTTPConnectionState.IDLE
525
+
526
    def is_closed(self) -> bool:
        # True once close() has been called on this connection.
        return self._state == HTTPConnectionState.CLOSED
528
+
529
    def info(self) -> str:
        # Short human-readable summary, used for pool introspection/logging.
        origin = str(self._origin)
        return (
            f"{origin!r}, HTTP/2, {self._state.name}, "
            f"Request Count: {self._request_count}"
        )
535
+
536
    def __repr__(self) -> str:
        # Debug representation mirroring info(), with the class name included.
        class_name = self.__class__.__name__
        origin = str(self._origin)
        return (
            f"<{class_name} [{origin!r}, {self._state.name}, "
            f"Request Count: {self._request_count}]>"
        )
543
+
544
+ # These context managers are not used in the standard flow, but are
545
+ # useful for testing or working with connection instances directly.
546
+
547
    def __enter__(self) -> "HTTP2Connection":
        # Context-manager support: entering yields the connection itself.
        return self
549
+
550
    def __exit__(
        self,
        exc_type: typing.Optional[typing.Type[BaseException]] = None,
        exc_value: typing.Optional[BaseException] = None,
        traceback: typing.Optional[types.TracebackType] = None,
    ) -> None:
        # Unconditionally close on context exit, whether or not an
        # exception occurred.
        self.close()
557
+
558
+
559
class HTTP2ConnectionByteStream:
    """
    Iterable response-body stream for a single HTTP/2 request.

    Wraps a connection plus stream ID, yielding body chunks as they arrive
    and notifying the connection when the stream is closed.
    """

    def __init__(
        self, connection: HTTP2Connection, request: Request, stream_id: int
    ) -> None:
        self._connection = connection
        self._request = request
        self._stream_id = stream_id
        self._closed = False

    def __iter__(self) -> typing.Iterator[bytes]:
        kwargs = {"request": self._request, "stream_id": self._stream_id}
        try:
            with Trace("receive_response_body", logger, self._request, kwargs):
                for chunk in self._connection._receive_response_body(
                    request=self._request, stream_id=self._stream_id
                ):
                    yield chunk
        except BaseException as exc:
            # If we get an exception while streaming the response,
            # we want to close the response (and possibly the connection)
            # before raising that exception.
            with ShieldCancellation():
                self.close()
            raise exc

    def close(self) -> None:
        # Idempotent: only the first call notifies the connection.
        if not self._closed:
            self._closed = True
            kwargs = {"stream_id": self._stream_id}
            with Trace("response_closed", logger, self._request, kwargs):
                self._connection._response_closed(stream_id=self._stream_id)
valley/lib/python3.10/site-packages/httpcore/_sync/http_proxy.py ADDED
@@ -0,0 +1,368 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import ssl
3
+ from base64 import b64encode
4
+ from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union
5
+
6
+ from .._backends.base import SOCKET_OPTION, NetworkBackend
7
+ from .._exceptions import ProxyError
8
+ from .._models import (
9
+ URL,
10
+ Origin,
11
+ Request,
12
+ Response,
13
+ enforce_bytes,
14
+ enforce_headers,
15
+ enforce_url,
16
+ )
17
+ from .._ssl import default_ssl_context
18
+ from .._synchronization import Lock
19
+ from .._trace import Trace
20
+ from .connection import HTTPConnection
21
+ from .connection_pool import ConnectionPool
22
+ from .http11 import HTTP11Connection
23
+ from .interfaces import ConnectionInterface
24
+
25
+ HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
26
+ HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
27
+
28
+
29
+ logger = logging.getLogger("httpcore.proxy")
30
+
31
+
32
def merge_headers(
    default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
    override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
) -> List[Tuple[bytes, bytes]]:
    """
    Append default_headers and override_headers, de-duplicating if a key exists
    in both cases.

    Comparison is case-insensitive on header names; an override entry wins
    over a default entry with the same name.
    """
    defaults = list(default_headers) if default_headers is not None else []
    overrides = list(override_headers) if override_headers is not None else []
    overridden_keys = {name.lower() for name, _ in overrides}
    merged = [item for item in defaults if item[0].lower() not in overridden_keys]
    merged.extend(overrides)
    return merged
49
+
50
+
51
def build_auth_header(username: bytes, password: bytes) -> bytes:
    """
    Return a 'Basic' authorization header value for the given credentials.
    """
    credentials = b":".join((username, password))
    return b"Basic " + b64encode(credentials)
54
+
55
+
56
class HTTPProxy(ConnectionPool):
    """
    A connection pool that sends requests via an HTTP proxy.
    """

    def __init__(
        self,
        proxy_url: Union[URL, bytes, str],
        proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
        ssl_context: Optional[ssl.SSLContext] = None,
        proxy_ssl_context: Optional[ssl.SSLContext] = None,
        max_connections: Optional[int] = 10,
        max_keepalive_connections: Optional[int] = None,
        keepalive_expiry: Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        retries: int = 0,
        local_address: Optional[str] = None,
        uds: Optional[str] = None,
        network_backend: Optional[NetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
    ) -> None:
        """
        A connection pool for making HTTP requests.

        Parameters:
            proxy_url: The URL to use when connecting to the proxy server.
                For example `"http://127.0.0.1:8080/"`.
            proxy_auth: Any proxy authentication as a two-tuple of
                (username, password). May be either bytes or ascii-only str.
            proxy_headers: Any HTTP headers to use for the proxy requests.
                For example `{"Proxy-Authorization": "Basic <username>:<password>"}`.
            ssl_context: An SSL context to use for verifying connections.
                If not specified, the default `httpcore.default_ssl_context()`
                will be used.
            proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin.
            max_connections: The maximum number of concurrent HTTP connections that
                the pool should allow. Any attempt to send a request on a pool that
                would exceed this amount will block until a connection is available.
            max_keepalive_connections: The maximum number of idle HTTP connections
                that will be maintained in the pool.
            keepalive_expiry: The duration in seconds that an idle HTTP connection
                may be maintained for before being expired from the pool.
            http1: A boolean indicating if HTTP/1.1 requests should be supported
                by the connection pool. Defaults to True.
            http2: A boolean indicating if HTTP/2 requests should be supported by
                the connection pool. Defaults to False.
            retries: The maximum number of retries when trying to establish
                a connection.
            local_address: Local address to connect from. Can also be used to
                connect using a particular address family. Using
                `local_address="0.0.0.0"` will connect using an `AF_INET` address
                (IPv4), while using `local_address="::"` will connect using an
                `AF_INET6` address (IPv6).
            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
            network_backend: A backend instance to use for handling network I/O.
        """
        super().__init__(
            ssl_context=ssl_context,
            max_connections=max_connections,
            max_keepalive_connections=max_keepalive_connections,
            keepalive_expiry=keepalive_expiry,
            http1=http1,
            http2=http2,
            network_backend=network_backend,
            retries=retries,
            local_address=local_address,
            uds=uds,
            socket_options=socket_options,
        )

        self._proxy_url = enforce_url(proxy_url, name="proxy_url")
        # TLS towards the proxy only makes sense for an "https" proxy URL.
        if (
            self._proxy_url.scheme == b"http" and proxy_ssl_context is not None
        ):  # pragma: no cover
            raise RuntimeError(
                "The `proxy_ssl_context` argument is not allowed for the http scheme"
            )

        self._ssl_context = ssl_context
        self._proxy_ssl_context = proxy_ssl_context
        self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers")
        if proxy_auth is not None:
            # Convert (username, password) into a Proxy-Authorization header,
            # prepended so explicit proxy_headers can still override it.
            username = enforce_bytes(proxy_auth[0], name="proxy_auth")
            password = enforce_bytes(proxy_auth[1], name="proxy_auth")
            authorization = build_auth_header(username, password)
            self._proxy_headers = [
                (b"Proxy-Authorization", authorization)
            ] + self._proxy_headers

    def create_connection(self, origin: Origin) -> ConnectionInterface:
        # Plain "http" origins are forwarded through the proxy directly;
        # any other scheme is tunnelled with an HTTP CONNECT request.
        if origin.scheme == b"http":
            return ForwardHTTPConnection(
                proxy_origin=self._proxy_url.origin,
                proxy_headers=self._proxy_headers,
                remote_origin=origin,
                keepalive_expiry=self._keepalive_expiry,
                network_backend=self._network_backend,
                proxy_ssl_context=self._proxy_ssl_context,
            )
        return TunnelHTTPConnection(
            proxy_origin=self._proxy_url.origin,
            proxy_headers=self._proxy_headers,
            remote_origin=origin,
            ssl_context=self._ssl_context,
            proxy_ssl_context=self._proxy_ssl_context,
            keepalive_expiry=self._keepalive_expiry,
            http1=self._http1,
            http2=self._http2,
            network_backend=self._network_backend,
        )
168
+
169
+
170
class ForwardHTTPConnection(ConnectionInterface):
    """
    Proxy connection for plain "http" origins.

    Requests are sent to the proxy itself, with the full absolute URL of
    the remote origin as the request target.
    """

    def __init__(
        self,
        proxy_origin: Origin,
        remote_origin: Origin,
        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
        keepalive_expiry: Optional[float] = None,
        network_backend: Optional[NetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
        proxy_ssl_context: Optional[ssl.SSLContext] = None,
    ) -> None:
        # The underlying connection is opened to the proxy, not the origin.
        self._connection = HTTPConnection(
            origin=proxy_origin,
            keepalive_expiry=keepalive_expiry,
            network_backend=network_backend,
            socket_options=socket_options,
            ssl_context=proxy_ssl_context,
        )
        self._proxy_origin = proxy_origin
        self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers")
        self._remote_origin = remote_origin

    def handle_request(self, request: Request) -> Response:
        headers = merge_headers(self._proxy_headers, request.headers)
        # Rewrite the URL: connect to the proxy, with the absolute-form
        # request URL as the target.
        url = URL(
            scheme=self._proxy_origin.scheme,
            host=self._proxy_origin.host,
            port=self._proxy_origin.port,
            target=bytes(request.url),
        )
        proxy_request = Request(
            method=request.method,
            url=url,
            headers=headers,
            content=request.stream,
            extensions=request.extensions,
        )
        return self._connection.handle_request(proxy_request)

    def can_handle_request(self, origin: Origin) -> bool:
        return origin == self._remote_origin

    def close(self) -> None:
        self._connection.close()

    def info(self) -> str:
        return self._connection.info()

    def is_available(self) -> bool:
        return self._connection.is_available()

    def has_expired(self) -> bool:
        return self._connection.has_expired()

    def is_idle(self) -> bool:
        return self._connection.is_idle()

    def is_closed(self) -> bool:
        return self._connection.is_closed()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.info()}]>"
232
+
233
+
234
class TunnelHTTPConnection(ConnectionInterface):
    """
    Proxy connection that tunnels to the remote origin via HTTP CONNECT.

    On first use it issues a CONNECT request to the proxy, upgrades the
    resulting raw stream to TLS, then speaks HTTP/1.1 or HTTP/2 to the
    remote origin over the tunnel.
    """

    def __init__(
        self,
        proxy_origin: Origin,
        remote_origin: Origin,
        ssl_context: Optional[ssl.SSLContext] = None,
        proxy_ssl_context: Optional[ssl.SSLContext] = None,
        proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
        keepalive_expiry: Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        network_backend: Optional[NetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
    ) -> None:
        # Initially connected to the proxy; replaced with a connection to
        # the remote origin once the tunnel is established.
        self._connection: ConnectionInterface = HTTPConnection(
            origin=proxy_origin,
            keepalive_expiry=keepalive_expiry,
            network_backend=network_backend,
            socket_options=socket_options,
            ssl_context=proxy_ssl_context,
        )
        self._proxy_origin = proxy_origin
        self._remote_origin = remote_origin
        self._ssl_context = ssl_context
        self._proxy_ssl_context = proxy_ssl_context
        self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers")
        self._keepalive_expiry = keepalive_expiry
        self._http1 = http1
        self._http2 = http2
        self._connect_lock = Lock()
        self._connected = False

    def handle_request(self, request: Request) -> Response:
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("connect", None)

        # The lock ensures only one caller performs the CONNECT handshake.
        with self._connect_lock:
            if not self._connected:
                target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port)

                connect_url = URL(
                    scheme=self._proxy_origin.scheme,
                    host=self._proxy_origin.host,
                    port=self._proxy_origin.port,
                    target=target,
                )
                connect_headers = merge_headers(
                    [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers
                )
                connect_request = Request(
                    method=b"CONNECT",
                    url=connect_url,
                    headers=connect_headers,
                    extensions=request.extensions,
                )
                connect_response = self._connection.handle_request(
                    connect_request
                )

                # Any non-2xx response means the proxy refused the tunnel.
                if connect_response.status < 200 or connect_response.status > 299:
                    reason_bytes = connect_response.extensions.get("reason_phrase", b"")
                    reason_str = reason_bytes.decode("ascii", errors="ignore")
                    msg = "%d %s" % (connect_response.status, reason_str)
                    self._connection.close()
                    raise ProxyError(msg)

                stream = connect_response.extensions["network_stream"]

                # Upgrade the stream to SSL
                ssl_context = (
                    default_ssl_context()
                    if self._ssl_context is None
                    else self._ssl_context
                )
                alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
                ssl_context.set_alpn_protocols(alpn_protocols)

                kwargs = {
                    "ssl_context": ssl_context,
                    "server_hostname": self._remote_origin.host.decode("ascii"),
                    "timeout": timeout,
                }
                with Trace("start_tls", logger, request, kwargs) as trace:
                    stream = stream.start_tls(**kwargs)
                    trace.return_value = stream

                # Determine if we should be using HTTP/1.1 or HTTP/2
                ssl_object = stream.get_extra_info("ssl_object")
                http2_negotiated = (
                    ssl_object is not None
                    and ssl_object.selected_alpn_protocol() == "h2"
                )

                # Create the HTTP/1.1 or HTTP/2 connection
                if http2_negotiated or (self._http2 and not self._http1):
                    from .http2 import HTTP2Connection

                    self._connection = HTTP2Connection(
                        origin=self._remote_origin,
                        stream=stream,
                        keepalive_expiry=self._keepalive_expiry,
                    )
                else:
                    self._connection = HTTP11Connection(
                        origin=self._remote_origin,
                        stream=stream,
                        keepalive_expiry=self._keepalive_expiry,
                    )

                self._connected = True
        return self._connection.handle_request(request)

    def can_handle_request(self, origin: Origin) -> bool:
        return origin == self._remote_origin

    def close(self) -> None:
        self._connection.close()

    def info(self) -> str:
        return self._connection.info()

    def is_available(self) -> bool:
        return self._connection.is_available()

    def has_expired(self) -> bool:
        return self._connection.has_expired()

    def is_idle(self) -> bool:
        return self._connection.is_idle()

    def is_closed(self) -> bool:
        return self._connection.is_closed()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.info()}]>"
valley/lib/python3.10/site-packages/httpcore/_sync/socks_proxy.py ADDED
@@ -0,0 +1,342 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import ssl
3
+ import typing
4
+
5
+ from socksio import socks5
6
+
7
+ from .._backends.sync import SyncBackend
8
+ from .._backends.base import NetworkBackend, NetworkStream
9
+ from .._exceptions import ConnectionNotAvailable, ProxyError
10
+ from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
11
+ from .._ssl import default_ssl_context
12
+ from .._synchronization import Lock
13
+ from .._trace import Trace
14
+ from .connection_pool import ConnectionPool
15
+ from .http11 import HTTP11Connection
16
+ from .interfaces import ConnectionInterface
17
+
18
+ logger = logging.getLogger("httpcore.socks")
19
+
20
+
21
# Human-readable labels for the SOCKS5 authentication method identifiers
# (RFC 1928, section 3). Used only when composing ProxyError messages.
AUTH_METHODS = {
    b"\x00": "NO AUTHENTICATION REQUIRED",
    b"\x01": "GSSAPI",
    b"\x02": "USERNAME/PASSWORD",
    b"\xff": "NO ACCEPTABLE METHODS",
}

# Human-readable labels for the SOCKS5 reply codes (RFC 1928, section 6).
# Used only when composing ProxyError messages.
REPLY_CODES = {
    b"\x00": "Succeeded",
    b"\x01": "General SOCKS server failure",
    b"\x02": "Connection not allowed by ruleset",
    b"\x03": "Network unreachable",
    b"\x04": "Host unreachable",
    b"\x05": "Connection refused",
    b"\x06": "TTL expired",
    b"\x07": "Command not supported",
    b"\x08": "Address type not supported",
}
39
+
40
+
41
def _init_socks5_connection(
    stream: NetworkStream,
    *,
    host: bytes,
    port: int,
    auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
) -> None:
    """
    Perform the SOCKS5 handshake (RFC 1928) over an established stream.

    Negotiates an authentication method, optionally performs
    username/password authentication (RFC 1929), then issues a CONNECT
    to `(host, port)`.

    Raises:
        ProxyError: If the proxy refuses the requested auth method,
            rejects the credentials, or fails to establish the tunnel.
    """
    conn = socks5.SOCKS5Connection()

    # Auth method request
    auth_method = (
        socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
        if auth is None
        else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
    )
    conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
    outgoing_bytes = conn.data_to_send()
    stream.write(outgoing_bytes)

    # Auth method response
    incoming_bytes = stream.read(max_bytes=4096)
    response = conn.receive_data(incoming_bytes)
    assert isinstance(response, socks5.SOCKS5AuthReply)
    if response.method != auth_method:
        requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
        responded = AUTH_METHODS.get(response.method, "UNKNOWN")
        raise ProxyError(
            f"Requested {requested} from proxy server, but got {responded}."
        )

    if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
        # Username/password request
        assert auth is not None
        username, password = auth
        conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
        outgoing_bytes = conn.data_to_send()
        stream.write(outgoing_bytes)

        # Username/password response
        incoming_bytes = stream.read(max_bytes=4096)
        response = conn.receive_data(incoming_bytes)
        assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
        if not response.success:
            raise ProxyError("Invalid username/password")

    # Connect request
    conn.send(
        socks5.SOCKS5CommandRequest.from_address(
            socks5.SOCKS5Command.CONNECT, (host, port)
        )
    )
    outgoing_bytes = conn.data_to_send()
    stream.write(outgoing_bytes)

    # Connect response
    incoming_bytes = stream.read(max_bytes=4096)
    response = conn.receive_data(incoming_bytes)
    assert isinstance(response, socks5.SOCKS5Reply)
    if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
        # Fixed misspelled fallback label: was "UNKOWN".
        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
102
+
103
+
104
class SOCKSProxy(ConnectionPool):
    """
    A connection pool that sends requests via a SOCKS5 proxy.
    """

    def __init__(
        self,
        proxy_url: typing.Union[URL, bytes, str],
        proxy_auth: typing.Optional[
            typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
        ] = None,
        ssl_context: typing.Optional[ssl.SSLContext] = None,
        max_connections: typing.Optional[int] = 10,
        max_keepalive_connections: typing.Optional[int] = None,
        keepalive_expiry: typing.Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        retries: int = 0,
        network_backend: typing.Optional[NetworkBackend] = None,
    ) -> None:
        """
        A connection pool for making HTTP requests via a SOCKS5 proxy.

        Parameters:
            proxy_url: The URL to use when connecting to the proxy server.
                For example `"socks5://127.0.0.1:1080/"`.
            proxy_auth: An optional two-tuple of `(username, password)` to use
                for authenticating with the proxy server.
            ssl_context: An SSL context to use for verifying connections.
                If not specified, the default `httpcore.default_ssl_context()`
                will be used.
            max_connections: The maximum number of concurrent HTTP connections that
                the pool should allow. Any attempt to send a request on a pool that
                would exceed this amount will block until a connection is available.
            max_keepalive_connections: The maximum number of idle HTTP connections
                that will be maintained in the pool.
            keepalive_expiry: The duration in seconds that an idle HTTP connection
                may be maintained for before being expired from the pool.
            http1: A boolean indicating if HTTP/1.1 requests should be supported
                by the connection pool. Defaults to True.
            http2: A boolean indicating if HTTP/2 requests should be supported by
                the connection pool. Defaults to False.
            retries: The maximum number of retries when trying to establish
                a connection.
            network_backend: A backend instance to use for handling network I/O.
        """
        super().__init__(
            ssl_context=ssl_context,
            max_connections=max_connections,
            max_keepalive_connections=max_keepalive_connections,
            keepalive_expiry=keepalive_expiry,
            http1=http1,
            http2=http2,
            network_backend=network_backend,
            retries=retries,
        )
        self._ssl_context = ssl_context
        self._proxy_url = enforce_url(proxy_url, name="proxy_url")
        if proxy_auth is not None:
            # Normalize the credentials to bytes up front, so connections
            # never have to deal with mixed str/bytes input.
            username, password = proxy_auth
            username_bytes = enforce_bytes(username, name="proxy_auth")
            password_bytes = enforce_bytes(password, name="proxy_auth")
            self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = (
                username_bytes,
                password_bytes,
            )
        else:
            self._proxy_auth = None

    def create_connection(self, origin: Origin) -> ConnectionInterface:
        # Each connection tunnels through the proxy to a single remote origin.
        return Socks5Connection(
            proxy_origin=self._proxy_url.origin,
            remote_origin=origin,
            proxy_auth=self._proxy_auth,
            ssl_context=self._ssl_context,
            keepalive_expiry=self._keepalive_expiry,
            http1=self._http1,
            http2=self._http2,
            network_backend=self._network_backend,
        )
188
+
189
+
190
class Socks5Connection(ConnectionInterface):
    """
    A single connection to a remote origin, tunnelled through a SOCKS5 proxy.

    The TCP connection to the proxy, the SOCKS5 negotiation, the optional TLS
    upgrade, and the HTTP/1.1-vs-HTTP/2 selection are all performed lazily,
    on the first call to `handle_request()`.
    """

    def __init__(
        self,
        proxy_origin: Origin,
        remote_origin: Origin,
        proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
        ssl_context: typing.Optional[ssl.SSLContext] = None,
        keepalive_expiry: typing.Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        network_backend: typing.Optional[NetworkBackend] = None,
    ) -> None:
        self._proxy_origin = proxy_origin
        self._remote_origin = remote_origin
        self._proxy_auth = proxy_auth
        self._ssl_context = ssl_context
        self._keepalive_expiry = keepalive_expiry
        self._http1 = http1
        self._http2 = http2

        self._network_backend: NetworkBackend = (
            SyncBackend() if network_backend is None else network_backend
        )
        # Serializes the one-time connection setup across concurrent requests.
        self._connect_lock = Lock()
        # The underlying HTTP/1.1 or HTTP/2 connection; None until the first
        # request has completed the SOCKS5 handshake.
        self._connection: typing.Optional[ConnectionInterface] = None
        self._connect_failed = False

    def handle_request(self, request: Request) -> Response:
        timeouts = request.extensions.get("timeout", {})
        sni_hostname = request.extensions.get("sni_hostname", None)
        timeout = timeouts.get("connect", None)

        with self._connect_lock:
            if self._connection is None:
                try:
                    # Connect to the proxy
                    kwargs = {
                        "host": self._proxy_origin.host.decode("ascii"),
                        "port": self._proxy_origin.port,
                        "timeout": timeout,
                    }
                    with Trace("connect_tcp", logger, request, kwargs) as trace:
                        stream = self._network_backend.connect_tcp(**kwargs)
                        trace.return_value = stream

                    # Connect to the remote host using socks5
                    kwargs = {
                        "stream": stream,
                        "host": self._remote_origin.host.decode("ascii"),
                        "port": self._remote_origin.port,
                        "auth": self._proxy_auth,
                    }
                    with Trace(
                        "setup_socks5_connection", logger, request, kwargs
                    ) as trace:
                        _init_socks5_connection(**kwargs)
                        trace.return_value = stream

                    # Upgrade the stream to SSL
                    if self._remote_origin.scheme == b"https":
                        ssl_context = (
                            default_ssl_context()
                            if self._ssl_context is None
                            else self._ssl_context
                        )
                        # Offer "h2" via ALPN only when HTTP/2 is enabled.
                        alpn_protocols = (
                            ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
                        )
                        ssl_context.set_alpn_protocols(alpn_protocols)

                        kwargs = {
                            "ssl_context": ssl_context,
                            "server_hostname": sni_hostname
                            or self._remote_origin.host.decode("ascii"),
                            "timeout": timeout,
                        }
                        with Trace("start_tls", logger, request, kwargs) as trace:
                            stream = stream.start_tls(**kwargs)
                            trace.return_value = stream

                    # Determine if we should be using HTTP/1.1 or HTTP/2
                    ssl_object = stream.get_extra_info("ssl_object")
                    http2_negotiated = (
                        ssl_object is not None
                        and ssl_object.selected_alpn_protocol() == "h2"
                    )

                    # Create the HTTP/1.1 or HTTP/2 connection
                    if http2_negotiated or (
                        self._http2 and not self._http1
                    ):  # pragma: nocover
                        # Imported lazily so the optional 'h2' dependency is
                        # only required when HTTP/2 is actually used.
                        from .http2 import HTTP2Connection

                        self._connection = HTTP2Connection(
                            origin=self._remote_origin,
                            stream=stream,
                            keepalive_expiry=self._keepalive_expiry,
                        )
                    else:
                        self._connection = HTTP11Connection(
                            origin=self._remote_origin,
                            stream=stream,
                            keepalive_expiry=self._keepalive_expiry,
                        )
                except Exception as exc:
                    # Record the failure before re-raising, so the pool can
                    # see this connection as unusable.
                    self._connect_failed = True
                    raise exc
            elif not self._connection.is_available():  # pragma: nocover
                raise ConnectionNotAvailable()

        return self._connection.handle_request(request)

    def can_handle_request(self, origin: Origin) -> bool:
        # This connection is pinned to a single remote origin.
        return origin == self._remote_origin

    def close(self) -> None:
        if self._connection is not None:
            self._connection.close()

    def is_available(self) -> bool:
        if self._connection is None:  # pragma: nocover
            # If HTTP/2 support is enabled, and the resulting connection could
            # end up as HTTP/2 then we should indicate the connection as being
            # available to service multiple requests.
            return (
                self._http2
                and (self._remote_origin.scheme == b"https" or not self._http1)
                and not self._connect_failed
            )
        return self._connection.is_available()

    def has_expired(self) -> bool:
        if self._connection is None:  # pragma: nocover
            return self._connect_failed
        return self._connection.has_expired()

    def is_idle(self) -> bool:
        if self._connection is None:  # pragma: nocover
            return self._connect_failed
        return self._connection.is_idle()

    def is_closed(self) -> bool:
        if self._connection is None:  # pragma: nocover
            return self._connect_failed
        return self._connection.is_closed()

    def info(self) -> str:
        if self._connection is None:  # pragma: nocover
            return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
        return self._connection.info()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.info()}]>"
valley/lib/python3.10/site-packages/httpcore/_synchronization.py ADDED
@@ -0,0 +1,317 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import threading
2
+ from types import TracebackType
3
+ from typing import Optional, Type
4
+
5
+ from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions
6
+
7
+ # Our async synchronization primatives use either 'anyio' or 'trio' depending
8
+ # on if they're running under asyncio or trio.
9
+
10
+ try:
11
+ import trio
12
+ except ImportError: # pragma: nocover
13
+ trio = None # type: ignore
14
+
15
+ try:
16
+ import anyio
17
+ except ImportError: # pragma: nocover
18
+ anyio = None # type: ignore
19
+
20
+
21
def current_async_library() -> str:
    """
    Return the async library we are currently running under.

    Detection is delegated to `sniffio` when it is installed; without it
    we assume "asyncio". See https://sniffio.readthedocs.io/en/latest/

    Raises `RuntimeError` for unsupported environments, or when the
    optional dependency for the detected environment ('anyio' for
    asyncio, 'trio' for trio) is not installed.
    """
    try:
        import sniffio
    except ImportError:  # pragma: nocover
        detected = "asyncio"
    else:
        detected = sniffio.current_async_library()

    if detected not in ("asyncio", "trio"):  # pragma: nocover
        raise RuntimeError("Running under an unsupported async environment.")

    if detected == "asyncio" and anyio is None:  # pragma: nocover
        raise RuntimeError(
            "Running with asyncio requires installation of 'httpcore[asyncio]'."
        )

    if detected == "trio" and trio is None:  # pragma: nocover
        raise RuntimeError(
            "Running with trio requires installation of 'httpcore[trio]'."
        )

    return detected
45
+
46
+
47
class AsyncLock:
    """
    This is a standard lock.

    In the sync case `Lock` provides thread locking.
    In the async case `AsyncLock` provides async locking.
    """

    def __init__(self) -> None:
        # The backend string stays empty until `setup()` runs; resolving the
        # backend lazily allows the lock to be constructed outside of an
        # async context.
        self._backend = ""

    def setup(self) -> None:
        """
        Detect if we're running under 'asyncio' or 'trio' and create
        a lock with the correct implementation.
        """
        self._backend = current_async_library()
        if self._backend == "trio":
            self._trio_lock = trio.Lock()
        elif self._backend == "asyncio":
            self._anyio_lock = anyio.Lock()

    async def __aenter__(self) -> "AsyncLock":
        # Lazily initialize the backend lock on first acquisition.
        if not self._backend:
            self.setup()

        if self._backend == "trio":
            await self._trio_lock.acquire()
        elif self._backend == "asyncio":
            await self._anyio_lock.acquire()

        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        if self._backend == "trio":
            self._trio_lock.release()
        elif self._backend == "asyncio":
            self._anyio_lock.release()
90
+
91
+
92
class AsyncThreadLock:
    """
    A threading-only lock for no-I/O contexts, async flavour.

    In the sync case `ThreadLock` provides thread locking.
    In the async case `AsyncThreadLock` is a no-op.
    """

    def __enter__(self) -> "AsyncThreadLock":
        # Nothing to acquire: the async code path does not need thread locking.
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        # Intentionally a no-op; see the class docstring.
        pass
110
+
111
+
112
class AsyncEvent:
    """
    A one-shot event that works under both trio and asyncio, with an
    optional timeout on `wait()`.
    """

    def __init__(self) -> None:
        # Backend resolved lazily so the event can be created outside of an
        # async context.
        self._backend = ""

    def setup(self) -> None:
        """
        Detect if we're running under 'asyncio' or 'trio' and create
        an event with the correct implementation.
        """
        self._backend = current_async_library()
        if self._backend == "trio":
            self._trio_event = trio.Event()
        elif self._backend == "asyncio":
            self._anyio_event = anyio.Event()

    def set(self) -> None:
        if not self._backend:
            self.setup()

        if self._backend == "trio":
            self._trio_event.set()
        elif self._backend == "asyncio":
            self._anyio_event.set()

    async def wait(self, timeout: Optional[float] = None) -> None:
        if not self._backend:
            self.setup()

        if self._backend == "trio":
            # trio timeouts raise TooSlowError; map it to our PoolTimeout.
            trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout}
            # trio.fail_after expects a float, so "no timeout" becomes inf.
            timeout_or_inf = float("inf") if timeout is None else timeout
            with map_exceptions(trio_exc_map):
                with trio.fail_after(timeout_or_inf):
                    await self._trio_event.wait()
        elif self._backend == "asyncio":
            # anyio timeouts raise TimeoutError; map it to our PoolTimeout.
            anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout}
            with map_exceptions(anyio_exc_map):
                with anyio.fail_after(timeout):
                    await self._anyio_event.wait()
151
+
152
+
153
class AsyncSemaphore:
    """
    A bounded semaphore that works under both trio and asyncio.
    """

    def __init__(self, bound: int) -> None:
        # `bound` is both the initial and the maximum value of the semaphore.
        self._bound = bound
        # Backend resolved lazily, on first acquire.
        self._backend = ""

    def setup(self) -> None:
        """
        Detect if we're running under 'asyncio' or 'trio' and create
        a semaphore with the correct implementation.
        """
        self._backend = current_async_library()
        if self._backend == "trio":
            self._trio_semaphore = trio.Semaphore(
                initial_value=self._bound, max_value=self._bound
            )
        elif self._backend == "asyncio":
            self._anyio_semaphore = anyio.Semaphore(
                initial_value=self._bound, max_value=self._bound
            )

    async def acquire(self) -> None:
        if not self._backend:
            self.setup()

        if self._backend == "trio":
            await self._trio_semaphore.acquire()
        elif self._backend == "asyncio":
            await self._anyio_semaphore.acquire()

    async def release(self) -> None:
        # No lazy setup here: release without a prior acquire is a no-op,
        # since `self._backend` is still the empty string.
        if self._backend == "trio":
            self._trio_semaphore.release()
        elif self._backend == "asyncio":
            self._anyio_semaphore.release()
187
+
188
+
189
class AsyncShieldCancellation:
    # For certain portions of our codebase where we're dealing with
    # closing connections during exception handling we want to shield
    # the operation from being cancelled.
    #
    #     with AsyncShieldCancellation():
    #         ... # clean-up operations, shielded from cancellation.

    def __init__(self) -> None:
        """
        Detect if we're running under 'asyncio' or 'trio' and create
        a shielded scope with the correct implementation.
        """
        # Unlike the other primitives in this module, the backend is
        # resolved eagerly here: this class is only instantiated from
        # within a running async context.
        self._backend = current_async_library()

        if self._backend == "trio":
            self._trio_shield = trio.CancelScope(shield=True)
        elif self._backend == "asyncio":
            self._anyio_shield = anyio.CancelScope(shield=True)

    def __enter__(self) -> "AsyncShieldCancellation":
        if self._backend == "trio":
            self._trio_shield.__enter__()
        elif self._backend == "asyncio":
            self._anyio_shield.__enter__()
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        # Delegate exit (including any exception info) to the backend scope.
        if self._backend == "trio":
            self._trio_shield.__exit__(exc_type, exc_value, traceback)
        elif self._backend == "asyncio":
            self._anyio_shield.__exit__(exc_type, exc_value, traceback)
226
+
227
+
228
+ # Our thread-based synchronization primitives...
229
+
230
+
231
class Lock:
    """
    A standard mutual-exclusion lock, used as a context manager.

    In the sync case `Lock` provides thread locking.
    In the async case `AsyncLock` provides async locking.
    """

    def __init__(self) -> None:
        self._lock = threading.Lock()

    def __enter__(self) -> "Lock":
        # Block until the lock is held, then hand back this instance.
        self._lock.acquire()
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        # Always release, regardless of whether an exception occurred.
        self._lock.release()
253
+
254
+
255
class ThreadLock:
    """
    A lock that only guards against thread-level concurrency, for
    no-I/O contexts.

    In the sync case `ThreadLock` provides thread locking.
    In the async case `AsyncThreadLock` is a no-op.
    """

    def __init__(self) -> None:
        self._lock = threading.Lock()

    def __enter__(self) -> "ThreadLock":
        # Block until the lock is held, then hand back this instance.
        self._lock.acquire()
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        # Always release, regardless of whether an exception occurred.
        self._lock.release()
277
+
278
+
279
class Event:
    """
    A one-shot flag: `set()` releases every current and future `wait()`.
    `wait()` raises `PoolTimeout` if the timeout elapses first.
    """

    def __init__(self) -> None:
        self._event = threading.Event()

    def set(self) -> None:
        """Signal the event, releasing any waiters."""
        self._event.set()

    def wait(self, timeout: Optional[float] = None) -> None:
        """Block until the event is set; raise `PoolTimeout` on timeout."""
        if timeout == float("inf"):  # pragma: no cover
            # `threading.Event.wait` expresses "wait forever" as None.
            timeout = None
        if not self._event.wait(timeout=timeout):
            raise PoolTimeout()  # pragma: nocover
291
+
292
+
293
class Semaphore:
    """
    A counting semaphore with an initial value of *bound*, mirroring
    `AsyncSemaphore` for the thread-based code path.
    """

    def __init__(self, bound: int) -> None:
        self._semaphore = threading.Semaphore(value=bound)

    def acquire(self) -> None:
        """Block until a slot is available, then take it."""
        self._semaphore.acquire()

    def release(self) -> None:
        """Return a previously acquired slot."""
        self._semaphore.release()
302
+
303
+
304
class ShieldCancellation:
    """
    No-op counterpart of `AsyncShieldCancellation`.

    Thread-synchronous codebases don't support cancellation semantics,
    so there is nothing to shield here; the class exists only so the
    sync and async code paths in this package stay structurally parallel.
    """

    def __enter__(self) -> "ShieldCancellation":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        # Intentionally a no-op; see the class docstring.
        pass
valley/lib/python3.10/site-packages/httpcore/py.typed ADDED
File without changes
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__init__.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from . import axes_size as Size
2
+ from .axes_divider import Divider, SubplotDivider, make_axes_locatable
3
+ from .axes_grid import AxesGrid, Grid, ImageGrid
4
+
5
+ from .parasite_axes import host_subplot, host_axes
6
+
7
+ __all__ = ["Size",
8
+ "Divider", "SubplotDivider", "make_axes_locatable",
9
+ "AxesGrid", "Grid", "ImageGrid",
10
+ "host_subplot", "host_axes"]
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (527 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/anchored_artists.cpython-310.pyc ADDED
Binary file (17.7 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_divider.cpython-310.pyc ADDED
Binary file (22.9 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_grid.cpython-310.pyc ADDED
Binary file (17.4 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_rgb.cpython-310.pyc ADDED
Binary file (5.1 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/axes_size.cpython-310.pyc ADDED
Binary file (8.95 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/inset_locator.cpython-310.pyc ADDED
Binary file (20.9 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/mpl_axes.cpython-310.pyc ADDED
Binary file (5.23 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/__pycache__/parasite_axes.cpython-310.pyc ADDED
Binary file (8.85 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/anchored_artists.py ADDED
@@ -0,0 +1,462 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from matplotlib import _api, transforms
2
+ from matplotlib.offsetbox import (AnchoredOffsetbox, AuxTransformBox,
3
+ DrawingArea, TextArea, VPacker)
4
+ from matplotlib.patches import (Rectangle, Ellipse, ArrowStyle,
5
+ FancyArrowPatch, PathPatch)
6
+ from matplotlib.text import TextPath
7
+
8
+ __all__ = ['AnchoredDrawingArea', 'AnchoredAuxTransformBox',
9
+ 'AnchoredEllipse', 'AnchoredSizeBar', 'AnchoredDirectionArrows']
10
+
11
+
12
class AnchoredDrawingArea(AnchoredOffsetbox):
    def __init__(self, width, height, xdescent, ydescent,
                 loc, pad=0.4, borderpad=0.5, prop=None, frameon=True,
                 **kwargs):
        """
        An anchored container with a fixed size and fillable `.DrawingArea`.

        Artists added to the *drawing_area* will have their coordinates
        interpreted as pixels. Any transformations set on the artists will be
        overridden.

        Parameters
        ----------
        width, height : float
            Width and height of the container, in pixels.
        xdescent, ydescent : float
            Descent of the container in the x- and y- direction, in pixels.
        loc : str
            Location of this artist.  Valid locations are
            'upper left', 'upper center', 'upper right',
            'center left', 'center', 'center right',
            'lower left', 'lower center', 'lower right'.
            For backward compatibility, numeric values are accepted as well.
            See the parameter *loc* of `.Legend` for details.
        pad : float, default: 0.4
            Padding around the child objects, in fraction of the font size.
        borderpad : float, default: 0.5
            Border padding, in fraction of the font size.
        prop : `~matplotlib.font_manager.FontProperties`, optional
            Font property used as a reference for paddings.
        frameon : bool, default: True
            If True, draw a box around this artist.
        **kwargs
            Keyword arguments forwarded to `.AnchoredOffsetbox`.

        Attributes
        ----------
        drawing_area : `~matplotlib.offsetbox.DrawingArea`
            A container for artists to display.

        Examples
        --------
        To display blue and red circles of different sizes in the upper right
        of an Axes *ax*:

        >>> ada = AnchoredDrawingArea(20, 20, 0, 0,
        ...                           loc='upper right', frameon=False)
        >>> ada.drawing_area.add_artist(Circle((10, 10), 10, fc="b"))
        >>> ada.drawing_area.add_artist(Circle((30, 10), 5, fc="r"))
        >>> ax.add_artist(ada)
        """
        self.da = DrawingArea(width, height, xdescent, ydescent)
        # *drawing_area* is a public alias for the same container as *da*.
        self.drawing_area = self.da

        # NOTE(review): the *prop* argument is accepted but not forwarded;
        # `prop=None` is passed to AnchoredOffsetbox here — confirm this is
        # intentional.
        super().__init__(
            loc, pad=pad, borderpad=borderpad, child=self.da, prop=None,
            frameon=frameon, **kwargs
        )
70
+
71
+
72
class AnchoredAuxTransformBox(AnchoredOffsetbox):
    def __init__(self, transform, loc,
                 pad=0.4, borderpad=0.5, prop=None, frameon=True, **kwargs):
        """
        An anchored container with transformed coordinates.

        Artists added to the *drawing_area* are scaled according to the
        coordinates of the transformation used. The dimensions of this artist
        will scale to contain the artists added.

        Parameters
        ----------
        transform : `~matplotlib.transforms.Transform`
            The transformation object for the coordinate system in use, i.e.,
            :attr:`matplotlib.axes.Axes.transData`.
        loc : str
            Location of this artist.  Valid locations are
            'upper left', 'upper center', 'upper right',
            'center left', 'center', 'center right',
            'lower left', 'lower center', 'lower right'.
            For backward compatibility, numeric values are accepted as well.
            See the parameter *loc* of `.Legend` for details.
        pad : float, default: 0.4
            Padding around the child objects, in fraction of the font size.
        borderpad : float, default: 0.5
            Border padding, in fraction of the font size.
        prop : `~matplotlib.font_manager.FontProperties`, optional
            Font property used as a reference for paddings.
        frameon : bool, default: True
            If True, draw a box around this artist.
        **kwargs
            Keyword arguments forwarded to `.AnchoredOffsetbox`.

        Attributes
        ----------
        drawing_area : `~matplotlib.offsetbox.AuxTransformBox`
            A container for artists to display.

        Examples
        --------
        To display an ellipse in the upper left, with a width of 0.1 and
        height of 0.4 in data coordinates:

        >>> box = AnchoredAuxTransformBox(ax.transData, loc='upper left')
        >>> el = Ellipse((0, 0), width=0.1, height=0.4, angle=30)
        >>> box.drawing_area.add_artist(el)
        >>> ax.add_artist(box)
        """
        # Artists added to this box are drawn in *transform* coordinates.
        self.drawing_area = AuxTransformBox(transform)

        super().__init__(loc, pad=pad, borderpad=borderpad,
                         child=self.drawing_area, prop=prop, frameon=frameon,
                         **kwargs)
125
+
126
+
127
@_api.deprecated("3.8")
class AnchoredEllipse(AnchoredOffsetbox):
    def __init__(self, transform, width, height, angle, loc,
                 pad=0.1, borderpad=0.1, prop=None, frameon=True, **kwargs):
        """
        Draw an anchored ellipse of a given size.

        Parameters
        ----------
        transform : `~matplotlib.transforms.Transform`
            The transformation object for the coordinate system in use, i.e.,
            :attr:`matplotlib.axes.Axes.transData`.
        width, height : float
            Width and height of the ellipse, given in coordinates of
            *transform*.
        angle : float
            Rotation of the ellipse, in degrees, anti-clockwise.
        loc : str
            Location of the ellipse.  Valid locations are
            'upper left', 'upper center', 'upper right',
            'center left', 'center', 'center right',
            'lower left', 'lower center', 'lower right'.
            For backward compatibility, numeric values are accepted as well.
            See the parameter *loc* of `.Legend` for details.
        pad : float, default: 0.1
            Padding around the ellipse, in fraction of the font size.
        borderpad : float, default: 0.1
            Border padding, in fraction of the font size.
        frameon : bool, default: True
            If True, draw a box around the ellipse.
        prop : `~matplotlib.font_manager.FontProperties`, optional
            Font property used as a reference for paddings.
        **kwargs
            Keyword arguments forwarded to `.AnchoredOffsetbox`.

        Attributes
        ----------
        ellipse : `~matplotlib.patches.Ellipse`
            Ellipse patch drawn.
        """
        # Deprecated since 3.8 (see the class decorator); kept for
        # backward compatibility.
        # The ellipse is built at the origin in *transform* coordinates,
        # inside an auxiliary transform box.
        self._box = AuxTransformBox(transform)
        self.ellipse = Ellipse((0, 0), width, height, angle=angle)
        self._box.add_artist(self.ellipse)

        super().__init__(loc, pad=pad, borderpad=borderpad, child=self._box,
                         prop=prop, frameon=frameon, **kwargs)
173
+
174
+
175
class AnchoredSizeBar(AnchoredOffsetbox):
    def __init__(self, transform, size, label, loc,
                 pad=0.1, borderpad=0.1, sep=2,
                 frameon=True, size_vertical=0, color='black',
                 label_top=False, fontproperties=None, fill_bar=None,
                 **kwargs):
        """
        Draw a horizontal scale bar with a center-aligned label underneath.

        Parameters
        ----------
        transform : `~matplotlib.transforms.Transform`
            The transformation object for the coordinate system in use, i.e.,
            :attr:`matplotlib.axes.Axes.transData`.
        size : float
            Horizontal length of the size bar, given in coordinates of
            *transform*.
        label : str
            Label to display.
        loc : str
            Location of the size bar.  Valid locations are
            'upper left', 'upper center', 'upper right',
            'center left', 'center', 'center right',
            'lower left', 'lower center', 'lower right'.
            For backward compatibility, numeric values are accepted as well.
            See the parameter *loc* of `.Legend` for details.
        pad : float, default: 0.1
            Padding around the label and size bar, in fraction of the font
            size.
        borderpad : float, default: 0.1
            Border padding, in fraction of the font size.
        sep : float, default: 2
            Separation between the label and the size bar, in points.
        frameon : bool, default: True
            If True, draw a box around the horizontal bar and label.
        size_vertical : float, default: 0
            Vertical length of the size bar, given in coordinates of
            *transform*.
        color : str, default: 'black'
            Color for the size bar and label.
        label_top : bool, default: False
            If True, the label will be over the size bar.
        fontproperties : `~matplotlib.font_manager.FontProperties`, optional
            Font properties for the label text.
        fill_bar : bool, optional
            If True and if *size_vertical* is nonzero, the size bar will
            be filled in with the color specified by the size bar.
            Defaults to True if *size_vertical* is greater than
            zero and False otherwise.
        **kwargs
            Keyword arguments forwarded to `.AnchoredOffsetbox`.

        Attributes
        ----------
        size_bar : `~matplotlib.offsetbox.AuxTransformBox`
            Container for the size bar.
        txt_label : `~matplotlib.offsetbox.TextArea`
            Container for the label of the size bar.

        Notes
        -----
        If *prop* is passed as a keyword argument, but *fontproperties* is
        not, then *prop* is assumed to be the intended *fontproperties*.
        Using both *prop* and *fontproperties* is not supported.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> import numpy as np
        >>> from mpl_toolkits.axes_grid1.anchored_artists import (
        ...     AnchoredSizeBar)
        >>> fig, ax = plt.subplots()
        >>> ax.imshow(np.random.random((10, 10)))
        >>> bar = AnchoredSizeBar(ax.transData, 3, '3 data units', 4)
        >>> ax.add_artist(bar)
        >>> fig.show()

        Using all the optional parameters

        >>> import matplotlib.font_manager as fm
        >>> fontprops = fm.FontProperties(size=14, family='monospace')
        >>> bar = AnchoredSizeBar(ax.transData, 3, '3 units', 4, pad=0.5,
        ...                       sep=5, borderpad=0.5, frameon=False,
        ...                       size_vertical=0.5, color='white',
        ...                       fontproperties=fontprops)
        """
        # By default, only fill the bar when it has a nonzero height.
        if fill_bar is None:
            fill_bar = size_vertical > 0

        self.size_bar = AuxTransformBox(transform)
        self.size_bar.add_artist(Rectangle((0, 0), size, size_vertical,
                                           fill=fill_bar, facecolor=color,
                                           edgecolor=color))

        # Accept *prop* as an alias for *fontproperties* (see Notes above).
        if fontproperties is None and 'prop' in kwargs:
            fontproperties = kwargs.pop('prop')

        if fontproperties is None:
            textprops = {'color': color}
        else:
            textprops = {'color': color, 'fontproperties': fontproperties}

        self.txt_label = TextArea(label, textprops=textprops)

        # Stack the label above or below the bar depending on *label_top*.
        if label_top:
            _box_children = [self.txt_label, self.size_bar]
        else:
            _box_children = [self.size_bar, self.txt_label]

        self._box = VPacker(children=_box_children,
                            align="center",
                            pad=0, sep=sep)

        super().__init__(loc, pad=pad, borderpad=borderpad, child=self._box,
                         prop=fontproperties, frameon=frameon, **kwargs)
290
+
291
+
292
class AnchoredDirectionArrows(AnchoredOffsetbox):
    """An anchored box containing two labeled, perpendicular direction arrows."""

    def __init__(self, transform, label_x, label_y, length=0.15,
                 fontsize=0.08, loc='upper left', angle=0, aspect_ratio=1,
                 pad=0.4, borderpad=0.4, frameon=False, color='w', alpha=1,
                 sep_x=0.01, sep_y=0, fontproperties=None, back_length=0.15,
                 head_width=10, head_length=15, tail_width=2,
                 text_props=None, arrow_props=None,
                 **kwargs):
        """
        Draw two perpendicular arrows to indicate directions.

        Parameters
        ----------
        transform : `~matplotlib.transforms.Transform`
            The transformation object for the coordinate system in use, i.e.,
            :attr:`matplotlib.axes.Axes.transAxes`.
        label_x, label_y : str
            Label text for the x and y arrows
        length : float, default: 0.15
            Length of the arrow, given in coordinates of *transform*.
        fontsize : float, default: 0.08
            Size of label strings, given in coordinates of *transform*.
        loc : str, default: 'upper left'
            Location of the arrow.  Valid locations are
            'upper left', 'upper center', 'upper right',
            'center left', 'center', 'center right',
            'lower left', 'lower center', 'lower right'.
            For backward compatibility, numeric values are accepted as well.
            See the parameter *loc* of `.Legend` for details.
        angle : float, default: 0
            The angle of the arrows in degrees.
        aspect_ratio : float, default: 1
            The ratio of the length of arrow_x and arrow_y.
            Negative numbers can be used to change the direction.
        pad : float, default: 0.4
            Padding around the labels and arrows, in fraction of the font size.
        borderpad : float, default: 0.4
            Border padding, in fraction of the font size.
        frameon : bool, default: False
            If True, draw a box around the arrows and labels.
        color : str, default: 'white'
            Color for the arrows and labels.
        alpha : float, default: 1
            Alpha values of the arrows and labels
        sep_x, sep_y : float, default: 0.01 and 0 respectively
            Separation between the arrows and labels in coordinates of
            *transform*.
        fontproperties : `~matplotlib.font_manager.FontProperties`, optional
            Font properties for the label text.
        back_length : float, default: 0.15
            Fraction of the arrow behind the arrow crossing.
        head_width : float, default: 10
            Width of arrow head, sent to `.ArrowStyle`.
        head_length : float, default: 15
            Length of arrow head, sent to `.ArrowStyle`.
        tail_width : float, default: 2
            Width of arrow tail, sent to `.ArrowStyle`.
        text_props, arrow_props : dict
            Properties of the text and arrows, passed to `.TextPath` and
            `.FancyArrowPatch`.
        **kwargs
            Keyword arguments forwarded to `.AnchoredOffsetbox`.

        Attributes
        ----------
        arrow_x, arrow_y : `~matplotlib.patches.FancyArrowPatch`
            Arrow x and y
        text_path_x, text_path_y : `~matplotlib.text.TextPath`
            Path for arrow labels
        p_x, p_y : `~matplotlib.patches.PathPatch`
            Patch for arrow labels
        box : `~matplotlib.offsetbox.AuxTransformBox`
            Container for the arrows and labels.

        Notes
        -----
        If *prop* is passed as a keyword argument, but *fontproperties* is
        not, then *prop* is assumed to be the intended *fontproperties*.
        Using both *prop* and *fontproperties* is not supported.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> import numpy as np
        >>> from mpl_toolkits.axes_grid1.anchored_artists import (
        ...     AnchoredDirectionArrows)
        >>> fig, ax = plt.subplots()
        >>> ax.imshow(np.random.random((10, 10)))
        >>> arrows = AnchoredDirectionArrows(ax.transAxes, '111', '110')
        >>> ax.add_artist(arrows)
        >>> fig.show()

        Using several of the optional parameters, creating downward pointing
        arrow and high contrast text labels.

        >>> import matplotlib.font_manager as fm
        >>> fontprops = fm.FontProperties(family='monospace')
        >>> arrows = AnchoredDirectionArrows(ax.transAxes, 'East', 'South',
        ...                                  loc='lower left', color='k',
        ...                                  aspect_ratio=-1, sep_x=0.02,
        ...                                  sep_y=-0.01,
        ...                                  text_props={'ec':'w', 'fc':'k'},
        ...                                  fontproperties=fontprops)
        """
        if arrow_props is None:
            arrow_props = {}

        if text_props is None:
            text_props = {}

        arrowstyle = ArrowStyle("Simple",
                                head_width=head_width,
                                head_length=head_length,
                                tail_width=tail_width)

        # Allow legacy *prop* as an alias for *fontproperties*.
        if fontproperties is None and 'prop' in kwargs:
            fontproperties = kwargs.pop('prop')

        # NOTE(review): caller-supplied *arrow_props*/*text_props* dicts are
        # mutated in place below — confirm this side effect is intended.
        if 'color' not in arrow_props:
            arrow_props['color'] = color

        if 'alpha' not in arrow_props:
            arrow_props['alpha'] = alpha

        if 'color' not in text_props:
            text_props['color'] = color

        if 'alpha' not in text_props:
            text_props['alpha'] = alpha

        # Rotate the whole arrow box by *angle* around the transform origin.
        t_start = transform
        t_end = t_start + transforms.Affine2D().rotate_deg(angle)

        self.box = AuxTransformBox(t_end)

        length_x = length
        length_y = length*aspect_ratio

        # Both arrows start *back_length* before the crossing point so they
        # visually overlap at the corner.
        self.arrow_x = FancyArrowPatch(
                (0, back_length*length_y),
                (length_x, back_length*length_y),
                arrowstyle=arrowstyle,
                shrinkA=0.0,
                shrinkB=0.0,
                **arrow_props)

        self.arrow_y = FancyArrowPatch(
                (back_length*length_x, 0),
                (back_length*length_x, length_y),
                arrowstyle=arrowstyle,
                shrinkA=0.0,
                shrinkB=0.0,
                **arrow_props)

        self.box.add_artist(self.arrow_x)
        self.box.add_artist(self.arrow_y)

        # Label for the x arrow, offset by (sep_x, sep_y) from its tip.
        text_path_x = TextPath((
            length_x+sep_x, back_length*length_y+sep_y), label_x,
            size=fontsize, prop=fontproperties)
        self.p_x = PathPatch(text_path_x, transform=t_start, **text_props)
        self.box.add_artist(self.p_x)

        # Label for the y arrow, near its tip along the arrow shaft.
        text_path_y = TextPath((
            length_x*back_length+sep_x, length_y*(1-back_length)+sep_y),
            label_y, size=fontsize, prop=fontproperties)
        self.p_y = PathPatch(text_path_y, **text_props)
        self.box.add_artist(self.p_y)

        super().__init__(loc, pad=pad, borderpad=borderpad, child=self.box,
                         frameon=frameon, **kwargs)
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_divider.py ADDED
@@ -0,0 +1,694 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Helper classes to adjust the positions of multiple axes at drawing time.
3
+ """
4
+
5
+ import functools
6
+
7
+ import numpy as np
8
+
9
+ import matplotlib as mpl
10
+ from matplotlib import _api
11
+ from matplotlib.gridspec import SubplotSpec
12
+ import matplotlib.transforms as mtransforms
13
+ from . import axes_size as Size
14
+
15
+
16
class Divider:
    """
    An Axes positioning class.

    The divider is initialized with lists of horizontal and vertical sizes
    (:mod:`mpl_toolkits.axes_grid1.axes_size`) based on which a given
    rectangular area will be divided.

    The `new_locator` method then creates a callable object
    that can be used as the *axes_locator* of the axes.
    """

    def __init__(self, fig, pos, horizontal, vertical,
                 aspect=None, anchor="C"):
        """
        Parameters
        ----------
        fig : Figure
        pos : tuple of 4 floats
            Position of the rectangle that will be divided.
        horizontal : list of :mod:`~mpl_toolkits.axes_grid1.axes_size`
            Sizes for horizontal division.
        vertical : list of :mod:`~mpl_toolkits.axes_grid1.axes_size`
            Sizes for vertical division.
        aspect : bool, optional
            Whether overall rectangular area is reduced so that the relative
            part of the horizontal and vertical scales have the same scale.
        anchor : (float, float) or {'C', 'SW', 'S', 'SE', 'E', 'NE', 'N', \
'NW', 'W'}, default: 'C'
            Placement of the reduced rectangle, when *aspect* is True.
        """
        self._fig = fig
        self._pos = pos
        self._horizontal = horizontal
        self._vertical = vertical
        self._anchor = anchor
        self.set_anchor(anchor)  # Validates (and re-assigns) the anchor.
        self._aspect = aspect
        # Reference-cell indices; shifted by append_size("left"/"bottom") so
        # that locators created earlier keep pointing at the same cell.
        self._xrefindex = 0
        self._yrefindex = 0
        self._locator = None

    def get_horizontal_sizes(self, renderer):
        """Return an (n, 2) array of (rel_size, abs_size) horizontal sizes."""
        return np.array([s.get_size(renderer) for s in self.get_horizontal()])

    def get_vertical_sizes(self, renderer):
        """Return an (n, 2) array of (rel_size, abs_size) vertical sizes."""
        return np.array([s.get_size(renderer) for s in self.get_vertical()])

    def set_position(self, pos):
        """
        Set the position of the rectangle.

        Parameters
        ----------
        pos : tuple of 4 floats
            position of the rectangle that will be divided
        """
        self._pos = pos

    def get_position(self):
        """Return the position of the rectangle."""
        return self._pos

    def set_anchor(self, anchor):
        """
        Parameters
        ----------
        anchor : (float, float) or {'C', 'SW', 'S', 'SE', 'E', 'NE', 'N', \
'NW', 'W'}
            Either an (*x*, *y*) pair of relative coordinates (0 is left or
            bottom, 1 is right or top), 'C' (center), or a cardinal direction
            ('SW', southwest, is bottom left, etc.).

        See Also
        --------
        .Axes.set_anchor
        """
        if isinstance(anchor, str):
            _api.check_in_list(mtransforms.Bbox.coefs, anchor=anchor)
        elif not isinstance(anchor, (tuple, list)) or len(anchor) != 2:
            raise TypeError("anchor must be str or 2-tuple")
        self._anchor = anchor

    def get_anchor(self):
        """Return the anchor."""
        return self._anchor

    def get_subplotspec(self):
        """Return the SubplotSpec; None for a plain Divider."""
        return None

    def set_horizontal(self, h):
        """
        Parameters
        ----------
        h : list of :mod:`~mpl_toolkits.axes_grid1.axes_size`
            sizes for horizontal division
        """
        self._horizontal = h

    def get_horizontal(self):
        """Return horizontal sizes."""
        return self._horizontal

    def set_vertical(self, v):
        """
        Parameters
        ----------
        v : list of :mod:`~mpl_toolkits.axes_grid1.axes_size`
            sizes for vertical division
        """
        self._vertical = v

    def get_vertical(self):
        """Return vertical sizes."""
        return self._vertical

    def set_aspect(self, aspect=False):
        """
        Parameters
        ----------
        aspect : bool
        """
        self._aspect = aspect

    def get_aspect(self):
        """Return aspect."""
        return self._aspect

    def set_locator(self, _locator):
        """Set a callable returning the bounds used instead of *pos*."""
        self._locator = _locator

    def get_locator(self):
        """Return the locator set by `set_locator`, or None."""
        return self._locator

    def get_position_runtime(self, ax, renderer):
        """Return the divided rectangle's bounds, honoring a set locator."""
        if self._locator is None:
            return self.get_position()
        else:
            return self._locator(ax, renderer).bounds

    @staticmethod
    def _calc_k(sizes, total):
        # sizes is a (n, 2) array of (rel_size, abs_size); this method finds
        # the k factor such that sum(rel_size * k + abs_size) == total.
        rel_sum, abs_sum = sizes.sum(0)
        return (total - abs_sum) / rel_sum if rel_sum else 0

    @staticmethod
    def _calc_offsets(sizes, k):
        # Apply k factors to (n, 2) sizes array of (rel_size, abs_size); return
        # the resulting cumulative offset positions.
        return np.cumsum([0, *(sizes @ [k, 1])])

    def new_locator(self, nx, ny, nx1=None, ny1=None):
        """
        Return an axes locator callable for the specified cell.

        Parameters
        ----------
        nx, nx1 : int
            Integers specifying the column-position of the
            cell. When *nx1* is None, a single *nx*-th column is
            specified. Otherwise, location of columns spanning between *nx*
            to *nx1* (but excluding *nx1*-th column) is specified.
        ny, ny1 : int
            Same as *nx* and *nx1*, but for row positions.
        """
        if nx1 is None:
            nx1 = nx + 1
        if ny1 is None:
            ny1 = ny + 1
        # append_size("left") adds a new size at the beginning of the
        # horizontal size lists; this shift transforms e.g.
        # new_locator(nx=2, ...) into effectively new_locator(nx=3, ...).  To
        # take that into account, instead of recording nx, we record
        # nx-self._xrefindex, where _xrefindex is shifted by 1 by each
        # append_size("left"), and re-add self._xrefindex back to nx in
        # _locate, when the actual axes position is computed.  Ditto for y.
        xref = self._xrefindex
        yref = self._yrefindex
        locator = functools.partial(
            self._locate, nx - xref, ny - yref, nx1 - xref, ny1 - yref)
        locator.get_subplotspec = self.get_subplotspec
        return locator

    @_api.deprecated(
        "3.8", alternative="divider.new_locator(...)(ax, renderer)")
    def locate(self, nx, ny, nx1=None, ny1=None, axes=None, renderer=None):
        """
        Implementation of ``divider.new_locator().__call__``.

        Parameters
        ----------
        nx, nx1 : int
            Integers specifying the column-position of the cell.  When *nx1* is
            None, a single *nx*-th column is specified.  Otherwise, the
            location of columns spanning between *nx* to *nx1* (but excluding
            *nx1*-th column) is specified.
        ny, ny1 : int
            Same as *nx* and *nx1*, but for row positions.
        axes
        renderer
        """
        xref = self._xrefindex
        yref = self._yrefindex
        # Bug fix: _locate's signature is (nx, ny, nx1, ny1, axes, renderer);
        # the arguments were previously passed in (nx, nx1, ny, ny1) order,
        # swapping the row index with the column end index.
        return self._locate(
            nx - xref, ny - yref,
            (nx + 1 if nx1 is None else nx1) - xref,
            (ny + 1 if ny1 is None else ny1) - yref,
            axes, renderer)

    def _locate(self, nx, ny, nx1, ny1, axes, renderer):
        """
        Implementation of ``divider.new_locator().__call__``.

        The axes locator callable returned by ``new_locator()`` is created as
        a `functools.partial` of this method with *nx*, *ny*, *nx1*, and *ny1*
        specifying the requested cell.
        """
        # Re-add the reference offsets (see new_locator for why they were
        # subtracted when the partial was created).
        nx += self._xrefindex
        nx1 += self._xrefindex
        ny += self._yrefindex
        ny1 += self._yrefindex

        fig_w, fig_h = self._fig.bbox.size / self._fig.dpi
        x, y, w, h = self.get_position_runtime(axes, renderer)

        hsizes = self.get_horizontal_sizes(renderer)
        vsizes = self.get_vertical_sizes(renderer)
        k_h = self._calc_k(hsizes, fig_w * w)
        k_v = self._calc_k(vsizes, fig_h * h)

        if self.get_aspect():
            # Use a common scale factor so relative sizes have equal scale in
            # both directions, then anchor the shrunk box inside *pos*.
            k = min(k_h, k_v)
            ox = self._calc_offsets(hsizes, k)
            oy = self._calc_offsets(vsizes, k)

            ww = (ox[-1] - ox[0]) / fig_w
            hh = (oy[-1] - oy[0]) / fig_h
            pb = mtransforms.Bbox.from_bounds(x, y, w, h)
            pb1 = mtransforms.Bbox.from_bounds(x, y, ww, hh)
            x0, y0 = pb1.anchored(self.get_anchor(), pb).p0

        else:
            ox = self._calc_offsets(hsizes, k_h)
            oy = self._calc_offsets(vsizes, k_v)
            x0, y0 = x, y

        if nx1 is None:
            nx1 = -1
        if ny1 is None:
            ny1 = -1

        x1, w1 = x0 + ox[nx] / fig_w, (ox[nx1] - ox[nx]) / fig_w
        y1, h1 = y0 + oy[ny] / fig_h, (oy[ny1] - oy[ny]) / fig_h

        return mtransforms.Bbox.from_bounds(x1, y1, w1, h1)

    def append_size(self, position, size):
        """Append *size* on the given side, keeping existing locators valid."""
        _api.check_in_list(["left", "right", "bottom", "top"],
                           position=position)
        if position == "left":
            self._horizontal.insert(0, size)
            self._xrefindex += 1
        elif position == "right":
            self._horizontal.append(size)
        elif position == "bottom":
            self._vertical.insert(0, size)
            self._yrefindex += 1
        else:  # 'top'
            self._vertical.append(size)

    def add_auto_adjustable_area(self, use_axes, pad=0.1, adjust_dirs=None):
        """
        Add auto-adjustable padding around *use_axes* to take their decorations
        (title, labels, ticks, ticklabels) into account during layout.

        Parameters
        ----------
        use_axes : `~matplotlib.axes.Axes` or list of `~matplotlib.axes.Axes`
            The Axes whose decorations are taken into account.
        pad : float, default: 0.1
            Additional padding in inches.
        adjust_dirs : list of {"left", "right", "bottom", "top"}, optional
            The sides where padding is added; defaults to all four sides.
        """
        if adjust_dirs is None:
            adjust_dirs = ["left", "right", "bottom", "top"]
        for d in adjust_dirs:
            self.append_size(d, Size._AxesDecorationsSize(use_axes, d) + pad)
306
+
307
+
308
@_api.deprecated("3.8")
class AxesLocator:
    """
    A callable object which returns the position and size of a given
    `.AxesDivider` cell.
    """

    def __init__(self, axes_divider, nx, ny, nx1=None, ny1=None):
        """
        Parameters
        ----------
        axes_divider : `~mpl_toolkits.axes_grid1.axes_divider.AxesDivider`
        nx, nx1 : int
            Integers specifying the column-position of the
            cell. When *nx1* is None, a single *nx*-th column is
            specified. Otherwise, location of columns spanning between *nx*
            to *nx1* (but excluding *nx1*-th column) is specified.
        ny, ny1 : int
            Same as *nx* and *nx1*, but for row positions.
        """
        self._axes_divider = axes_divider

        _xrefindex = axes_divider._xrefindex
        _yrefindex = axes_divider._yrefindex

        # Store cell indices relative to the divider's reference cell so that
        # later append_size("left"/"bottom") calls do not shift this locator.
        self._nx, self._ny = nx - _xrefindex, ny - _yrefindex

        if nx1 is None:
            # NOTE(review): assumes the divider supports len() and indexing
            # to count columns/rows -- confirm against the Divider API.
            nx1 = len(self._axes_divider)
        if ny1 is None:
            ny1 = len(self._axes_divider[0])

        self._nx1 = nx1 - _xrefindex
        self._ny1 = ny1 - _yrefindex

    def __call__(self, axes, renderer):
        # Re-add the (possibly updated) reference offsets before delegating
        # to the divider's (deprecated) locate().
        _xrefindex = self._axes_divider._xrefindex
        _yrefindex = self._axes_divider._yrefindex

        return self._axes_divider.locate(self._nx + _xrefindex,
                                         self._ny + _yrefindex,
                                         self._nx1 + _xrefindex,
                                         self._ny1 + _yrefindex,
                                         axes,
                                         renderer)

    def get_subplotspec(self):
        """Forward to the underlying divider's SubplotSpec."""
        return self._axes_divider.get_subplotspec()
357
+
358
+
359
class SubplotDivider(Divider):
    """
    The Divider class whose rectangle area is specified as a subplot geometry.
    """

    def __init__(self, fig, *args, horizontal=None, vertical=None,
                 aspect=None, anchor='C'):
        """
        Parameters
        ----------
        fig : `~matplotlib.figure.Figure`

        *args : tuple (*nrows*, *ncols*, *index*) or int
            The array of subplots in the figure has dimensions ``(nrows,
            ncols)``, and *index* is the index of the subplot being created.
            *index* starts at 1 in the upper left corner and increases to the
            right.

            If *nrows*, *ncols*, and *index* are all single digit numbers, then
            *args* can be passed as a single 3-digit number (e.g. 234 for
            (2, 3, 4)).
        horizontal : list of :mod:`~mpl_toolkits.axes_grid1.axes_size`, optional
            Sizes for horizontal division.
        vertical : list of :mod:`~mpl_toolkits.axes_grid1.axes_size`, optional
            Sizes for vertical division.
        aspect : bool, optional
            Whether overall rectangular area is reduced so that the relative
            part of the horizontal and vertical scales have the same scale.
        anchor : (float, float) or {'C', 'SW', 'S', 'SE', 'E', 'NE', 'N', \
'NW', 'W'}, default: 'C'
            Placement of the reduced rectangle, when *aspect* is True.
        """
        self.figure = fig
        # The placeholder [0, 0, 1, 1] position is immediately replaced by
        # set_subplotspec() below.
        super().__init__(fig, [0, 0, 1, 1],
                         horizontal=horizontal or [], vertical=vertical or [],
                         aspect=aspect, anchor=anchor)
        self.set_subplotspec(SubplotSpec._from_subplot_args(fig, args))

    def get_position(self):
        """Return the bounds of the subplot box."""
        return self.get_subplotspec().get_position(self.figure).bounds

    def get_subplotspec(self):
        """Get the SubplotSpec instance."""
        return self._subplotspec

    def set_subplotspec(self, subplotspec):
        """Set the SubplotSpec instance."""
        self._subplotspec = subplotspec
        self.set_position(subplotspec.get_position(self.figure))
409
+
410
+
411
class AxesDivider(Divider):
    """
    Divider based on the preexisting axes.
    """

    def __init__(self, axes, xref=None, yref=None):
        """
        Parameters
        ----------
        axes : :class:`~matplotlib.axes.Axes`
        xref
        yref
        """
        self._axes = axes
        if xref is None:
            self._xref = Size.AxesX(axes)
        else:
            self._xref = xref
        if yref is None:
            self._yref = Size.AxesY(axes)
        else:
            self._yref = yref

        # Start as a 1x1 grid holding the main Axes; append_axes() grows it.
        super().__init__(fig=axes.get_figure(), pos=None,
                         horizontal=[self._xref], vertical=[self._yref],
                         aspect=None, anchor="C")

    def _get_new_axes(self, *, axes_class=None, **kwargs):
        # Create a new Axes of the same type as the main Axes (or of
        # *axes_class*) at the main Axes' original position; the caller is
        # responsible for installing the proper axes locator.
        axes = self._axes
        if axes_class is None:
            axes_class = type(axes)
        return axes_class(axes.get_figure(), axes.get_position(original=True),
                          **kwargs)

    def new_horizontal(self, size, pad=None, pack_start=False, **kwargs):
        """
        Helper method for ``append_axes("left")`` and ``append_axes("right")``.

        See the documentation of `append_axes` for more details.

        :meta private:
        """
        if pad is None:
            # Default pad: wspace fraction of the main Axes width.
            pad = mpl.rcParams["figure.subplot.wspace"] * self._xref
        pos = "left" if pack_start else "right"
        if pad:
            if not isinstance(pad, Size._Base):
                pad = Size.from_any(pad, fraction_ref=self._xref)
            self.append_size(pos, pad)
        if not isinstance(size, Size._Base):
            size = Size.from_any(size, fraction_ref=self._xref)
        self.append_size(pos, size)
        # After append_size, the new cell is at index 0 (left) or at the end
        # (right) of the horizontal list.
        locator = self.new_locator(
            nx=0 if pack_start else len(self._horizontal) - 1,
            ny=self._yrefindex)
        ax = self._get_new_axes(**kwargs)
        ax.set_axes_locator(locator)
        return ax

    def new_vertical(self, size, pad=None, pack_start=False, **kwargs):
        """
        Helper method for ``append_axes("top")`` and ``append_axes("bottom")``.

        See the documentation of `append_axes` for more details.

        :meta private:
        """
        if pad is None:
            # Default pad: hspace fraction of the main Axes height.
            pad = mpl.rcParams["figure.subplot.hspace"] * self._yref
        pos = "bottom" if pack_start else "top"
        if pad:
            if not isinstance(pad, Size._Base):
                pad = Size.from_any(pad, fraction_ref=self._yref)
            self.append_size(pos, pad)
        if not isinstance(size, Size._Base):
            size = Size.from_any(size, fraction_ref=self._yref)
        self.append_size(pos, size)
        locator = self.new_locator(
            nx=self._xrefindex,
            ny=0 if pack_start else len(self._vertical) - 1)
        ax = self._get_new_axes(**kwargs)
        ax.set_axes_locator(locator)
        return ax

    def append_axes(self, position, size, pad=None, *, axes_class=None,
                    **kwargs):
        """
        Add a new axes on a given side of the main axes.

        Parameters
        ----------
        position : {"left", "right", "bottom", "top"}
            Where the new axes is positioned relative to the main axes.
        size : :mod:`~mpl_toolkits.axes_grid1.axes_size` or float or str
            The axes width or height.  float or str arguments are interpreted
            as ``axes_size.from_any(size, AxesX(<main_axes>))`` for left or
            right axes, and likewise with ``AxesY`` for bottom or top axes.
        pad : :mod:`~mpl_toolkits.axes_grid1.axes_size` or float or str
            Padding between the axes.  float or str arguments are interpreted
            as for *size*.  Defaults to :rc:`figure.subplot.wspace` times the
            main Axes width (left or right axes) or :rc:`figure.subplot.hspace`
            times the main Axes height (bottom or top axes).
        axes_class : subclass type of `~.axes.Axes`, optional
            The type of the new axes.  Defaults to the type of the main axes.
        **kwargs
            All extra keywords arguments are passed to the created axes.
        """
        create_axes, pack_start = _api.check_getitem({
            "left": (self.new_horizontal, True),
            "right": (self.new_horizontal, False),
            "bottom": (self.new_vertical, True),
            "top": (self.new_vertical, False),
        }, position=position)
        ax = create_axes(
            size, pad, pack_start=pack_start, axes_class=axes_class, **kwargs)
        self._fig.add_axes(ax)
        return ax

    def get_aspect(self):
        # When no aspect was set explicitly, mirror the main Axes' aspect:
        # anything other than "auto" means keep the aspect.
        if self._aspect is None:
            aspect = self._axes.get_aspect()
            if aspect == "auto":
                return False
            else:
                return True
        else:
            return self._aspect

    def get_position(self):
        # When no position was set explicitly, track the main Axes' original
        # (pre-locator) position at call time.
        if self._pos is None:
            bbox = self._axes.get_position(original=True)
            return bbox.bounds
        else:
            return self._pos

    def get_anchor(self):
        # When no anchor was set explicitly, track the main Axes' anchor.
        if self._anchor is None:
            return self._axes.get_anchor()
        else:
            return self._anchor

    def get_subplotspec(self):
        """Forward to the main Axes' SubplotSpec."""
        return self._axes.get_subplotspec()
554
+
555
+
556
# Helper for HBoxDivider/VBoxDivider.
# The variable names are written for a horizontal layout, but the calculations
# work identically for vertical layouts.
def _locate(x, y, w, h, summed_widths, equal_heights, fig_w, fig_h, anchor):
    """
    Compute the anchored origin, cumulative cell offsets, and common cell
    height for a row of cells with the given (rel, abs) size specs.
    """
    total_width = fig_w * w
    max_height = fig_h * h

    # Determine the k factors.
    n = len(equal_heights)
    eq_rels, eq_abss = equal_heights.T
    sm_rels, sm_abss = summed_widths.T
    A = np.diag([*eq_rels, 0])
    A[:n, -1] = -1
    A[-1, :-1] = sm_rels
    B = [*(-eq_abss), total_width - sm_abss.sum()]
    # A @ K = B: This finds factors {k_0, ..., k_{N-1}, H} so that
    #   eq_rel_i * k_i + eq_abs_i = H for all i: all axes have the same height
    #   sum(sm_rel_i * k_i + sm_abs_i) = total_width: fixed total width
    # (foo_rel_i * k_i + foo_abs_i will end up being the size of foo.)
    *karray, height = np.linalg.solve(A, B)
    if height > max_height:  # Additionally, upper-bound the height.
        karray = (max_height - eq_abss) / eq_rels

    # Compute the offsets corresponding to these factors.
    ox = np.cumsum([0, *(sm_rels * karray + sm_abss)])
    ww = (ox[-1] - ox[0]) / fig_w
    h0_rel, h0_abs = equal_heights[0]
    hh = (karray[0]*h0_rel + h0_abs) / fig_h
    # Anchor the (possibly shrunk) total box inside the available rectangle.
    pb = mtransforms.Bbox.from_bounds(x, y, w, h)
    pb1 = mtransforms.Bbox.from_bounds(x, y, ww, hh)
    x0, y0 = pb1.anchored(anchor, pb).p0

    return x0, y0, ox, hh
590
+
591
+
592
class HBoxDivider(SubplotDivider):
    """
    A `.SubplotDivider` for laying out axes horizontally, while ensuring that
    they have equal heights.

    Examples
    --------
    .. plot:: gallery/axes_grid1/demo_axes_hbox_divider.py
    """

    def new_locator(self, nx, nx1=None):
        """
        Create an axes locator callable for the specified cell.

        Parameters
        ----------
        nx, nx1 : int
            Integers specifying the column-position of the
            cell. When *nx1* is None, a single *nx*-th column is
            specified. Otherwise, location of columns spanning between *nx*
            to *nx1* (but excluding *nx1*-th column) is specified.
        """
        # There is a single row, so ny is always 0.
        return super().new_locator(nx, 0, nx1, 0)

    def _locate(self, nx, ny, nx1, ny1, axes, renderer):
        # docstring inherited
        nx += self._xrefindex
        nx1 += self._xrefindex
        fig_w, fig_h = self._fig.bbox.size / self._fig.dpi
        x, y, w, h = self.get_position_runtime(axes, renderer)
        summed_ws = self.get_horizontal_sizes(renderer)
        equal_hs = self.get_vertical_sizes(renderer)
        # Solve for offsets so widths sum to the full width while all cells
        # share a common height.
        x0, y0, ox, hh = _locate(
            x, y, w, h, summed_ws, equal_hs, fig_w, fig_h, self.get_anchor())
        if nx1 is None:
            nx1 = -1
        x1, w1 = x0 + ox[nx] / fig_w, (ox[nx1] - ox[nx]) / fig_w
        y1, h1 = y0, hh
        return mtransforms.Bbox.from_bounds(x1, y1, w1, h1)
631
+
632
+
633
class VBoxDivider(SubplotDivider):
    """
    A `.SubplotDivider` for laying out axes vertically, while ensuring that
    they have equal widths.
    """

    def new_locator(self, ny, ny1=None):
        """
        Create an axes locator callable for the specified cell.

        Parameters
        ----------
        ny, ny1 : int
            Integers specifying the row-position of the
            cell. When *ny1* is None, a single *ny*-th row is
            specified. Otherwise, location of rows spanning between *ny*
            to *ny1* (but excluding *ny1*-th row) is specified.
        """
        # There is a single column, so nx is always 0.
        return super().new_locator(0, ny, 0, ny1)

    def _locate(self, nx, ny, nx1, ny1, axes, renderer):
        # docstring inherited
        ny += self._yrefindex
        ny1 += self._yrefindex
        fig_w, fig_h = self._fig.bbox.size / self._fig.dpi
        x, y, w, h = self.get_position_runtime(axes, renderer)
        summed_hs = self.get_vertical_sizes(renderer)
        equal_ws = self.get_horizontal_sizes(renderer)
        # Reuse the horizontal helper with the axes swapped.
        y0, x0, oy, ww = _locate(
            y, x, h, w, summed_hs, equal_ws, fig_h, fig_w, self.get_anchor())
        if ny1 is None:
            ny1 = -1
        x1, w1 = x0, ww
        y1, h1 = y0 + oy[ny] / fig_h, (oy[ny1] - oy[ny]) / fig_h
        return mtransforms.Bbox.from_bounds(x1, y1, w1, h1)
668
+
669
+
670
def make_axes_locatable(axes):
    """
    Attach an `AxesDivider` to *axes* and return it.

    The Axes is re-positioned through the divider's (0, 0) cell locator, so
    that Axes later appended to the divider share the original area.
    """
    divider = AxesDivider(axes)
    axes.set_axes_locator(divider.new_locator(nx=0, ny=0))
    return divider
676
+
677
+
678
def make_axes_area_auto_adjustable(
        ax, use_axes=None, pad=0.1, adjust_dirs=None):
    """
    Add auto-adjustable padding around *ax* to take its decorations (title,
    labels, ticks, ticklabels) into account during layout, using
    `.Divider.add_auto_adjustable_area`.

    By default, padding is determined from the decorations of *ax*.
    Pass *use_axes* to consider the decorations of other Axes instead.
    """
    divider = make_axes_locatable(ax)
    divider.add_auto_adjustable_area(
        use_axes=ax if use_axes is None else use_axes,
        pad=pad,
        adjust_dirs=(["left", "right", "bottom", "top"]
                     if adjust_dirs is None else adjust_dirs),
    )
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_grid.py ADDED
@@ -0,0 +1,563 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from numbers import Number
2
+ import functools
3
+ from types import MethodType
4
+
5
+ import numpy as np
6
+
7
+ from matplotlib import _api, cbook
8
+ from matplotlib.gridspec import SubplotSpec
9
+
10
+ from .axes_divider import Size, SubplotDivider, Divider
11
+ from .mpl_axes import Axes, SimpleAxisArtist
12
+
13
+
14
class CbarAxesBase:
    """
    Mixin adding colorbar-related behavior to an Axes class; the side on
    which the colorbar sits is recorded as ``self.orientation``.
    """

    def __init__(self, *args, orientation, **kwargs):
        self.orientation = orientation
        super().__init__(*args, **kwargs)

    def colorbar(self, mappable, **kwargs):
        """Draw a colorbar for *mappable* into this axes; extra kwargs go to `.Figure.colorbar`."""
        return self.figure.colorbar(
            mappable, cax=self, location=self.orientation, **kwargs)

    @_api.deprecated("3.8", alternative="ax.tick_params and colorbar.set_label")
    def toggle_label(self, b):
        side_axis = self.axis[self.orientation]
        side_axis.toggle(ticklabels=b, label=b)
27
+
28
+
29
# Cached factory producing "Cbar<AxesClass>" classes that mix CbarAxesBase
# into a given Axes class.
_cbaraxes_class_factory = cbook._make_class_factory(CbarAxesBase, "Cbar{}")
30
+
31
+
32
class Grid:
    """
    A grid of Axes.

    In Matplotlib, the Axes location (and size) is specified in normalized
    figure coordinates. This may not be ideal for images that needs to be
    displayed with a given aspect ratio; for example, it is difficult to
    display multiple images of a same size with some fixed padding between
    them. AxesGrid can be used in such case.

    Attributes
    ----------
    axes_all : list of Axes
        A flat list of Axes. Note that you can also access this directly
        from the grid. The following is equivalent ::

            grid[i] == grid.axes_all[i]
            len(grid) == len(grid.axes_all)

    axes_column : list of list of Axes
        A 2D list of Axes where the first index is the column. This results
        in the usage pattern ``grid.axes_column[col][row]``.
    axes_row : list of list of Axes
        A 2D list of Axes where the first index is the row. This results
        in the usage pattern ``grid.axes_row[row][col]``.
    axes_llc : Axes
        The Axes in the lower left corner.
    ngrids : int
        Number of Axes in the grid.
    """

    # Axes subclass instantiated when *axes_class* is not given.
    _defaultAxesClass = Axes

    def __init__(self, fig,
                 rect,
                 nrows_ncols,
                 ngrids=None,
                 direction="row",
                 axes_pad=0.02,
                 *,
                 share_all=False,
                 share_x=True,
                 share_y=True,
                 label_mode="L",
                 axes_class=None,
                 aspect=False,
                 ):
        """
        Parameters
        ----------
        fig : `.Figure`
            The parent figure.
        rect : (float, float, float, float), (int, int, int), int, or \
`~.SubplotSpec`
            The axes position, as a ``(left, bottom, width, height)`` tuple,
            as a three-digit subplot position code (e.g., ``(1, 2, 1)`` or
            ``121``), or as a `~.SubplotSpec`.
        nrows_ncols : (int, int)
            Number of rows and columns in the grid.
        ngrids : int or None, default: None
            If not None, only the first *ngrids* axes in the grid are created.
        direction : {"row", "column"}, default: "row"
            Whether axes are created in row-major ("row by row") or
            column-major order ("column by column"). This also affects the
            order in which axes are accessed using indexing (``grid[index]``).
        axes_pad : float or (float, float), default: 0.02
            Padding or (horizontal padding, vertical padding) between axes, in
            inches.
        share_all : bool, default: False
            Whether all axes share their x- and y-axis. Overrides *share_x*
            and *share_y*.
        share_x : bool, default: True
            Whether all axes of a column share their x-axis.
        share_y : bool, default: True
            Whether all axes of a row share their y-axis.
        label_mode : {"L", "1", "all", "keep"}, default: "L"
            Determines which axes will get tick labels:

            - "L": All axes on the left column get vertical tick labels;
              all axes on the bottom row get horizontal tick labels.
            - "1": Only the bottom left axes is labelled.
            - "all": All axes are labelled.
            - "keep": Do not do anything.

        axes_class : subclass of `matplotlib.axes.Axes`, default: `.mpl_axes.Axes`
            The type of Axes to create.
        aspect : bool, default: False
            Whether the axes aspect ratio follows the aspect ratio of the data
            limits.
        """
        self._nrows, self._ncols = nrows_ncols

        if ngrids is None:
            ngrids = self._nrows * self._ncols
        else:
            if not 0 < ngrids <= self._nrows * self._ncols:
                raise ValueError(
                    "ngrids must be positive and not larger than nrows*ncols")

        self.ngrids = ngrids

        # broadcast_to accepts a scalar or a pair, yielding (hpad, vpad).
        self._horiz_pad_size, self._vert_pad_size = map(
            Size.Fixed, np.broadcast_to(axes_pad, 2))

        _api.check_in_list(["column", "row"], direction=direction)
        self._direction = direction

        if axes_class is None:
            axes_class = self._defaultAxesClass
        elif isinstance(axes_class, (list, tuple)):
            # (class, kwargs) pair: bind the kwargs up front.
            cls, kwargs = axes_class
            axes_class = functools.partial(cls, **kwargs)

        kw = dict(horizontal=[], vertical=[], aspect=aspect)
        if isinstance(rect, (Number, SubplotSpec)):
            self._divider = SubplotDivider(fig, rect, **kw)
        elif len(rect) == 3:
            self._divider = SubplotDivider(fig, *rect, **kw)
        elif len(rect) == 4:
            self._divider = Divider(fig, rect, **kw)
        else:
            raise TypeError("Incorrect rect format")

        rect = self._divider.get_position()

        # Entries remain None for slots beyond ngrids (ngrids < nrows*ncols).
        axes_array = np.full((self._nrows, self._ncols), None, dtype=object)
        for i in range(self.ngrids):
            col, row = self._get_col_row(i)
            if share_all:
                sharex = sharey = axes_array[0, 0]
            else:
                # Share x within a column, y within a row.
                sharex = axes_array[0, col] if share_x else None
                sharey = axes_array[row, 0] if share_y else None
            axes_array[row, col] = axes_class(
                fig, rect, sharex=sharex, sharey=sharey)
        # Flat view follows the creation order ("row" -> C order, else Fortran).
        self.axes_all = axes_array.ravel(
            order="C" if self._direction == "row" else "F").tolist()
        self.axes_column = axes_array.T.tolist()
        self.axes_row = axes_array.tolist()
        self.axes_llc = self.axes_column[0][-1]

        self._init_locators()

        for ax in self.axes_all:
            fig.add_axes(ax)

        self.set_label_mode(label_mode)

    def _init_locators(self):
        # Cells alternate [axes, pad, axes, pad, ..., axes], hence index
        # 2*col below; divider rows count from the bottom while grid rows
        # count from the top, hence the (nrows - 1 - row) flip.
        self._divider.set_horizontal(
            [Size.Scaled(1), self._horiz_pad_size] * (self._ncols-1) + [Size.Scaled(1)])
        self._divider.set_vertical(
            [Size.Scaled(1), self._vert_pad_size] * (self._nrows-1) + [Size.Scaled(1)])
        for i in range(self.ngrids):
            col, row = self._get_col_row(i)
            self.axes_all[i].set_axes_locator(
                self._divider.new_locator(nx=2 * col, ny=2 * (self._nrows - 1 - row)))

    def _get_col_row(self, n):
        # Map a flat index to (col, row) following the fill direction.
        if self._direction == "column":
            col, row = divmod(n, self._nrows)
        else:
            row, col = divmod(n, self._ncols)

        return col, row

    # Good to propagate __len__ if we have __getitem__
    def __len__(self):
        return len(self.axes_all)

    def __getitem__(self, i):
        return self.axes_all[i]

    def get_geometry(self):
        """
        Return the number of rows and columns of the grid as (nrows, ncols).
        """
        return self._nrows, self._ncols

    def set_axes_pad(self, axes_pad):
        """
        Set the padding between the axes.

        Parameters
        ----------
        axes_pad : (float, float)
            The padding (horizontal pad, vertical pad) in inches.
        """
        self._horiz_pad_size.fixed_size = axes_pad[0]
        self._vert_pad_size.fixed_size = axes_pad[1]

    def get_axes_pad(self):
        """
        Return the axes padding.

        Returns
        -------
        hpad, vpad
            Padding (horizontal pad, vertical pad) in inches.
        """
        return (self._horiz_pad_size.fixed_size,
                self._vert_pad_size.fixed_size)

    def set_aspect(self, aspect):
        """Set the aspect of the SubplotDivider."""
        self._divider.set_aspect(aspect)

    def get_aspect(self):
        """Return the aspect of the SubplotDivider."""
        return self._divider.get_aspect()

    def set_label_mode(self, mode):
        """
        Define which axes have tick labels.

        Parameters
        ----------
        mode : {"L", "1", "all", "keep"}
            The label mode:

            - "L": All axes on the left column get vertical tick labels;
              all axes on the bottom row get horizontal tick labels.
            - "1": Only the bottom left axes is labelled.
            - "all": All axes are labelled.
            - "keep": Do not do anything.
        """
        _api.check_in_list(["all", "L", "1", "keep"], mode=mode)
        # Boolean masks marking the bottom row and the left column of the grid.
        is_last_row, is_first_col = (
            np.mgrid[:self._nrows, :self._ncols] == [[[self._nrows - 1]], [[0]]])
        if mode == "all":
            bottom = left = np.full((self._nrows, self._ncols), True)
        elif mode == "L":
            bottom = is_last_row
            left = is_first_col
        elif mode == "1":
            bottom = left = is_last_row & is_first_col
        else:
            return
        # NOTE(review): when ngrids < nrows*ncols, axes_row entries can be
        # None and ax.axis below would fail — presumably label modes are only
        # used with fully populated grids; confirm.
        for i in range(self._nrows):
            for j in range(self._ncols):
                ax = self.axes_row[i][j]
                # Plain Axes expose .axis as a method; mpl_axes.Axes expose a
                # dict-like of axis artists.
                if isinstance(ax.axis, MethodType):
                    bottom_axis = SimpleAxisArtist(ax.xaxis, 1, ax.spines["bottom"])
                    left_axis = SimpleAxisArtist(ax.yaxis, 1, ax.spines["left"])
                else:
                    bottom_axis = ax.axis["bottom"]
                    left_axis = ax.axis["left"]
                bottom_axis.toggle(ticklabels=bottom[i, j], label=bottom[i, j])
                left_axis.toggle(ticklabels=left[i, j], label=left[i, j])

    def get_divider(self):
        """Return the `.Divider` managing the axes positions."""
        return self._divider

    def set_axes_locator(self, locator):
        """Forward *locator* to the underlying divider."""
        self._divider.set_locator(locator)

    def get_axes_locator(self):
        """Return the locator of the underlying divider."""
        return self._divider.get_locator()
290
+
291
+
292
class ImageGrid(Grid):
    """
    A grid of Axes for Image display.

    This class is a specialization of `~.axes_grid1.axes_grid.Grid` for displaying a
    grid of images. In particular, it forces all axes in a column to share their x-axis
    and all axes in a row to share their y-axis. It further provides helpers to add
    colorbars to some or all axes.
    """

    def __init__(self, fig,
                 rect,
                 nrows_ncols,
                 ngrids=None,
                 direction="row",
                 axes_pad=0.02,
                 *,
                 share_all=False,
                 aspect=True,
                 label_mode="L",
                 cbar_mode=None,
                 cbar_location="right",
                 cbar_pad=None,
                 cbar_size="5%",
                 cbar_set_cax=True,
                 axes_class=None,
                 ):
        """
        Parameters
        ----------
        fig : `.Figure`
            The parent figure.
        rect : (float, float, float, float) or int
            The axes position, as a ``(left, bottom, width, height)`` tuple or
            as a three-digit subplot position code (e.g., "121").
        nrows_ncols : (int, int)
            Number of rows and columns in the grid.
        ngrids : int or None, default: None
            If not None, only the first *ngrids* axes in the grid are created.
        direction : {"row", "column"}, default: "row"
            Whether axes are created in row-major ("row by row") or
            column-major order ("column by column"). This also affects the
            order in which axes are accessed using indexing (``grid[index]``).
        axes_pad : float or (float, float), default: 0.02in
            Padding or (horizontal padding, vertical padding) between axes, in
            inches.
        share_all : bool, default: False
            Whether all axes share their x- and y-axis. Note that in any case,
            all axes in a column share their x-axis and all axes in a row share
            their y-axis.
        aspect : bool, default: True
            Whether the axes aspect ratio follows the aspect ratio of the data
            limits.
        label_mode : {"L", "1", "all"}, default: "L"
            Determines which axes will get tick labels:

            - "L": All axes on the left column get vertical tick labels;
              all axes on the bottom row get horizontal tick labels.
            - "1": Only the bottom left axes is labelled.
            - "all": all axes are labelled.

        cbar_mode : {"each", "single", "edge", None}, default: None
            Whether to create a colorbar for "each" axes, a "single" colorbar
            for the entire grid, colorbars only for axes on the "edge"
            determined by *cbar_location*, or no colorbars. The colorbars are
            stored in the :attr:`cbar_axes` attribute.
        cbar_location : {"left", "right", "bottom", "top"}, default: "right"
        cbar_pad : float, default: None
            Padding between the image axes and the colorbar axes.
        cbar_size : size specification (see `.Size.from_any`), default: "5%"
            Colorbar size.
        cbar_set_cax : bool, default: True
            If True, each axes in the grid has a *cax* attribute that is bound
            to associated *cbar_axes*.
        axes_class : subclass of `matplotlib.axes.Axes`, default: None
        """
        _api.check_in_list(["each", "single", "edge", None],
                           cbar_mode=cbar_mode)
        _api.check_in_list(["left", "right", "bottom", "top"],
                           cbar_location=cbar_location)
        self._colorbar_mode = cbar_mode
        self._colorbar_location = cbar_location
        self._colorbar_pad = cbar_pad
        self._colorbar_size = cbar_size
        # The colorbar axes are created in _init_locators().

        super().__init__(
            fig, rect, nrows_ncols, ngrids,
            direction=direction, axes_pad=axes_pad,
            share_all=share_all, share_x=True, share_y=True, aspect=aspect,
            label_mode=label_mode, axes_class=axes_class)

        for ax in self.cbar_axes:
            fig.add_axes(ax)

        # Bind each image axes' .cax attribute to its colorbar axes.
        if cbar_set_cax:
            if self._colorbar_mode == "single":
                for ax in self.axes_all:
                    ax.cax = self.cbar_axes[0]
            elif self._colorbar_mode == "edge":
                for index, ax in enumerate(self.axes_all):
                    col, row = self._get_col_row(index)
                    # One colorbar per row (left/right) or per column (top/bottom).
                    if self._colorbar_location in ("left", "right"):
                        ax.cax = self.cbar_axes[row]
                    else:
                        ax.cax = self.cbar_axes[col]
            else:
                for ax, cax in zip(self.axes_all, self.cbar_axes):
                    ax.cax = cax

    def _init_locators(self):
        # Slightly abusing this method to inject colorbar creation into init.

        if self._colorbar_pad is None:
            # horizontal or vertical arrangement?
            if self._colorbar_location in ("left", "right"):
                self._colorbar_pad = self._horiz_pad_size.fixed_size
            else:
                self._colorbar_pad = self._vert_pad_size.fixed_size
        self.cbar_axes = [
            _cbaraxes_class_factory(self._defaultAxesClass)(
                self.axes_all[0].figure, self._divider.get_position(),
                orientation=self._colorbar_location)
            for _ in range(self.ngrids)]

        cb_mode = self._colorbar_mode
        cb_location = self._colorbar_location

        # h and v collect the Size objects forming the divider's horizontal
        # and vertical cell layout; *_ax_pos / *_cb_pos record the cell
        # indices of image axes and colorbar axes respectively.
        h = []
        v = []

        h_ax_pos = []
        h_cb_pos = []
        if cb_mode == "single" and cb_location in ("left", "bottom"):
            if cb_location == "left":
                sz = self._nrows * Size.AxesX(self.axes_llc)
                h.append(Size.from_any(self._colorbar_size, sz))
                h.append(Size.from_any(self._colorbar_pad, sz))
                locator = self._divider.new_locator(nx=0, ny=0, ny1=-1)
            elif cb_location == "bottom":
                sz = self._ncols * Size.AxesY(self.axes_llc)
                v.append(Size.from_any(self._colorbar_size, sz))
                v.append(Size.from_any(self._colorbar_pad, sz))
                locator = self._divider.new_locator(nx=0, nx1=-1, ny=0)
            for i in range(self.ngrids):
                self.cbar_axes[i].set_visible(False)
            self.cbar_axes[0].set_axes_locator(locator)
            self.cbar_axes[0].set_visible(True)

        for col, ax in enumerate(self.axes_row[0]):
            if h:
                h.append(self._horiz_pad_size)

            # Slots beyond ngrids are None; fall back to the first axes' size.
            if ax:
                sz = Size.AxesX(ax, aspect="axes", ref_ax=self.axes_all[0])
            else:
                sz = Size.AxesX(self.axes_all[0],
                                aspect="axes", ref_ax=self.axes_all[0])

            if (cb_location == "left"
                    and (cb_mode == "each"
                         or (cb_mode == "edge" and col == 0))):
                h_cb_pos.append(len(h))
                h.append(Size.from_any(self._colorbar_size, sz))
                h.append(Size.from_any(self._colorbar_pad, sz))

            h_ax_pos.append(len(h))
            h.append(sz)

            if (cb_location == "right"
                    and (cb_mode == "each"
                         or (cb_mode == "edge" and col == self._ncols - 1))):
                h.append(Size.from_any(self._colorbar_pad, sz))
                h_cb_pos.append(len(h))
                h.append(Size.from_any(self._colorbar_size, sz))

        v_ax_pos = []
        v_cb_pos = []
        # Iterate bottom-to-top, since divider rows count from the bottom.
        for row, ax in enumerate(self.axes_column[0][::-1]):
            if v:
                v.append(self._vert_pad_size)

            if ax:
                sz = Size.AxesY(ax, aspect="axes", ref_ax=self.axes_all[0])
            else:
                sz = Size.AxesY(self.axes_all[0],
                                aspect="axes", ref_ax=self.axes_all[0])

            if (cb_location == "bottom"
                    and (cb_mode == "each"
                         or (cb_mode == "edge" and row == 0))):
                v_cb_pos.append(len(v))
                v.append(Size.from_any(self._colorbar_size, sz))
                v.append(Size.from_any(self._colorbar_pad, sz))

            v_ax_pos.append(len(v))
            v.append(sz)

            if (cb_location == "top"
                    and (cb_mode == "each"
                         or (cb_mode == "edge" and row == self._nrows - 1))):
                v.append(Size.from_any(self._colorbar_pad, sz))
                v_cb_pos.append(len(v))
                v.append(Size.from_any(self._colorbar_size, sz))

        # Install locators for image axes and (each/edge) colorbar axes.
        for i in range(self.ngrids):
            col, row = self._get_col_row(i)
            locator = self._divider.new_locator(nx=h_ax_pos[col],
                                                ny=v_ax_pos[self._nrows-1-row])
            self.axes_all[i].set_axes_locator(locator)

            if cb_mode == "each":
                if cb_location in ("right", "left"):
                    locator = self._divider.new_locator(
                        nx=h_cb_pos[col], ny=v_ax_pos[self._nrows - 1 - row])

                elif cb_location in ("top", "bottom"):
                    locator = self._divider.new_locator(
                        nx=h_ax_pos[col], ny=v_cb_pos[self._nrows - 1 - row])

                self.cbar_axes[i].set_axes_locator(locator)
            elif cb_mode == "edge":
                if (cb_location == "left" and col == 0
                        or cb_location == "right" and col == self._ncols - 1):
                    locator = self._divider.new_locator(
                        nx=h_cb_pos[0], ny=v_ax_pos[self._nrows - 1 - row])
                    self.cbar_axes[row].set_axes_locator(locator)
                elif (cb_location == "bottom" and row == self._nrows - 1
                      or cb_location == "top" and row == 0):
                    locator = self._divider.new_locator(nx=h_ax_pos[col],
                                                        ny=v_cb_pos[0])
                    self.cbar_axes[col].set_axes_locator(locator)

        if cb_mode == "single":
            if cb_location == "right":
                sz = self._nrows * Size.AxesX(self.axes_llc)
                h.append(Size.from_any(self._colorbar_pad, sz))
                h.append(Size.from_any(self._colorbar_size, sz))
                locator = self._divider.new_locator(nx=-2, ny=0, ny1=-1)
            elif cb_location == "top":
                sz = self._ncols * Size.AxesY(self.axes_llc)
                v.append(Size.from_any(self._colorbar_pad, sz))
                v.append(Size.from_any(self._colorbar_size, sz))
                locator = self._divider.new_locator(nx=0, nx1=-1, ny=-2)
            if cb_location in ("right", "top"):
                for i in range(self.ngrids):
                    self.cbar_axes[i].set_visible(False)
                self.cbar_axes[0].set_axes_locator(locator)
                self.cbar_axes[0].set_visible(True)
        elif cb_mode == "each":
            for i in range(self.ngrids):
                self.cbar_axes[i].set_visible(True)
        elif cb_mode == "edge":
            if cb_location in ("right", "left"):
                count = self._nrows
            else:
                count = self._ncols
            for i in range(count):
                self.cbar_axes[i].set_visible(True)
            # NOTE: relies on *i* leaking from the preceding loop (last
            # visible index) to hide the remaining colorbar axes.
            for j in range(i + 1, self.ngrids):
                self.cbar_axes[j].set_visible(False)
        else:
            for i in range(self.ngrids):
                self.cbar_axes[i].set_visible(False)
                self.cbar_axes[i].set_position([1., 1., 0.001, 0.001],
                                               which="active")

        self._divider.set_horizontal(h)
        self._divider.set_vertical(v)
561
+
562
+
563
# Backcompat alias: AxesGrid is the historical name of ImageGrid.
AxesGrid = ImageGrid
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_rgb.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from types import MethodType
2
+
3
+ import numpy as np
4
+
5
+ from .axes_divider import make_axes_locatable, Size
6
+ from .mpl_axes import Axes, SimpleAxisArtist
7
+
8
+
9
def make_rgb_axes(ax, pad=0.01, axes_class=None, **kwargs):
    """
    Create three channel Axes (R, G, B) to the right of *ax*.

    Parameters
    ----------
    ax : `~matplotlib.axes.Axes`
        Axes instance to create the RGB Axes in.
    pad : float, optional
        Fraction of the Axes height to pad.
    axes_class : `matplotlib.axes.Axes` or None, optional
        Axes class to use for the R, G, and B Axes. If None, use
        the same class as *ax*.
    **kwargs
        Forwarded to *axes_class* init for the R, G, and B Axes.

    Returns
    -------
    list of Axes
        The three channel Axes, ordered [R, G, B] top to bottom.
    """

    divider = make_axes_locatable(ax)

    pad_size = pad * Size.AxesY(ax)

    # Each channel panel is one third of the remaining height/width after
    # two pads.
    xsize = ((1-2*pad)/3) * Size.AxesX(ax)
    ysize = ((1-2*pad)/3) * Size.AxesY(ax)

    # Horizontal layout: [main axes | pad | channel column];
    # vertical layout: three channel rows separated by pads.
    divider.set_horizontal([Size.AxesX(ax), pad_size, xsize])
    divider.set_vertical([ysize, pad_size, ysize, pad_size, ysize])

    ax.set_axes_locator(divider.new_locator(0, 0, ny1=-1))

    ax_rgb = []
    if axes_class is None:
        axes_class = type(ax)

    # ny counts divider rows from the bottom: 4 = top (R), 2 = middle (G),
    # 0 = bottom (B).
    for ny in [4, 2, 0]:
        ax1 = axes_class(ax.get_figure(), ax.get_position(original=True),
                         sharex=ax, sharey=ax, **kwargs)
        locator = divider.new_locator(nx=2, ny=ny)
        ax1.set_axes_locator(locator)
        for t in ax1.yaxis.get_ticklabels() + ax1.xaxis.get_ticklabels():
            t.set_visible(False)
        # mpl_axes.Axes exposes .axis as a dict of axis artists; plain Axes
        # expose it as a method, in which case .values() raises
        # AttributeError and is skipped.
        try:
            for axis in ax1.axis.values():
                axis.major_ticklabels.set_visible(False)
        except AttributeError:
            pass

        ax_rgb.append(ax1)

    fig = ax.get_figure()
    for ax1 in ax_rgb:
        fig.add_axes(ax1)

    return ax_rgb
60
+
61
+
62
class RGBAxes:
    """
    4-panel `~.Axes.imshow` (RGB, R, G, B).

    Layout::

        ┌───────────────┬─────┐
        │               │  R  │
        │               ├─────┤
        │      RGB      │  G  │
        │               ├─────┤
        │               │  B  │
        └───────────────┴─────┘

    Subclasses can override the ``_defaultAxesClass`` attribute.
    By default RGBAxes uses `.mpl_axes.Axes`.

    Attributes
    ----------
    RGB : ``_defaultAxesClass``
        The Axes object for the three-channel `~.Axes.imshow`.
    R : ``_defaultAxesClass``
        The Axes object for the red channel `~.Axes.imshow`.
    G : ``_defaultAxesClass``
        The Axes object for the green channel `~.Axes.imshow`.
    B : ``_defaultAxesClass``
        The Axes object for the blue channel `~.Axes.imshow`.
    """

    _defaultAxesClass = Axes

    def __init__(self, *args, pad=0, **kwargs):
        """
        Parameters
        ----------
        pad : float, default: 0
            Fraction of the Axes height to put as padding.
        axes_class : `~matplotlib.axes.Axes`
            Axes class to use. If not provided, ``_defaultAxesClass`` is used.
        *args
            Forwarded to *axes_class* init for the RGB Axes
        **kwargs
            Forwarded to *axes_class* init for the RGB, R, G, and B Axes
        """
        axes_class = kwargs.pop("axes_class", self._defaultAxesClass)
        self.RGB = ax = axes_class(*args, **kwargs)
        ax.get_figure().add_axes(ax)
        self.R, self.G, self.B = make_rgb_axes(
            ax, pad=pad, axes_class=axes_class, **kwargs)
        # Set the line color and ticks for the axes.
        for ax1 in [self.RGB, self.R, self.G, self.B]:
            # Plain Axes: .axis is a method, so wrap spines/axes in
            # SimpleAxisArtist; mpl_axes.Axes: .axis is already dict-like.
            if isinstance(ax1.axis, MethodType):
                # NOTE(review): AxisDict is constructed with *self* (the
                # RGBAxes instance) rather than *ax1* — presumably only its
                # dict behavior is used here; confirm against mpl_axes.
                ad = Axes.AxisDict(self)
                ad.update(
                    bottom=SimpleAxisArtist(ax1.xaxis, 1, ax1.spines["bottom"]),
                    top=SimpleAxisArtist(ax1.xaxis, 2, ax1.spines["top"]),
                    left=SimpleAxisArtist(ax1.yaxis, 1, ax1.spines["left"]),
                    right=SimpleAxisArtist(ax1.yaxis, 2, ax1.spines["right"]))
            else:
                ad = ax1.axis
            # White axis lines and tick marks on all four sides.
            ad[:].line.set_color("w")
            ad[:].major_ticks.set_markeredgecolor("w")

    def imshow_rgb(self, r, g, b, **kwargs):
        """
        Create the four images {rgb, r, g, b}.

        Parameters
        ----------
        r, g, b : array-like
            The red, green, and blue arrays.
        **kwargs
            Forwarded to `~.Axes.imshow` calls for the four images.

        Returns
        -------
        rgb : `~matplotlib.image.AxesImage`
        r : `~matplotlib.image.AxesImage`
        g : `~matplotlib.image.AxesImage`
        b : `~matplotlib.image.AxesImage`
        """
        if not (r.shape == g.shape == b.shape):
            raise ValueError(
                f'Input shapes ({r.shape}, {g.shape}, {b.shape}) do not match')
        RGB = np.dstack([r, g, b])
        # Single-channel images: zero the other two channels.
        R = np.zeros_like(RGB)
        R[:, :, 0] = r
        G = np.zeros_like(RGB)
        G[:, :, 1] = g
        B = np.zeros_like(RGB)
        B[:, :, 2] = b
        im_rgb = self.RGB.imshow(RGB, **kwargs)
        im_r = self.R.imshow(R, **kwargs)
        im_g = self.G.imshow(G, **kwargs)
        im_b = self.B.imshow(B, **kwargs)
        return im_rgb, im_r, im_g, im_b
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/axes_size.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Provides classes of simple units that will be used with `.AxesDivider`
3
+ class (or others) to determine the size of each Axes. The unit
4
+ classes define `get_size` method that returns a tuple of two floats,
5
+ meaning relative and absolute sizes, respectively.
6
+
7
+ Note that this class is nothing more than a simple tuple of two
8
+ floats. Take a look at the Divider class to see how these two
9
+ values are used.
10
+ """
11
+
12
+ from numbers import Real
13
+
14
+ from matplotlib import _api
15
+ from matplotlib.axes import Axes
16
+
17
+
18
class _Base:
    """
    Base class for size units: supports ``number * size`` (scaling) and
    ``size + size`` / ``size + number`` (summation).
    """

    def __rmul__(self, other):
        # number * size -> a Fraction of this size.
        return Fraction(other, self)

    def __add__(self, other):
        # A bare number is treated as a fixed size in inches.
        addend = other if isinstance(other, _Base) else Fixed(other)
        return Add(self, addend)

    def get_size(self, renderer):
        """
        Return two-float tuple with relative and absolute sizes.
        """
        raise NotImplementedError("Subclasses must implement")
33
+
34
+
35
class Add(_Base):
    """
    Sum of two sizes: both the relative and the absolute parts add up.
    """

    def __init__(self, a, b):
        self._a = a
        self._b = b

    def get_size(self, renderer):
        rel_a, abs_a = self._a.get_size(renderer)
        rel_b, abs_b = self._b.get_size(renderer)
        return rel_a + rel_b, abs_a + abs_b
48
+
49
+
50
class Fixed(_Base):
    """
    Simple fixed size with absolute part = *fixed_size* and relative part = 0.
    """

    def __init__(self, fixed_size):
        _api.check_isinstance(Real, fixed_size=fixed_size)
        self.fixed_size = fixed_size

    def get_size(self, renderer):
        # Purely absolute: no scalable component.
        return 0., self.fixed_size
63
+
64
+
65
class Scaled(_Base):
    """
    Simple scaled size with relative part = *scalable_size* and
    absolute part = 0.
    """

    def __init__(self, scalable_size):
        self._scalable_size = scalable_size

    def get_size(self, renderer):
        # Purely relative: no fixed component.
        return self._scalable_size, 0.


Scalable = Scaled
80
+
81
+
82
+ def _get_axes_aspect(ax):
83
+ aspect = ax.get_aspect()
84
+ if aspect == "auto":
85
+ aspect = 1.
86
+ return aspect
87
+
88
+
89
class AxesX(_Base):
    """
    Scaled size whose relative part corresponds to the data width
    of the *axes* multiplied by the *aspect*.
    """

    def __init__(self, axes, aspect=1., ref_ax=None):
        self._axes = axes
        self._aspect = aspect
        if aspect == "axes" and ref_ax is None:
            raise ValueError("ref_ax must be set when aspect='axes'")
        self._ref_ax = ref_ax

    def get_size(self, renderer):
        xmin, xmax = self._axes.get_xlim()
        if self._aspect == "axes":
            # Relative to the reference Axes' aspect.
            aspect = (_get_axes_aspect(self._ref_ax)
                      / _get_axes_aspect(self._axes))
        else:
            aspect = self._aspect
        return abs(xmax - xmin) * aspect, 0.
113
+
114
+
115
class AxesY(_Base):
    """
    Scaled size whose relative part corresponds to the data height
    of the *axes* multiplied by the *aspect*.

    Parameters
    ----------
    axes : `~matplotlib.axes.Axes`
        The Axes whose y data limits define the size.
    aspect : float or "axes", default: 1.
        Multiplier for the data height; with ``"axes"`` the Axes' own aspect
        is used (and *ref_ax* must be given).
    ref_ax : `~matplotlib.axes.Axes`, optional
        Reference Axes, required when ``aspect="axes"``.
    """

    def __init__(self, axes, aspect=1., ref_ax=None):
        self._axes = axes
        self._aspect = aspect
        if aspect == "axes" and ref_ax is None:
            raise ValueError("ref_ax must be set when aspect='axes'")
        self._ref_ax = ref_ax

    def get_size(self, renderer):
        l1, l2 = self._axes.get_ylim()

        if self._aspect == "axes":
            # Fix: drop the previously computed-and-discarded reference
            # aspect (dead code). Unlike AxesX, only this Axes' own aspect
            # enters the height.
            aspect = _get_axes_aspect(self._axes)
        else:
            aspect = self._aspect

        rel_size = abs(l2 - l1) * aspect
        abs_size = 0.
        return rel_size, abs_size
140
+
141
+
142
class MaxExtent(_Base):
    """
    Size whose absolute part is either the largest width or the largest height
    of the given *artist_list*.
    """

    def __init__(self, artist_list, w_or_h):
        self._artist_list = artist_list
        _api.check_in_list(["width", "height"], w_or_h=w_or_h)
        self._w_or_h = w_or_h

    def add_artist(self, a):
        self._artist_list.append(a)

    def get_size(self, renderer):
        # Extents in inches (pixels / dpi); 0 if there are no artists.
        extents = (getattr(a.get_window_extent(renderer), self._w_or_h)
                   / a.figure.dpi
                   for a in self._artist_list)
        return 0., max(extents, default=0)
163
+
164
+
165
class MaxWidth(MaxExtent):
    """
    Size whose absolute part is the largest width of the given *artist_list*.
    """

    def __init__(self, artist_list):
        super().__init__(artist_list, "width")
172
+
173
+
174
class MaxHeight(MaxExtent):
    """
    Size whose absolute part is the largest height of the given *artist_list*.
    """

    def __init__(self, artist_list):
        super().__init__(artist_list, "height")
181
+
182
+
183
class Fraction(_Base):
    """
    An instance whose size is a *fraction* of the *ref_size*.

    >>> s = Fraction(0.3, AxesX(ax))
    """

    def __init__(self, fraction, ref_size):
        _api.check_isinstance(Real, fraction=fraction)
        self._fraction_ref = ref_size
        self._fraction = fraction

    def get_size(self, renderer):
        if self._fraction_ref is None:
            # No reference: the fraction itself is the relative size.
            return self._fraction, 0.
        ref_rel, ref_abs = self._fraction_ref.get_size(renderer)
        return ref_rel * self._fraction, ref_abs * self._fraction
203
+
204
+
205
def from_any(size, fraction_ref=None):
    """
    Create a Fixed unit when the first argument is a float, or a
    Fraction unit if that is a string that ends with %. The second
    argument is only meaningful when Fraction unit is created.

    >>> from mpl_toolkits.axes_grid1.axes_size import from_any
    >>> a = from_any(1.2)  # => Fixed(1.2)
    >>> from_any("50%", a)  # => Fraction(0.5, a)
    """
    if isinstance(size, Real):
        return Fixed(size)
    if isinstance(size, str) and size[-1] == "%":
        return Fraction(float(size[:-1]) / 100, fraction_ref)
    raise ValueError("Unknown format")
221
+
222
+
223
class _AxesDecorationsSize(_Base):
    """
    Fixed size, corresponding to the size of decorations on a given Axes side.
    """

    # Per-side extent (in pixels) of the decorations: tight bbox minus the
    # bare axes bbox on that side.
    _get_size_map = {
        "left":   lambda tight_bb, axes_bb: axes_bb.xmin - tight_bb.xmin,
        "right":  lambda tight_bb, axes_bb: tight_bb.xmax - axes_bb.xmax,
        "bottom": lambda tight_bb, axes_bb: axes_bb.ymin - tight_bb.ymin,
        "top":    lambda tight_bb, axes_bb: tight_bb.ymax - axes_bb.ymax,
    }

    def __init__(self, ax, direction):
        _api.check_in_list(self._get_size_map, direction=direction)
        self._direction = direction
        self._ax_list = [ax] if isinstance(ax, Axes) else ax

    def get_size(self, renderer):
        measure = self._get_size_map[self._direction]
        size_px = max(
            measure(ax.get_tightbbox(renderer, call_axes_locator=False),
                    ax.bbox)
            for ax in self._ax_list)
        dpi = renderer.points_to_pixels(72)  # 72 points == 1 inch
        return 0, size_px / dpi
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/inset_locator.py ADDED
@@ -0,0 +1,561 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ A collection of functions and objects for creating or placing inset axes.
3
+ """
4
+
5
+ from matplotlib import _api, _docstring
6
+ from matplotlib.offsetbox import AnchoredOffsetbox
7
+ from matplotlib.patches import Patch, Rectangle
8
+ from matplotlib.path import Path
9
+ from matplotlib.transforms import Bbox, BboxTransformTo
10
+ from matplotlib.transforms import IdentityTransform, TransformedBbox
11
+
12
+ from . import axes_size as Size
13
+ from .parasite_axes import HostAxes
14
+
15
+
16
@_api.deprecated("3.8", alternative="Axes.inset_axes")
class InsetPosition:
    @_docstring.dedent_interpd
    def __init__(self, parent, lbwh):
        """
        An object for positioning an inset axes.

        This is created by specifying the normalized coordinates in the axes,
        instead of the figure.

        Parameters
        ----------
        parent : `~matplotlib.axes.Axes`
            Axes to use for normalizing coordinates.

        lbwh : iterable of four floats
            The left edge, bottom edge, width, and height of the inset axes, in
            units of the normalized coordinate of the *parent* axes.

        See Also
        --------
        :meth:`matplotlib.axes.Axes.set_axes_locator`

        Examples
        --------
        The following bounds the inset axes to a box with 20%% of the parent
        axes height and 40%% of the width. The size of the axes specified
        ([0, 0, 1, 1]) ensures that the axes completely fills the bounding box:

        >>> parent_axes = plt.gca()
        >>> ax_ins = plt.axes([0, 0, 1, 1])
        >>> ip = InsetPosition(parent_axes, [0.5, 0.1, 0.4, 0.2])
        >>> ax_ins.set_axes_locator(ip)
        """
        self.parent = parent
        self.lbwh = lbwh

    def __call__(self, ax, renderer):
        # Map the normalized [l, b, w, h] rectangle through the parent's
        # *current* position so the inset tracks parent layout changes.
        bbox_parent = self.parent.get_position(original=False)
        trans = BboxTransformTo(bbox_parent)
        bbox_inset = Bbox.from_bounds(*self.lbwh)
        bb = TransformedBbox(bbox_inset, trans)
        return bb
59
+
60
+
61
class AnchoredLocatorBase(AnchoredOffsetbox):
    """
    Base for axes locators that anchor a box to a bbox.

    Subclasses provide the box size (via ``get_bbox``); this base class
    handles anchoring it and converting to (sub)figure coordinates.
    """

    def __init__(self, bbox_to_anchor, offsetbox, loc,
                 borderpad=0.5, bbox_transform=None):
        # *offsetbox* is accepted but unused; the child is always None
        # since this object is only used as a locator, never drawn.
        super().__init__(
            loc, pad=0., child=None, borderpad=borderpad,
            bbox_to_anchor=bbox_to_anchor, bbox_transform=bbox_transform
        )

    def draw(self, renderer):
        # This class is used only as an axes locator, never as an artist.
        raise RuntimeError("No draw method should be called")

    def __call__(self, ax, renderer):
        # Called by the Axes layout machinery; returns the inset position.
        if renderer is None:
            renderer = ax.figure._get_renderer()
        self.axes = ax
        bbox = self.get_window_extent(renderer)
        # Anchor the box inside bbox_to_anchor per *loc*/*borderpad*.
        px, py = self.get_offset(bbox.width, bbox.height, 0, 0, renderer)
        bbox_canvas = Bbox.from_bounds(px, py, bbox.width, bbox.height)
        # Convert from display (pixel) coordinates to subfigure-relative ones.
        tr = ax.figure.transSubfigure.inverted()
        return TransformedBbox(bbox_canvas, tr)
81
+
82
+
83
class AnchoredSizeLocator(AnchoredLocatorBase):
    """
    Axes locator whose box size is given in `.axes_size` units
    (absolute inches and/or a fraction of the anchor bbox).
    """

    def __init__(self, bbox_to_anchor, x_size, y_size, loc,
                 borderpad=0.5, bbox_transform=None):
        super().__init__(
            bbox_to_anchor, None, loc,
            borderpad=borderpad, bbox_transform=bbox_transform
        )

        # Accept floats (inches) or "NN%" strings (fraction of anchor bbox).
        self.x_size = Size.from_any(x_size)
        self.y_size = Size.from_any(y_size)

    def get_bbox(self, renderer):
        bbox = self.get_bbox_to_anchor()
        dpi = renderer.points_to_pixels(72.)  # pixels per inch

        # Each axes_size unit yields (relative part, absolute part in inches);
        # combine both into pixels.
        r, a = self.x_size.get_size(renderer)
        width = bbox.width * r + a * dpi
        r, a = self.y_size.get_size(renderer)
        height = bbox.height * r + a * dpi

        fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
        pad = self.pad * fontsize

        return Bbox.from_bounds(0, 0, width, height).padded(pad)
107
+
108
+
109
class AnchoredZoomLocator(AnchoredLocatorBase):
    """
    Axes locator whose box size is the inset's view limits, measured in the
    parent's data coordinates, scaled by *zoom*.
    """

    def __init__(self, parent_axes, zoom, loc,
                 borderpad=0.5,
                 bbox_to_anchor=None,
                 bbox_transform=None):
        self.parent_axes = parent_axes
        self.zoom = zoom
        if bbox_to_anchor is None:
            # Default: anchor within the parent axes' own box.
            bbox_to_anchor = parent_axes.bbox
        super().__init__(
            bbox_to_anchor, None, loc, borderpad=borderpad,
            bbox_transform=bbox_transform)

    def get_bbox(self, renderer):
        # Extent of the inset's view limits in the parent's display
        # coordinates; abs() guards against inverted axes.
        bb = self.parent_axes.transData.transform_bbox(self.axes.viewLim)
        fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
        pad = self.pad * fontsize
        return (
            Bbox.from_bounds(
                0, 0, abs(bb.width * self.zoom), abs(bb.height * self.zoom))
            .padded(pad))
130
+
131
+
132
class BboxPatch(Patch):
    @_docstring.dedent_interpd
    def __init__(self, bbox, **kwargs):
        """
        Patch showing the shape bounded by a Bbox.

        Parameters
        ----------
        bbox : `~matplotlib.transforms.Bbox`
            Bbox to use for the extents of this patch.

        **kwargs
            Patch properties. Valid arguments include:

            %(Patch:kwdoc)s
        """
        if "transform" in kwargs:
            raise ValueError("transform should not be set")

        # The bbox is already in display coordinates; draw it as-is.
        kwargs["transform"] = IdentityTransform()
        super().__init__(**kwargs)
        self.bbox = bbox

    def get_path(self):
        # docstring inherited
        # Closed rectangle tracing the bbox corners counter-clockwise.
        x0, y0, x1, y1 = self.bbox.extents
        return Path._create_closed([(x0, y0), (x1, y0), (x1, y1), (x0, y1)])
159
+
160
+
161
class BboxConnector(Patch):
    @staticmethod
    def get_bbox_edge_pos(bbox, loc):
        """
        Return the ``(x, y)`` coordinates of corner *loc* of *bbox*; parameters
        behave as documented for the `.BboxConnector` constructor.
        """
        x0, y0, x1, y1 = bbox.extents
        if loc == 1:
            return x1, y1
        elif loc == 2:
            return x0, y1
        elif loc == 3:
            return x0, y0
        elif loc == 4:
            return x1, y0
        # NOTE(review): any other *loc* silently falls through and returns
        # None, which will fail later when unpacked — confirm callers always
        # pass 1-4.

    @staticmethod
    def connect_bbox(bbox1, bbox2, loc1, loc2=None):
        """
        Construct a `.Path` connecting corner *loc1* of *bbox1* to corner
        *loc2* of *bbox2*, where parameters behave as documented as for the
        `.BboxConnector` constructor.
        """
        # Rectangles are converted to their display-space bounding boxes.
        if isinstance(bbox1, Rectangle):
            bbox1 = TransformedBbox(Bbox.unit(), bbox1.get_transform())
        if isinstance(bbox2, Rectangle):
            bbox2 = TransformedBbox(Bbox.unit(), bbox2.get_transform())
        if loc2 is None:
            loc2 = loc1
        x1, y1 = BboxConnector.get_bbox_edge_pos(bbox1, loc1)
        x2, y2 = BboxConnector.get_bbox_edge_pos(bbox2, loc2)
        return Path([[x1, y1], [x2, y2]])

    @_docstring.dedent_interpd
    def __init__(self, bbox1, bbox2, loc1, loc2=None, **kwargs):
        """
        Connect two bboxes with a straight line.

        Parameters
        ----------
        bbox1, bbox2 : `~matplotlib.transforms.Bbox`
            Bounding boxes to connect.

        loc1, loc2 : {1, 2, 3, 4}
            Corner of *bbox1* and *bbox2* to draw the line. Valid values are::

                'upper right'  : 1,
                'upper left'   : 2,
                'lower left'   : 3,
                'lower right'  : 4

            *loc2* is optional and defaults to *loc1*.

        **kwargs
            Patch properties for the line drawn. Valid arguments include:

            %(Patch:kwdoc)s
        """
        if "transform" in kwargs:
            raise ValueError("transform should not be set")

        # Corners are computed in display coordinates, so no transform.
        kwargs["transform"] = IdentityTransform()
        # Default to filled only when a face color was explicitly requested.
        kwargs.setdefault(
            "fill", bool({'fc', 'facecolor', 'color'}.intersection(kwargs)))
        super().__init__(**kwargs)
        self.bbox1 = bbox1
        self.bbox2 = bbox2
        self.loc1 = loc1
        self.loc2 = loc2

    def get_path(self):
        # docstring inherited
        return self.connect_bbox(self.bbox1, self.bbox2,
                                 self.loc1, self.loc2)
236
+
237
+
238
class BboxConnectorPatch(BboxConnector):
    @_docstring.dedent_interpd
    def __init__(self, bbox1, bbox2, loc1a, loc2a, loc1b, loc2b, **kwargs):
        """
        Connect two bboxes with a quadrilateral.

        The quadrilateral is specified by two lines that start and end at
        corners of the bboxes. The four sides of the quadrilateral are defined
        by the two lines given, the line between the two corners specified in
        *bbox1* and the line between the two corners specified in *bbox2*.

        Parameters
        ----------
        bbox1, bbox2 : `~matplotlib.transforms.Bbox`
            Bounding boxes to connect.

        loc1a, loc2a, loc1b, loc2b : {1, 2, 3, 4}
            The first line connects corners *loc1a* of *bbox1* and *loc2a* of
            *bbox2*; the second line connects corners *loc1b* of *bbox1* and
            *loc2b* of *bbox2*. Valid values are::

                'upper right'  : 1,
                'upper left'   : 2,
                'lower left'   : 3,
                'lower right'  : 4

        **kwargs
            Patch properties for the line drawn:

            %(Patch:kwdoc)s
        """
        if "transform" in kwargs:
            raise ValueError("transform should not be set")
        # The (loc1a, loc2a) pair is handled by the base class as
        # (loc1, loc2); only the second line's corners are stored here.
        super().__init__(bbox1, bbox2, loc1a, loc2a, **kwargs)
        self.loc1b = loc1b
        self.loc2b = loc2b

    def get_path(self):
        # docstring inherited
        # Concatenate the two connector lines (the second reversed) and close
        # the polygon by repeating the first vertex.
        path1 = self.connect_bbox(self.bbox1, self.bbox2, self.loc1, self.loc2)
        path2 = self.connect_bbox(self.bbox2, self.bbox1,
                                  self.loc2b, self.loc1b)
        path_merged = [*path1.vertices, *path2.vertices, path1.vertices[0]]
        return Path(path_merged)
282
+
283
+
284
def _add_inset_axes(parent_axes, axes_class, axes_kwargs, axes_locator):
    """Helper function to add an inset axes and disable navigation in it."""
    cls = HostAxes if axes_class is None else axes_class
    # Caller kwargs may override navigate=False, but the locator always wins
    # so the inset is positioned by *axes_locator*.
    kwargs = {"navigate": False, **(axes_kwargs or {}),
              "axes_locator": axes_locator}
    new_axes = cls(parent_axes.figure, parent_axes.get_position(), **kwargs)
    return parent_axes.figure.add_axes(new_axes)
294
+
295
+
296
@_docstring.dedent_interpd
def inset_axes(parent_axes, width, height, loc='upper right',
               bbox_to_anchor=None, bbox_transform=None,
               axes_class=None, axes_kwargs=None,
               borderpad=0.5):
    """
    Create an inset axes with a given width and height.

    Both sizes used can be specified either in inches or percentage.
    For example,::

        inset_axes(parent_axes, width='40%%', height='30%%', loc='lower left')

    creates in inset axes in the lower left corner of *parent_axes* which spans
    over 30%% in height and 40%% in width of the *parent_axes*. Since the usage
    of `.inset_axes` may become slightly tricky when exceeding such standard
    cases, it is recommended to read :doc:`the examples
    </gallery/axes_grid1/inset_locator_demo>`.

    Notes
    -----
    The meaning of *bbox_to_anchor* and *bbox_to_transform* is interpreted
    differently from that of legend. The value of bbox_to_anchor
    (or the return value of its get_points method; the default is
    *parent_axes.bbox*) is transformed by the bbox_transform (the default
    is Identity transform) and then interpreted as points in the pixel
    coordinate (which is dpi dependent).

    Thus, following three calls are identical and creates an inset axes
    with respect to the *parent_axes*::

       axins = inset_axes(parent_axes, "30%%", "40%%")
       axins = inset_axes(parent_axes, "30%%", "40%%",
                          bbox_to_anchor=parent_axes.bbox)
       axins = inset_axes(parent_axes, "30%%", "40%%",
                          bbox_to_anchor=(0, 0, 1, 1),
                          bbox_transform=parent_axes.transAxes)

    Parameters
    ----------
    parent_axes : `matplotlib.axes.Axes`
        Axes to place the inset axes.

    width, height : float or str
        Size of the inset axes to create. If a float is provided, it is
        the size in inches, e.g. *width=1.3*. If a string is provided, it is
        the size in relative units, e.g. *width='40%%'*. By default, i.e. if
        neither *bbox_to_anchor* nor *bbox_transform* are specified, those
        are relative to the parent_axes. Otherwise, they are to be understood
        relative to the bounding box provided via *bbox_to_anchor*.

    loc : str, default: 'upper right'
        Location to place the inset axes.  Valid locations are
        'upper left', 'upper center', 'upper right',
        'center left', 'center', 'center right',
        'lower left', 'lower center', 'lower right'.
        For backward compatibility, numeric values are accepted as well.
        See the parameter *loc* of `.Legend` for details.

    bbox_to_anchor : tuple or `~matplotlib.transforms.BboxBase`, optional
        Bbox that the inset axes will be anchored to. If None,
        a tuple of (0, 0, 1, 1) is used if *bbox_transform* is set
        to *parent_axes.transAxes* or *parent_axes.figure.transFigure*.
        Otherwise, *parent_axes.bbox* is used. If a tuple, can be either
        [left, bottom, width, height], or [left, bottom].
        If the kwargs *width* and/or *height* are specified in relative units,
        the 2-tuple [left, bottom] cannot be used. Note that,
        unless *bbox_transform* is set, the units of the bounding box
        are interpreted in the pixel coordinate. When using *bbox_to_anchor*
        with tuple, it almost always makes sense to also specify
        a *bbox_transform*. This might often be the axes transform
        *parent_axes.transAxes*.

    bbox_transform : `~matplotlib.transforms.Transform`, optional
        Transformation for the bbox that contains the inset axes.
        If None, a `.transforms.IdentityTransform` is used. The value
        of *bbox_to_anchor* (or the return value of its get_points method)
        is transformed by the *bbox_transform* and then interpreted
        as points in the pixel coordinate (which is dpi dependent).
        You may provide *bbox_to_anchor* in some normalized coordinate,
        and give an appropriate transform (e.g., *parent_axes.transAxes*).

    axes_class : `~matplotlib.axes.Axes` type, default: `.HostAxes`
        The type of the newly created inset axes.

    axes_kwargs : dict, optional
        Keyword arguments to pass to the constructor of the inset axes.
        Valid arguments include:

        %(Axes:kwdoc)s

    borderpad : float, default: 0.5
        Padding between inset axes and the bbox_to_anchor.
        The units are axes font size, i.e. for a default font size of 10 points
        *borderpad = 0.5* is equivalent to a padding of 5 points.

    Returns
    -------
    inset_axes : *axes_class*
        Inset axes object created.
    """

    # An axes/figure transform without an explicit anchor bbox would anchor a
    # pixel-space default bbox through a normalized transform; warn and use
    # the full normalized box instead.
    if (bbox_transform in [parent_axes.transAxes, parent_axes.figure.transFigure]
            and bbox_to_anchor is None):
        _api.warn_external("Using the axes or figure transform requires a "
                           "bounding box in the respective coordinates. "
                           "Using bbox_to_anchor=(0, 0, 1, 1) now.")
        bbox_to_anchor = (0, 0, 1, 1)
    if bbox_to_anchor is None:
        bbox_to_anchor = parent_axes.bbox
    # Relative ("NN%") sizes need a bbox with a width/height to be relative
    # to, so a bare (left, bottom) 2-tuple is rejected.
    if (isinstance(bbox_to_anchor, tuple) and
            (isinstance(width, str) or isinstance(height, str))):
        if len(bbox_to_anchor) != 4:
            # Fixed: the original message had an unbalanced backtick
            # ("`bbox_to_anchor.") which rendered incorrectly.
            raise ValueError("Using relative units for width or height "
                             "requires to provide a 4-tuple or a "
                             "`Bbox` instance to `bbox_to_anchor`.")
    return _add_inset_axes(
        parent_axes, axes_class, axes_kwargs,
        AnchoredSizeLocator(
            bbox_to_anchor, width, height, loc=loc,
            bbox_transform=bbox_transform, borderpad=borderpad))
417
+
418
+
419
@_docstring.dedent_interpd
def zoomed_inset_axes(parent_axes, zoom, loc='upper right',
                      bbox_to_anchor=None, bbox_transform=None,
                      axes_class=None, axes_kwargs=None,
                      borderpad=0.5):
    """
    Create an anchored inset axes by scaling a parent axes. For usage, also see
    :doc:`the examples </gallery/axes_grid1/inset_locator_demo2>`.

    Parameters
    ----------
    parent_axes : `~matplotlib.axes.Axes`
        Axes to place the inset axes.

    zoom : float
        Scaling factor of the data axes. *zoom* > 1 will enlarge the
        coordinates (i.e., "zoomed in"), while *zoom* < 1 will shrink the
        coordinates (i.e., "zoomed out").

    loc : str, default: 'upper right'
        Location to place the inset axes.  Valid locations are
        'upper left', 'upper center', 'upper right',
        'center left', 'center', 'center right',
        'lower left', 'lower center', 'lower right'.
        For backward compatibility, numeric values are accepted as well.
        See the parameter *loc* of `.Legend` for details.

    bbox_to_anchor : tuple or `~matplotlib.transforms.BboxBase`, optional
        Bbox that the inset axes will be anchored to. If None,
        *parent_axes.bbox* is used. If a tuple, can be either
        [left, bottom, width, height], or [left, bottom].
        If the kwargs *width* and/or *height* are specified in relative units,
        the 2-tuple [left, bottom] cannot be used. Note that
        the units of the bounding box are determined through the transform
        in use. When using *bbox_to_anchor* it almost always makes sense to
        also specify a *bbox_transform*. This might often be the axes transform
        *parent_axes.transAxes*.

    bbox_transform : `~matplotlib.transforms.Transform`, optional
        Transformation for the bbox that contains the inset axes.
        If None, a `.transforms.IdentityTransform` is used (i.e. pixel
        coordinates). This is useful when not providing any argument to
        *bbox_to_anchor*. When using *bbox_to_anchor* it almost always makes
        sense to also specify a *bbox_transform*. This might often be the
        axes transform *parent_axes.transAxes*. Inversely, when specifying
        the axes- or figure-transform here, be aware that not specifying
        *bbox_to_anchor* will use *parent_axes.bbox*, the units of which are
        in display (pixel) coordinates.

    axes_class : `~matplotlib.axes.Axes` type, default: `.HostAxes`
        The type of the newly created inset axes.

    axes_kwargs : dict, optional
        Keyword arguments to pass to the constructor of the inset axes.
        Valid arguments include:

        %(Axes:kwdoc)s

    borderpad : float, default: 0.5
        Padding between inset axes and the bbox_to_anchor.
        The units are axes font size, i.e. for a default font size of 10 points
        *borderpad = 0.5* is equivalent to a padding of 5 points.

    Returns
    -------
    inset_axes : *axes_class*
        Inset axes object created.
    """

    # NOTE(review): the bbox_to_anchor doc above mentions *width*/*height*
    # kwargs which this function does not take — looks copy-pasted from
    # `inset_axes`; confirm against upstream before editing further.
    # The AnchoredZoomLocator computes the inset's size dynamically from the
    # parent data limits and *zoom*; no explicit size is needed here.
    return _add_inset_axes(
        parent_axes, axes_class, axes_kwargs,
        AnchoredZoomLocator(
            parent_axes, zoom=zoom, loc=loc,
            bbox_to_anchor=bbox_to_anchor, bbox_transform=bbox_transform,
            borderpad=borderpad))
494
+
495
+
496
class _TransformedBboxWithCallback(TransformedBbox):
    """
    Variant of `.TransformBbox` which calls *callback* before returning points.

    Used by `.mark_inset` to unstale the parent axes' viewlim as needed.
    """

    def __init__(self, *args, callback, **kwargs):
        # *callback* is keyword-only; everything else goes to TransformedBbox.
        super().__init__(*args, **kwargs)
        self._callback = callback

    def get_points(self):
        # Give the owner a chance to refresh stale state before the points
        # are read.
        self._callback()
        return super().get_points()
510
+
511
+
512
@_docstring.dedent_interpd
def mark_inset(parent_axes, inset_axes, loc1, loc2, **kwargs):
    """
    Draw a box to mark the location of an area represented by an inset axes.

    This function draws a box in *parent_axes* at the bounding box of
    *inset_axes*, and shows a connection with the inset axes by drawing lines
    at the corners, giving a "zoomed in" effect.

    Parameters
    ----------
    parent_axes : `~matplotlib.axes.Axes`
        Axes which contains the area of the inset axes.

    inset_axes : `~matplotlib.axes.Axes`
        The inset axes.

    loc1, loc2 : {1, 2, 3, 4}
        Corners to use for connecting the inset axes and the area in the
        parent axes.

    **kwargs
        Patch properties for the lines and box drawn:

        %(Patch:kwdoc)s

    Returns
    -------
    pp : `~matplotlib.patches.Patch`
        The patch drawn to represent the area of the inset axes.

    p1, p2 : `~matplotlib.patches.Patch`
        The patches connecting two corners of the inset axes and its area.
    """
    # The marked rectangle is the inset's view limits mapped through the
    # parent's data transform; the callback un-stales the parent viewlim
    # lazily whenever the bbox is queried.
    rect = _TransformedBboxWithCallback(
        inset_axes.viewLim, parent_axes.transData,
        callback=parent_axes._unstale_viewLim)

    # Fill only if a face color was explicitly requested.
    kwargs.setdefault("fill", bool({'fc', 'facecolor', 'color'}.intersection(kwargs)))
    pp = BboxPatch(rect, **kwargs)
    parent_axes.add_patch(pp)

    # Connector lines live on the inset axes but must not be clipped to it.
    p1 = BboxConnector(inset_axes.bbox, rect, loc1=loc1, **kwargs)
    inset_axes.add_patch(p1)
    p1.set_clip_on(False)
    p2 = BboxConnector(inset_axes.bbox, rect, loc1=loc2, **kwargs)
    inset_axes.add_patch(p2)
    p2.set_clip_on(False)

    return pp, p1, p2
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/mpl_axes.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import matplotlib.axes as maxes
2
+ from matplotlib.artist import Artist
3
+ from matplotlib.axis import XAxis, YAxis
4
+
5
+
6
+ class SimpleChainedObjects:
7
+ def __init__(self, objects):
8
+ self._objects = objects
9
+
10
+ def __getattr__(self, k):
11
+ _a = SimpleChainedObjects([getattr(a, k) for a in self._objects])
12
+ return _a
13
+
14
+ def __call__(self, *args, **kwargs):
15
+ for m in self._objects:
16
+ m(*args, **kwargs)
17
+
18
+
19
class Axes(maxes.Axes):
    """Axes subclass exposing per-side axis artists via ``ax.axis[...]``."""

    class AxisDict(dict):
        # Maps side name ("left", "right", "bottom", "top") to its
        # SimpleAxisArtist; also accepts tuple and full-slice keys, which
        # return chained proxies addressing several sides at once.
        def __init__(self, axes):
            self.axes = axes
            super().__init__()

        def __getitem__(self, k):
            if isinstance(k, tuple):
                r = SimpleChainedObjects(
                    # super() within a list comprehension needs explicit args.
                    [super(Axes.AxisDict, self).__getitem__(k1) for k1 in k])
                return r
            elif isinstance(k, slice):
                if k.start is None and k.stop is None and k.step is None:
                    # axis[:] addresses all stored sides at once.
                    return SimpleChainedObjects(list(self.values()))
                else:
                    raise ValueError("Unsupported slice")
            else:
                return dict.__getitem__(self, k)

        def __call__(self, *v, **kwargs):
            # Keep the accessor callable like the standard Axes.axis().
            return maxes.Axes.axis(self.axes, *v, **kwargs)

    @property
    def axis(self):
        # Replaces the Axes.axis method with the dict-like accessor built
        # in clear().
        return self._axislines

    def clear(self):
        # docstring inherited
        super().clear()
        # Init axis artists.
        self._axislines = self.AxisDict(self)
        self._axislines.update(
            bottom=SimpleAxisArtist(self.xaxis, 1, self.spines["bottom"]),
            top=SimpleAxisArtist(self.xaxis, 2, self.spines["top"]),
            left=SimpleAxisArtist(self.yaxis, 1, self.spines["left"]),
            right=SimpleAxisArtist(self.yaxis, 2, self.spines["right"]))
+ right=SimpleAxisArtist(self.yaxis, 2, self.spines["right"]))
57
+
58
+
59
class SimpleAxisArtist(Artist):
    """
    Artist giving unified access to one side of an axes: its spine and the
    corresponding tick lines, tick labels, and axis label.
    """

    def __init__(self, axis, axisnum, spine):
        # axisnum is 1 for bottom/left, 2 for top/right, matching the
        # tick1/tick2 and label1/label2 naming of matplotlib ticks.
        self._axis = axis
        self._axisnum = axisnum
        self.line = spine

        if isinstance(axis, XAxis):
            self._axis_direction = ["bottom", "top"][axisnum-1]
        elif isinstance(axis, YAxis):
            self._axis_direction = ["left", "right"][axisnum-1]
        else:
            raise ValueError(
                f"axis must be instance of XAxis or YAxis, but got {axis}")
        super().__init__()

    @property
    def major_ticks(self):
        # Chained proxy over this side's major tick lines.
        tickline = "tick%dline" % self._axisnum
        return SimpleChainedObjects([getattr(tick, tickline)
                                     for tick in self._axis.get_major_ticks()])

    @property
    def major_ticklabels(self):
        # Chained proxy over this side's major tick labels.
        label = "label%d" % self._axisnum
        return SimpleChainedObjects([getattr(tick, label)
                                     for tick in self._axis.get_major_ticks()])

    @property
    def label(self):
        return self._axis.label

    def set_visible(self, b):
        # Toggle all sub-elements together with the spine; the underlying
        # axis object stays visible so the other side keeps working.
        self.toggle(all=b)
        self.line.set_visible(b)
        self._axis.set_visible(True)
        super().set_visible(b)

    def set_label(self, txt):
        self._axis.set_label_text(txt)

    def toggle(self, all=None, ticks=None, ticklabels=None, label=None):
        """
        Toggle visibility of this side's ticks, tick labels, and axis label.

        *all* sets a default for the three elements; the individual keyword
        arguments then override it. None leaves an element unchanged.
        """
        if all:
            _ticks, _ticklabels, _label = True, True, True
        elif all is not None:
            _ticks, _ticklabels, _label = False, False, False
        else:
            _ticks, _ticklabels, _label = None, None, None

        if ticks is not None:
            _ticks = ticks
        if ticklabels is not None:
            _ticklabels = ticklabels
        if label is not None:
            _label = label

        if _ticks is not None:
            tickparam = {f"tick{self._axisnum}On": _ticks}
            self._axis.set_tick_params(**tickparam)
        if _ticklabels is not None:
            tickparam = {f"label{self._axisnum}On": _ticklabels}
            self._axis.set_tick_params(**tickparam)

        if _label is not None:
            # Only hide the label if it is currently positioned on this side;
            # enabling it also moves it to this side.
            pos = self._axis.get_label_position()
            if (pos == self._axis_direction) and not _label:
                self._axis.label.set_visible(False)
            elif _label:
                self._axis.label.set_visible(True)
                self._axis.set_label_position(self._axis_direction)
+ self._axis.set_label_position(self._axis_direction)
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/parasite_axes.py ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from matplotlib import _api, cbook
2
+ import matplotlib.artist as martist
3
+ import matplotlib.transforms as mtransforms
4
+ from matplotlib.transforms import Bbox
5
+ from .mpl_axes import Axes
6
+
7
+
8
class ParasiteAxesBase:
    """
    Mixin turning an Axes class into a parasite axes: an axes drawn on top of
    a host axes, optionally through an auxiliary transform, with view limits
    kept in sync with the host.
    """

    def __init__(self, parent_axes, aux_transform=None,
                 *, viewlim_mode=None, **kwargs):
        self._parent_axes = parent_axes
        self.transAux = aux_transform
        self.set_viewlim_mode(viewlim_mode)
        # The host draws the frame; the parasite must stay frameless.
        kwargs["frameon"] = False
        super().__init__(parent_axes.figure, parent_axes._position, **kwargs)

    def clear(self):
        super().clear()
        # Hide artists recreated by clear() (spines, axis, etc.); the host
        # provides the visible decorations.
        martist.setp(self.get_children(), visible=False)
        # Share the property cycle with the host so colors don't repeat.
        self._get_lines = self._parent_axes._get_lines
        self._parent_axes.callbacks._connect_picklable(
            "xlim_changed", self._sync_lims)
        self._parent_axes.callbacks._connect_picklable(
            "ylim_changed", self._sync_lims)

    def pick(self, mouseevent):
        # This most likely goes to Artist.pick (depending on axes_class given
        # to the factory), which only handles pick events registered on the
        # axes associated with each child:
        super().pick(mouseevent)
        # But parasite axes are additionally given pick events from their host
        # axes (cf. HostAxesBase.pick), which we handle here:
        for a in self.get_children():
            if (hasattr(mouseevent.inaxes, "parasites")
                    and self in mouseevent.inaxes.parasites):
                a.pick(mouseevent)

    # aux_transform support

    def _set_lim_and_transforms(self):
        if self.transAux is not None:
            # Data coordinates go through the aux transform, then the host's
            # data transform; axes coordinates are shared with the host.
            self.transAxes = self._parent_axes.transAxes
            self.transData = self.transAux + self._parent_axes.transData
            self._xaxis_transform = mtransforms.blended_transform_factory(
                self.transData, self.transAxes)
            self._yaxis_transform = mtransforms.blended_transform_factory(
                self.transAxes, self.transData)
        else:
            super()._set_lim_and_transforms()

    def set_viewlim_mode(self, mode):
        _api.check_in_list([None, "equal", "transform"], mode=mode)
        self._viewlim_mode = mode

    def get_viewlim_mode(self):
        return self._viewlim_mode

    def _sync_lims(self, parent):
        # Callback for the host's {x,y}lim_changed events.
        viewlim = parent.viewLim.frozen()
        mode = self.get_viewlim_mode()
        if mode is None:
            pass
        elif mode == "equal":
            self.viewLim.set(viewlim)
        elif mode == "transform":
            self.viewLim.set(viewlim.transformed(self.transAux.inverted()))
        else:
            # Unreachable if set_viewlim_mode was used; re-raise via the
            # same validator for a consistent error message.
            _api.check_in_list([None, "equal", "transform"], mode=mode)

    # end of aux_transform support
+ _api.check_in_list([None, "equal", "transform"], mode=mode)
70
+
71
+ # end of aux_transform support
72
+
73
+
74
# Factory generating (and caching) a parasite variant of any Axes class;
# ParasiteAxes is the variant for the local mpl_axes.Axes.
parasite_axes_class_factory = cbook._make_class_factory(
    ParasiteAxesBase, "{}Parasite")
ParasiteAxes = parasite_axes_class_factory(Axes)
77
+
78
+
79
class HostAxesBase:
    """
    Mixin turning an Axes class into a host axes that can carry parasite
    axes (see `ParasiteAxesBase`) and create twin axes via them.
    """

    def __init__(self, *args, **kwargs):
        # List of attached parasite axes, drawn together with the host.
        self.parasites = []
        super().__init__(*args, **kwargs)

    def get_aux_axes(
            self, tr=None, viewlim_mode="equal", axes_class=None, **kwargs):
        """
        Add a parasite axes to this host.

        Despite this method's name, this should actually be thought of as an
        ``add_parasite_axes`` method.

        .. versionchanged:: 3.7
           Defaults to same base axes class as host axes.

        Parameters
        ----------
        tr : `~matplotlib.transforms.Transform` or None, default: None
            If a `.Transform`, the following relation will hold:
            ``parasite.transData = tr + host.transData``.
            If None, the parasite's and the host's ``transData`` are unrelated.
        viewlim_mode : {"equal", "transform", None}, default: "equal"
            How the parasite's view limits are set: directly equal to the
            parent axes ("equal"), equal after application of *tr*
            ("transform"), or independently (None).
        axes_class : subclass type of `~matplotlib.axes.Axes`, optional
            The `~.axes.Axes` subclass that is instantiated.  If None, the base
            class of the host axes is used.
        **kwargs
            Other parameters are forwarded to the parasite axes constructor.
        """
        if axes_class is None:
            axes_class = self._base_axes_class
        parasite_axes_class = parasite_axes_class_factory(axes_class)
        ax2 = parasite_axes_class(
            self, tr, viewlim_mode=viewlim_mode, **kwargs)
        # note that ax2.transData == tr + ax1.transData
        # Anything you draw in ax2 will match the ticks and grids of ax1.
        self.parasites.append(ax2)
        ax2._remove_method = self.parasites.remove
        return ax2

    def draw(self, renderer):
        # Temporarily splice the parasites' children into our own child list
        # so they are drawn (and z-sorted) together with the host's artists.
        orig_children_len = len(self._children)

        locator = self.get_axes_locator()
        if locator:
            pos = locator(self, renderer)
            self.set_position(pos, which="active")
            self.apply_aspect(pos)
        else:
            self.apply_aspect()

        rect = self.get_position()
        for ax in self.parasites:
            ax.apply_aspect(rect)
            self._children.extend(ax.get_children())

        super().draw(renderer)
        # Restore the original child list.
        del self._children[orig_children_len:]

    def clear(self):
        # docstring inherited
        super().clear()
        for ax in self.parasites:
            ax.clear()

    def pick(self, mouseevent):
        super().pick(mouseevent)
        # Also pass pick events on to parasite axes and, in turn, their
        # children (cf. ParasiteAxesBase.pick)
        for a in self.parasites:
            a.pick(mouseevent)

    def twinx(self, axes_class=None):
        """
        Create a twin of Axes with a shared x-axis but independent y-axis.

        The y-axis of self will have ticks on the left and the returned axes
        will have ticks on the right.
        """
        ax = self._add_twin_axes(axes_class, sharex=self)
        self.axis["right"].set_visible(False)
        ax.axis["right"].set_visible(True)
        ax.axis["left", "top", "bottom"].set_visible(False)
        return ax

    def twiny(self, axes_class=None):
        """
        Create a twin of Axes with a shared y-axis but independent x-axis.

        The x-axis of self will have ticks on the bottom and the returned axes
        will have ticks on the top.
        """
        ax = self._add_twin_axes(axes_class, sharey=self)
        self.axis["top"].set_visible(False)
        ax.axis["top"].set_visible(True)
        ax.axis["left", "right", "bottom"].set_visible(False)
        return ax

    def twin(self, aux_trans=None, axes_class=None):
        """
        Create a twin of Axes with no shared axis.

        While self will have ticks on the left and bottom axis, the returned
        axes will have ticks on the top and right axis.
        """
        if aux_trans is None:
            aux_trans = mtransforms.IdentityTransform()
        ax = self._add_twin_axes(
            axes_class, aux_transform=aux_trans, viewlim_mode="transform")
        self.axis["top", "right"].set_visible(False)
        ax.axis["top", "right"].set_visible(True)
        ax.axis["left", "bottom"].set_visible(False)
        return ax

    def _add_twin_axes(self, axes_class, **kwargs):
        """
        Helper for `.twinx`/`.twiny`/`.twin`.

        *kwargs* are forwarded to the parasite axes constructor.
        """
        if axes_class is None:
            axes_class = self._base_axes_class
        ax = parasite_axes_class_factory(axes_class)(self, **kwargs)
        self.parasites.append(ax)
        ax._remove_method = self._remove_any_twin
        return ax

    def _remove_any_twin(self, ax):
        # Undo the side-visibility changes made by twinx/twiny/twin; a shared
        # axis means the corresponding side belonged to the twin and must be
        # restored on the host (ticks only, no labels).
        self.parasites.remove(ax)
        restore = ["top", "right"]
        if ax._sharex:
            restore.remove("top")
        if ax._sharey:
            restore.remove("right")
        self.axis[tuple(restore)].set_visible(True)
        self.axis[tuple(restore)].toggle(ticklabels=False, label=False)

    @_api.make_keyword_only("3.8", "call_axes_locator")
    def get_tightbbox(self, renderer=None, call_axes_locator=True,
                      bbox_extra_artists=None):
        # Union of the host's tight bbox and all parasites'; degenerate
        # (zero-size) boxes are excluded.
        bbs = [
            *[ax.get_tightbbox(renderer, call_axes_locator=call_axes_locator)
              for ax in self.parasites],
            super().get_tightbbox(renderer,
                                  call_axes_locator=call_axes_locator,
                                  bbox_extra_artists=bbox_extra_artists)]
        return Bbox.union([b for b in bbs if b.width != 0 or b.height != 0])
228
+
229
+
230
# Factory generating (and caching) a host variant of any Axes class;
# host_subplot_class_factory is a historical alias.
host_axes_class_factory = host_subplot_class_factory = \
    cbook._make_class_factory(HostAxesBase, "{}HostAxes", "_base_axes_class")
HostAxes = SubplotHost = host_axes_class_factory(Axes)
233
+
234
+
235
def host_axes(*args, axes_class=Axes, figure=None, **kwargs):
    """
    Create axes that can act as a hosts to parasitic axes.

    Parameters
    ----------
    figure : `~matplotlib.figure.Figure`
        Figure to which the axes will be added. Defaults to the current figure
        `.pyplot.gcf()`.

    *args, **kwargs
        Will be passed on to the underlying `~.axes.Axes` object creation.
    """
    # Imported lazily so this module does not require pyplot at import time.
    import matplotlib.pyplot as plt
    if figure is None:
        figure = plt.gcf()
    cls = host_axes_class_factory(axes_class)
    ax = cls(figure, *args, **kwargs)
    figure.add_axes(ax)
    return ax
255
+
256
+
257
# Historical alias kept for backward compatibility.
host_subplot = host_axes
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__init__.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from pathlib import Path
2
+
3
+
4
+ # Check that the test directories exist
5
+ if not (Path(__file__).parent / "baseline_images").exists():
6
+ raise OSError(
7
+ 'The baseline image directory does not exist. '
8
+ 'This is most likely because the test data is not installed. '
9
+ 'You may need to install matplotlib from source to get the '
10
+ 'test data.')
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (480 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (306 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/__pycache__/test_axes_grid1.cpython-310.pyc ADDED
Binary file (24.3 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/conftest.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ from matplotlib.testing.conftest import (mpl_test_settings, # noqa
2
+ pytest_configure, pytest_unconfigure)
valley/lib/python3.10/site-packages/mpl_toolkits/axes_grid1/tests/test_axes_grid1.py ADDED
@@ -0,0 +1,792 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from itertools import product
2
+ import io
3
+ import platform
4
+
5
+ import matplotlib as mpl
6
+ import matplotlib.pyplot as plt
7
+ import matplotlib.ticker as mticker
8
+ from matplotlib import cbook
9
+ from matplotlib.backend_bases import MouseEvent
10
+ from matplotlib.colors import LogNorm
11
+ from matplotlib.patches import Circle, Ellipse
12
+ from matplotlib.transforms import Bbox, TransformedBbox
13
+ from matplotlib.testing.decorators import (
14
+ check_figures_equal, image_comparison, remove_ticks_and_titles)
15
+
16
+ from mpl_toolkits.axes_grid1 import (
17
+ axes_size as Size,
18
+ host_subplot, make_axes_locatable,
19
+ Grid, AxesGrid, ImageGrid)
20
+ from mpl_toolkits.axes_grid1.anchored_artists import (
21
+ AnchoredAuxTransformBox, AnchoredDrawingArea, AnchoredEllipse,
22
+ AnchoredDirectionArrows, AnchoredSizeBar)
23
+ from mpl_toolkits.axes_grid1.axes_divider import (
24
+ Divider, HBoxDivider, make_axes_area_auto_adjustable, SubplotDivider,
25
+ VBoxDivider)
26
+ from mpl_toolkits.axes_grid1.axes_rgb import RGBAxes
27
+ from mpl_toolkits.axes_grid1.inset_locator import (
28
+ zoomed_inset_axes, mark_inset, inset_axes, BboxConnectorPatch,
29
+ InsetPosition)
30
+ import mpl_toolkits.axes_grid1.mpl_axes
31
+ import pytest
32
+
33
+ import numpy as np
34
+ from numpy.testing import assert_array_equal, assert_array_almost_equal
35
+
36
+
37
+ def test_divider_append_axes():
38
+ fig, ax = plt.subplots()
39
+ divider = make_axes_locatable(ax)
40
+ axs = {
41
+ "main": ax,
42
+ "top": divider.append_axes("top", 1.2, pad=0.1, sharex=ax),
43
+ "bottom": divider.append_axes("bottom", 1.2, pad=0.1, sharex=ax),
44
+ "left": divider.append_axes("left", 1.2, pad=0.1, sharey=ax),
45
+ "right": divider.append_axes("right", 1.2, pad=0.1, sharey=ax),
46
+ }
47
+ fig.canvas.draw()
48
+ bboxes = {k: axs[k].get_window_extent() for k in axs}
49
+ dpi = fig.dpi
50
+ assert bboxes["top"].height == pytest.approx(1.2 * dpi)
51
+ assert bboxes["bottom"].height == pytest.approx(1.2 * dpi)
52
+ assert bboxes["left"].width == pytest.approx(1.2 * dpi)
53
+ assert bboxes["right"].width == pytest.approx(1.2 * dpi)
54
+ assert bboxes["top"].y0 - bboxes["main"].y1 == pytest.approx(0.1 * dpi)
55
+ assert bboxes["main"].y0 - bboxes["bottom"].y1 == pytest.approx(0.1 * dpi)
56
+ assert bboxes["main"].x0 - bboxes["left"].x1 == pytest.approx(0.1 * dpi)
57
+ assert bboxes["right"].x0 - bboxes["main"].x1 == pytest.approx(0.1 * dpi)
58
+ assert bboxes["left"].y0 == bboxes["main"].y0 == bboxes["right"].y0
59
+ assert bboxes["left"].y1 == bboxes["main"].y1 == bboxes["right"].y1
60
+ assert bboxes["top"].x0 == bboxes["main"].x0 == bboxes["bottom"].x0
61
+ assert bboxes["top"].x1 == bboxes["main"].x1 == bboxes["bottom"].x1
62
+
63
+
64
+ # Update style when regenerating the test image
65
+ @image_comparison(['twin_axes_empty_and_removed'], extensions=["png"], tol=1,
66
+ style=('classic', '_classic_test_patch'))
67
+ def test_twin_axes_empty_and_removed():
68
+ # Purely cosmetic font changes (avoid overlap)
69
+ mpl.rcParams.update(
70
+ {"font.size": 8, "xtick.labelsize": 8, "ytick.labelsize": 8})
71
+ generators = ["twinx", "twiny", "twin"]
72
+ modifiers = ["", "host invisible", "twin removed", "twin invisible",
73
+ "twin removed\nhost invisible"]
74
+ # Unmodified host subplot at the beginning for reference
75
+ h = host_subplot(len(modifiers)+1, len(generators), 2)
76
+ h.text(0.5, 0.5, "host_subplot",
77
+ horizontalalignment="center", verticalalignment="center")
78
+ # Host subplots with various modifications (twin*, visibility) applied
79
+ for i, (mod, gen) in enumerate(product(modifiers, generators),
80
+ len(generators) + 1):
81
+ h = host_subplot(len(modifiers)+1, len(generators), i)
82
+ t = getattr(h, gen)()
83
+ if "twin invisible" in mod:
84
+ t.axis[:].set_visible(False)
85
+ if "twin removed" in mod:
86
+ t.remove()
87
+ if "host invisible" in mod:
88
+ h.axis[:].set_visible(False)
89
+ h.text(0.5, 0.5, gen + ("\n" + mod if mod else ""),
90
+ horizontalalignment="center", verticalalignment="center")
91
+ plt.subplots_adjust(wspace=0.5, hspace=1)
92
+
93
+
94
+ def test_twin_axes_both_with_units():
95
+ host = host_subplot(111)
96
+ with pytest.warns(mpl.MatplotlibDeprecationWarning):
97
+ host.plot_date([0, 1, 2], [0, 1, 2], xdate=False, ydate=True)
98
+ twin = host.twinx()
99
+ twin.plot(["a", "b", "c"])
100
+ assert host.get_yticklabels()[0].get_text() == "00:00:00"
101
+ assert twin.get_yticklabels()[0].get_text() == "a"
102
+
103
+
104
+ def test_axesgrid_colorbar_log_smoketest():
105
+ fig = plt.figure()
106
+ grid = AxesGrid(fig, 111, # modified to be only subplot
107
+ nrows_ncols=(1, 1),
108
+ ngrids=1,
109
+ label_mode="L",
110
+ cbar_location="top",
111
+ cbar_mode="single",
112
+ )
113
+
114
+ Z = 10000 * np.random.rand(10, 10)
115
+ im = grid[0].imshow(Z, interpolation="nearest", norm=LogNorm())
116
+
117
+ grid.cbar_axes[0].colorbar(im)
118
+
119
+
120
+ def test_inset_colorbar_tight_layout_smoketest():
121
+ fig, ax = plt.subplots(1, 1)
122
+ pts = ax.scatter([0, 1], [0, 1], c=[1, 5])
123
+
124
+ cax = inset_axes(ax, width="3%", height="70%")
125
+ plt.colorbar(pts, cax=cax)
126
+
127
+ with pytest.warns(UserWarning, match="This figure includes Axes"):
128
+ # Will warn, but not raise an error
129
+ plt.tight_layout()
130
+
131
+
132
+ @image_comparison(['inset_locator.png'], style='default', remove_text=True)
133
+ def test_inset_locator():
134
+ fig, ax = plt.subplots(figsize=[5, 4])
135
+
136
+ # prepare the demo image
137
+ # Z is a 15x15 array
138
+ Z = cbook.get_sample_data("axes_grid/bivariate_normal.npy")
139
+ extent = (-3, 4, -4, 3)
140
+ Z2 = np.zeros((150, 150))
141
+ ny, nx = Z.shape
142
+ Z2[30:30+ny, 30:30+nx] = Z
143
+
144
+ ax.imshow(Z2, extent=extent, interpolation="nearest",
145
+ origin="lower")
146
+
147
+ axins = zoomed_inset_axes(ax, zoom=6, loc='upper right')
148
+ axins.imshow(Z2, extent=extent, interpolation="nearest",
149
+ origin="lower")
150
+ axins.yaxis.get_major_locator().set_params(nbins=7)
151
+ axins.xaxis.get_major_locator().set_params(nbins=7)
152
+ # sub region of the original image
153
+ x1, x2, y1, y2 = -1.5, -0.9, -2.5, -1.9
154
+ axins.set_xlim(x1, x2)
155
+ axins.set_ylim(y1, y2)
156
+
157
+ plt.xticks(visible=False)
158
+ plt.yticks(visible=False)
159
+
160
+ # draw a bbox of the region of the inset axes in the parent axes and
161
+ # connecting lines between the bbox and the inset axes area
162
+ mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
163
+
164
+ asb = AnchoredSizeBar(ax.transData,
165
+ 0.5,
166
+ '0.5',
167
+ loc='lower center',
168
+ pad=0.1, borderpad=0.5, sep=5,
169
+ frameon=False)
170
+ ax.add_artist(asb)
171
+
172
+
173
+ @image_comparison(['inset_axes.png'], style='default', remove_text=True)
174
+ def test_inset_axes():
175
+ fig, ax = plt.subplots(figsize=[5, 4])
176
+
177
+ # prepare the demo image
178
+ # Z is a 15x15 array
179
+ Z = cbook.get_sample_data("axes_grid/bivariate_normal.npy")
180
+ extent = (-3, 4, -4, 3)
181
+ Z2 = np.zeros((150, 150))
182
+ ny, nx = Z.shape
183
+ Z2[30:30+ny, 30:30+nx] = Z
184
+
185
+ ax.imshow(Z2, extent=extent, interpolation="nearest",
186
+ origin="lower")
187
+
188
+ # creating our inset axes with a bbox_transform parameter
189
+ axins = inset_axes(ax, width=1., height=1., bbox_to_anchor=(1, 1),
190
+ bbox_transform=ax.transAxes)
191
+
192
+ axins.imshow(Z2, extent=extent, interpolation="nearest",
193
+ origin="lower")
194
+ axins.yaxis.get_major_locator().set_params(nbins=7)
195
+ axins.xaxis.get_major_locator().set_params(nbins=7)
196
+ # sub region of the original image
197
+ x1, x2, y1, y2 = -1.5, -0.9, -2.5, -1.9
198
+ axins.set_xlim(x1, x2)
199
+ axins.set_ylim(y1, y2)
200
+
201
+ plt.xticks(visible=False)
202
+ plt.yticks(visible=False)
203
+
204
+ # draw a bbox of the region of the inset axes in the parent axes and
205
+ # connecting lines between the bbox and the inset axes area
206
+ mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
207
+
208
+ asb = AnchoredSizeBar(ax.transData,
209
+ 0.5,
210
+ '0.5',
211
+ loc='lower center',
212
+ pad=0.1, borderpad=0.5, sep=5,
213
+ frameon=False)
214
+ ax.add_artist(asb)
215
+
216
+
217
+ def test_inset_axes_complete():
218
+ dpi = 100
219
+ figsize = (6, 5)
220
+ fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
221
+ fig.subplots_adjust(.1, .1, .9, .9)
222
+
223
+ ins = inset_axes(ax, width=2., height=2., borderpad=0)
224
+ fig.canvas.draw()
225
+ assert_array_almost_equal(
226
+ ins.get_position().extents,
227
+ [(0.9*figsize[0]-2.)/figsize[0], (0.9*figsize[1]-2.)/figsize[1],
228
+ 0.9, 0.9])
229
+
230
+ ins = inset_axes(ax, width="40%", height="30%", borderpad=0)
231
+ fig.canvas.draw()
232
+ assert_array_almost_equal(
233
+ ins.get_position().extents, [.9-.8*.4, .9-.8*.3, 0.9, 0.9])
234
+
235
+ ins = inset_axes(ax, width=1., height=1.2, bbox_to_anchor=(200, 100),
236
+ loc=3, borderpad=0)
237
+ fig.canvas.draw()
238
+ assert_array_almost_equal(
239
+ ins.get_position().extents,
240
+ [200/dpi/figsize[0], 100/dpi/figsize[1],
241
+ (200/dpi+1)/figsize[0], (100/dpi+1.2)/figsize[1]])
242
+
243
+ ins1 = inset_axes(ax, width="35%", height="60%", loc=3, borderpad=1)
244
+ ins2 = inset_axes(ax, width="100%", height="100%",
245
+ bbox_to_anchor=(0, 0, .35, .60),
246
+ bbox_transform=ax.transAxes, loc=3, borderpad=1)
247
+ fig.canvas.draw()
248
+ assert_array_equal(ins1.get_position().extents,
249
+ ins2.get_position().extents)
250
+
251
+ with pytest.raises(ValueError):
252
+ ins = inset_axes(ax, width="40%", height="30%",
253
+ bbox_to_anchor=(0.4, 0.5))
254
+
255
+ with pytest.warns(UserWarning):
256
+ ins = inset_axes(ax, width="40%", height="30%",
257
+ bbox_transform=ax.transAxes)
258
+
259
+
260
+ def test_inset_axes_tight():
261
+ # gh-26287 found that inset_axes raised with bbox_inches=tight
262
+ fig, ax = plt.subplots()
263
+ inset_axes(ax, width=1.3, height=0.9)
264
+
265
+ f = io.BytesIO()
266
+ fig.savefig(f, bbox_inches="tight")
267
+
268
+
269
+ @image_comparison(['fill_facecolor.png'], remove_text=True, style='mpl20')
270
+ def test_fill_facecolor():
271
+ fig, ax = plt.subplots(1, 5)
272
+ fig.set_size_inches(5, 5)
273
+ for i in range(1, 4):
274
+ ax[i].yaxis.set_visible(False)
275
+ ax[4].yaxis.tick_right()
276
+ bbox = Bbox.from_extents(0, 0.4, 1, 0.6)
277
+
278
+ # fill with blue by setting 'fc' field
279
+ bbox1 = TransformedBbox(bbox, ax[0].transData)
280
+ bbox2 = TransformedBbox(bbox, ax[1].transData)
281
+ # set color to BboxConnectorPatch
282
+ p = BboxConnectorPatch(
283
+ bbox1, bbox2, loc1a=1, loc2a=2, loc1b=4, loc2b=3,
284
+ ec="r", fc="b")
285
+ p.set_clip_on(False)
286
+ ax[0].add_patch(p)
287
+ # set color to marked area
288
+ axins = zoomed_inset_axes(ax[0], 1, loc='upper right')
289
+ axins.set_xlim(0, 0.2)
290
+ axins.set_ylim(0, 0.2)
291
+ plt.gca().axes.xaxis.set_ticks([])
292
+ plt.gca().axes.yaxis.set_ticks([])
293
+ mark_inset(ax[0], axins, loc1=2, loc2=4, fc="b", ec="0.5")
294
+
295
+ # fill with yellow by setting 'facecolor' field
296
+ bbox3 = TransformedBbox(bbox, ax[1].transData)
297
+ bbox4 = TransformedBbox(bbox, ax[2].transData)
298
+ # set color to BboxConnectorPatch
299
+ p = BboxConnectorPatch(
300
+ bbox3, bbox4, loc1a=1, loc2a=2, loc1b=4, loc2b=3,
301
+ ec="r", facecolor="y")
302
+ p.set_clip_on(False)
303
+ ax[1].add_patch(p)
304
+ # set color to marked area
305
+ axins = zoomed_inset_axes(ax[1], 1, loc='upper right')
306
+ axins.set_xlim(0, 0.2)
307
+ axins.set_ylim(0, 0.2)
308
+ plt.gca().axes.xaxis.set_ticks([])
309
+ plt.gca().axes.yaxis.set_ticks([])
310
+ mark_inset(ax[1], axins, loc1=2, loc2=4, facecolor="y", ec="0.5")
311
+
312
+ # fill with green by setting 'color' field
313
+ bbox5 = TransformedBbox(bbox, ax[2].transData)
314
+ bbox6 = TransformedBbox(bbox, ax[3].transData)
315
+ # set color to BboxConnectorPatch
316
+ p = BboxConnectorPatch(
317
+ bbox5, bbox6, loc1a=1, loc2a=2, loc1b=4, loc2b=3,
318
+ ec="r", color="g")
319
+ p.set_clip_on(False)
320
+ ax[2].add_patch(p)
321
+ # set color to marked area
322
+ axins = zoomed_inset_axes(ax[2], 1, loc='upper right')
323
+ axins.set_xlim(0, 0.2)
324
+ axins.set_ylim(0, 0.2)
325
+ plt.gca().axes.xaxis.set_ticks([])
326
+ plt.gca().axes.yaxis.set_ticks([])
327
+ mark_inset(ax[2], axins, loc1=2, loc2=4, color="g", ec="0.5")
328
+
329
+ # fill with green but color won't show if set fill to False
330
+ bbox7 = TransformedBbox(bbox, ax[3].transData)
331
+ bbox8 = TransformedBbox(bbox, ax[4].transData)
332
+ # BboxConnectorPatch won't show green
333
+ p = BboxConnectorPatch(
334
+ bbox7, bbox8, loc1a=1, loc2a=2, loc1b=4, loc2b=3,
335
+ ec="r", fc="g", fill=False)
336
+ p.set_clip_on(False)
337
+ ax[3].add_patch(p)
338
+ # marked area won't show green
339
+ axins = zoomed_inset_axes(ax[3], 1, loc='upper right')
340
+ axins.set_xlim(0, 0.2)
341
+ axins.set_ylim(0, 0.2)
342
+ axins.xaxis.set_ticks([])
343
+ axins.yaxis.set_ticks([])
344
+ mark_inset(ax[3], axins, loc1=2, loc2=4, fc="g", ec="0.5", fill=False)
345
+
346
+
347
+ # Update style when regenerating the test image
348
+ @image_comparison(['zoomed_axes.png', 'inverted_zoomed_axes.png'],
349
+ style=('classic', '_classic_test_patch'),
350
+ tol=0.02 if platform.machine() == 'arm64' else 0)
351
+ def test_zooming_with_inverted_axes():
352
+ fig, ax = plt.subplots()
353
+ ax.plot([1, 2, 3], [1, 2, 3])
354
+ ax.axis([1, 3, 1, 3])
355
+ inset_ax = zoomed_inset_axes(ax, zoom=2.5, loc='lower right')
356
+ inset_ax.axis([1.1, 1.4, 1.1, 1.4])
357
+
358
+ fig, ax = plt.subplots()
359
+ ax.plot([1, 2, 3], [1, 2, 3])
360
+ ax.axis([3, 1, 3, 1])
361
+ inset_ax = zoomed_inset_axes(ax, zoom=2.5, loc='lower right')
362
+ inset_ax.axis([1.4, 1.1, 1.4, 1.1])
363
+
364
+
365
+ # Update style when regenerating the test image
366
+ @image_comparison(['anchored_direction_arrows.png'],
367
+ tol=0 if platform.machine() == 'x86_64' else 0.01,
368
+ style=('classic', '_classic_test_patch'))
369
+ def test_anchored_direction_arrows():
370
+ fig, ax = plt.subplots()
371
+ ax.imshow(np.zeros((10, 10)), interpolation='nearest')
372
+
373
+ simple_arrow = AnchoredDirectionArrows(ax.transAxes, 'X', 'Y')
374
+ ax.add_artist(simple_arrow)
375
+
376
+
377
+ # Update style when regenerating the test image
378
+ @image_comparison(['anchored_direction_arrows_many_args.png'],
379
+ style=('classic', '_classic_test_patch'))
380
+ def test_anchored_direction_arrows_many_args():
381
+ fig, ax = plt.subplots()
382
+ ax.imshow(np.ones((10, 10)))
383
+
384
+ direction_arrows = AnchoredDirectionArrows(
385
+ ax.transAxes, 'A', 'B', loc='upper right', color='red',
386
+ aspect_ratio=-0.5, pad=0.6, borderpad=2, frameon=True, alpha=0.7,
387
+ sep_x=-0.06, sep_y=-0.08, back_length=0.1, head_width=9,
388
+ head_length=10, tail_width=5)
389
+ ax.add_artist(direction_arrows)
390
+
391
+
392
+ def test_axes_locatable_position():
393
+ fig, ax = plt.subplots()
394
+ divider = make_axes_locatable(ax)
395
+ with mpl.rc_context({"figure.subplot.wspace": 0.02}):
396
+ cax = divider.append_axes('right', size='5%')
397
+ fig.canvas.draw()
398
+ assert np.isclose(cax.get_position(original=False).width,
399
+ 0.03621495327102808)
400
+
401
+
402
+ @image_comparison(['image_grid_each_left_label_mode_all.png'], style='mpl20',
403
+ savefig_kwarg={'bbox_inches': 'tight'})
404
+ def test_image_grid_each_left_label_mode_all():
405
+ imdata = np.arange(100).reshape((10, 10))
406
+
407
+ fig = plt.figure(1, (3, 3))
408
+ grid = ImageGrid(fig, (1, 1, 1), nrows_ncols=(3, 2), axes_pad=(0.5, 0.3),
409
+ cbar_mode="each", cbar_location="left", cbar_size="15%",
410
+ label_mode="all")
411
+ # 3-tuple rect => SubplotDivider
412
+ assert isinstance(grid.get_divider(), SubplotDivider)
413
+ assert grid.get_axes_pad() == (0.5, 0.3)
414
+ assert grid.get_aspect() # True by default for ImageGrid
415
+ for ax, cax in zip(grid, grid.cbar_axes):
416
+ im = ax.imshow(imdata, interpolation='none')
417
+ cax.colorbar(im)
418
+
419
+
420
+ @image_comparison(['image_grid_single_bottom_label_mode_1.png'], style='mpl20',
421
+ savefig_kwarg={'bbox_inches': 'tight'})
422
+ def test_image_grid_single_bottom():
423
+ imdata = np.arange(100).reshape((10, 10))
424
+
425
+ fig = plt.figure(1, (2.5, 1.5))
426
+ grid = ImageGrid(fig, (0, 0, 1, 1), nrows_ncols=(1, 3),
427
+ axes_pad=(0.2, 0.15), cbar_mode="single",
428
+ cbar_location="bottom", cbar_size="10%", label_mode="1")
429
+ # 4-tuple rect => Divider, isinstance will give True for SubplotDivider
430
+ assert type(grid.get_divider()) is Divider
431
+ for i in range(3):
432
+ im = grid[i].imshow(imdata, interpolation='none')
433
+ grid.cbar_axes[0].colorbar(im)
434
+
435
+
436
+ def test_image_grid_label_mode_invalid():
437
+ fig = plt.figure()
438
+ with pytest.raises(ValueError, match="'foo' is not a valid value for mode"):
439
+ ImageGrid(fig, (0, 0, 1, 1), (2, 1), label_mode="foo")
440
+
441
+
442
+ @image_comparison(['image_grid.png'],
443
+ remove_text=True, style='mpl20',
444
+ savefig_kwarg={'bbox_inches': 'tight'})
445
+ def test_image_grid():
446
+ # test that image grid works with bbox_inches=tight.
447
+ im = np.arange(100).reshape((10, 10))
448
+
449
+ fig = plt.figure(1, (4, 4))
450
+ grid = ImageGrid(fig, 111, nrows_ncols=(2, 2), axes_pad=0.1)
451
+ assert grid.get_axes_pad() == (0.1, 0.1)
452
+ for i in range(4):
453
+ grid[i].imshow(im, interpolation='nearest')
454
+
455
+
456
+ def test_gettightbbox():
457
+ fig, ax = plt.subplots(figsize=(8, 6))
458
+
459
+ l, = ax.plot([1, 2, 3], [0, 1, 0])
460
+
461
+ ax_zoom = zoomed_inset_axes(ax, 4)
462
+ ax_zoom.plot([1, 2, 3], [0, 1, 0])
463
+
464
+ mark_inset(ax, ax_zoom, loc1=1, loc2=3, fc="none", ec='0.3')
465
+
466
+ remove_ticks_and_titles(fig)
467
+ bbox = fig.get_tightbbox(fig.canvas.get_renderer())
468
+ np.testing.assert_array_almost_equal(bbox.extents,
469
+ [-17.7, -13.9, 7.2, 5.4])
470
+
471
+
472
+ @pytest.mark.parametrize("click_on", ["big", "small"])
473
+ @pytest.mark.parametrize("big_on_axes,small_on_axes", [
474
+ ("gca", "gca"),
475
+ ("host", "host"),
476
+ ("host", "parasite"),
477
+ ("parasite", "host"),
478
+ ("parasite", "parasite")
479
+ ])
480
+ def test_picking_callbacks_overlap(big_on_axes, small_on_axes, click_on):
481
+ """Test pick events on normal, host or parasite axes."""
482
+ # Two rectangles are drawn and "clicked on", a small one and a big one
483
+ # enclosing the small one. The axis on which they are drawn as well as the
484
+ # rectangle that is clicked on are varied.
485
+ # In each case we expect that both rectangles are picked if we click on the
486
+ # small one and only the big one is picked if we click on the big one.
487
+ # Also tests picking on normal axes ("gca") as a control.
488
+ big = plt.Rectangle((0.25, 0.25), 0.5, 0.5, picker=5)
489
+ small = plt.Rectangle((0.4, 0.4), 0.2, 0.2, facecolor="r", picker=5)
490
+ # Machinery for "receiving" events
491
+ received_events = []
492
+ def on_pick(event):
493
+ received_events.append(event)
494
+ plt.gcf().canvas.mpl_connect('pick_event', on_pick)
495
+ # Shortcut
496
+ rectangles_on_axes = (big_on_axes, small_on_axes)
497
+ # Axes setup
498
+ axes = {"gca": None, "host": None, "parasite": None}
499
+ if "gca" in rectangles_on_axes:
500
+ axes["gca"] = plt.gca()
501
+ if "host" in rectangles_on_axes or "parasite" in rectangles_on_axes:
502
+ axes["host"] = host_subplot(111)
503
+ axes["parasite"] = axes["host"].twin()
504
+ # Add rectangles to axes
505
+ axes[big_on_axes].add_patch(big)
506
+ axes[small_on_axes].add_patch(small)
507
+ # Simulate picking with click mouse event
508
+ if click_on == "big":
509
+ click_axes = axes[big_on_axes]
510
+ axes_coords = (0.3, 0.3)
511
+ else:
512
+ click_axes = axes[small_on_axes]
513
+ axes_coords = (0.5, 0.5)
514
+ # In reality mouse events never happen on parasite axes, only host axes
515
+ if click_axes is axes["parasite"]:
516
+ click_axes = axes["host"]
517
+ (x, y) = click_axes.transAxes.transform(axes_coords)
518
+ m = MouseEvent("button_press_event", click_axes.figure.canvas, x, y,
519
+ button=1)
520
+ click_axes.pick(m)
521
+ # Checks
522
+ expected_n_events = 2 if click_on == "small" else 1
523
+ assert len(received_events) == expected_n_events
524
+ event_rects = [event.artist for event in received_events]
525
+ assert big in event_rects
526
+ if click_on == "small":
527
+ assert small in event_rects
528
+
529
+
530
+ @image_comparison(['anchored_artists.png'], remove_text=True, style='mpl20')
531
+ def test_anchored_artists():
532
+ fig, ax = plt.subplots(figsize=(3, 3))
533
+ ada = AnchoredDrawingArea(40, 20, 0, 0, loc='upper right', pad=0.,
534
+ frameon=False)
535
+ p1 = Circle((10, 10), 10)
536
+ ada.drawing_area.add_artist(p1)
537
+ p2 = Circle((30, 10), 5, fc="r")
538
+ ada.drawing_area.add_artist(p2)
539
+ ax.add_artist(ada)
540
+
541
+ box = AnchoredAuxTransformBox(ax.transData, loc='upper left')
542
+ el = Ellipse((0, 0), width=0.1, height=0.4, angle=30, color='cyan')
543
+ box.drawing_area.add_artist(el)
544
+ ax.add_artist(box)
545
+
546
+ # Manually construct the ellipse instead, once the deprecation elapses.
547
+ with pytest.warns(mpl.MatplotlibDeprecationWarning):
548
+ ae = AnchoredEllipse(ax.transData, width=0.1, height=0.25, angle=-60,
549
+ loc='lower left', pad=0.5, borderpad=0.4,
550
+ frameon=True)
551
+ ax.add_artist(ae)
552
+
553
+ asb = AnchoredSizeBar(ax.transData, 0.2, r"0.2 units", loc='lower right',
554
+ pad=0.3, borderpad=0.4, sep=4, fill_bar=True,
555
+ frameon=False, label_top=True, prop={'size': 20},
556
+ size_vertical=0.05, color='green')
557
+ ax.add_artist(asb)
558
+
559
+
560
+ def test_hbox_divider():
561
+ arr1 = np.arange(20).reshape((4, 5))
562
+ arr2 = np.arange(20).reshape((5, 4))
563
+
564
+ fig, (ax1, ax2) = plt.subplots(1, 2)
565
+ ax1.imshow(arr1)
566
+ ax2.imshow(arr2)
567
+
568
+ pad = 0.5 # inches.
569
+ divider = HBoxDivider(
570
+ fig, 111, # Position of combined axes.
571
+ horizontal=[Size.AxesX(ax1), Size.Fixed(pad), Size.AxesX(ax2)],
572
+ vertical=[Size.AxesY(ax1), Size.Scaled(1), Size.AxesY(ax2)])
573
+ ax1.set_axes_locator(divider.new_locator(0))
574
+ ax2.set_axes_locator(divider.new_locator(2))
575
+
576
+ fig.canvas.draw()
577
+ p1 = ax1.get_position()
578
+ p2 = ax2.get_position()
579
+ assert p1.height == p2.height
580
+ assert p2.width / p1.width == pytest.approx((4 / 5) ** 2)
581
+
582
+
583
+ def test_vbox_divider():
584
+ arr1 = np.arange(20).reshape((4, 5))
585
+ arr2 = np.arange(20).reshape((5, 4))
586
+
587
+ fig, (ax1, ax2) = plt.subplots(1, 2)
588
+ ax1.imshow(arr1)
589
+ ax2.imshow(arr2)
590
+
591
+ pad = 0.5 # inches.
592
+ divider = VBoxDivider(
593
+ fig, 111, # Position of combined axes.
594
+ horizontal=[Size.AxesX(ax1), Size.Scaled(1), Size.AxesX(ax2)],
595
+ vertical=[Size.AxesY(ax1), Size.Fixed(pad), Size.AxesY(ax2)])
596
+ ax1.set_axes_locator(divider.new_locator(0))
597
+ ax2.set_axes_locator(divider.new_locator(2))
598
+
599
+ fig.canvas.draw()
600
+ p1 = ax1.get_position()
601
+ p2 = ax2.get_position()
602
+ assert p1.width == p2.width
603
+ assert p1.height / p2.height == pytest.approx((4 / 5) ** 2)
604
+
605
+
606
+ def test_axes_class_tuple():
607
+ fig = plt.figure()
608
+ axes_class = (mpl_toolkits.axes_grid1.mpl_axes.Axes, {})
609
+ gr = AxesGrid(fig, 111, nrows_ncols=(1, 1), axes_class=axes_class)
610
+
611
+
612
+ def test_grid_axes_lists():
613
+ """Test Grid axes_all, axes_row and axes_column relationship."""
614
+ fig = plt.figure()
615
+ grid = Grid(fig, 111, (2, 3), direction="row")
616
+ assert_array_equal(grid, grid.axes_all)
617
+ assert_array_equal(grid.axes_row, np.transpose(grid.axes_column))
618
+ assert_array_equal(grid, np.ravel(grid.axes_row), "row")
619
+ assert grid.get_geometry() == (2, 3)
620
+ grid = Grid(fig, 111, (2, 3), direction="column")
621
+ assert_array_equal(grid, np.ravel(grid.axes_column), "column")
622
+
623
+
624
+ @pytest.mark.parametrize('direction', ('row', 'column'))
625
+ def test_grid_axes_position(direction):
626
+ """Test positioning of the axes in Grid."""
627
+ fig = plt.figure()
628
+ grid = Grid(fig, 111, (2, 2), direction=direction)
629
+ loc = [ax.get_axes_locator() for ax in np.ravel(grid.axes_row)]
630
+ # Test nx.
631
+ assert loc[1].args[0] > loc[0].args[0]
632
+ assert loc[0].args[0] == loc[2].args[0]
633
+ assert loc[3].args[0] == loc[1].args[0]
634
+ # Test ny.
635
+ assert loc[2].args[1] < loc[0].args[1]
636
+ assert loc[0].args[1] == loc[1].args[1]
637
+ assert loc[3].args[1] == loc[2].args[1]
638
+
639
+
640
+ @pytest.mark.parametrize('rect, ngrids, error, message', (
641
+ ((1, 1), None, TypeError, "Incorrect rect format"),
642
+ (111, -1, ValueError, "ngrids must be positive"),
643
+ (111, 7, ValueError, "ngrids must be positive"),
644
+ ))
645
+ def test_grid_errors(rect, ngrids, error, message):
646
+ fig = plt.figure()
647
+ with pytest.raises(error, match=message):
648
+ Grid(fig, rect, (2, 3), ngrids=ngrids)
649
+
650
+
651
+ @pytest.mark.parametrize('anchor, error, message', (
652
+ (None, TypeError, "anchor must be str"),
653
+ ("CC", ValueError, "'CC' is not a valid value for anchor"),
654
+ ((1, 1, 1), TypeError, "anchor must be str"),
655
+ ))
656
+ def test_divider_errors(anchor, error, message):
657
+ fig = plt.figure()
658
+ with pytest.raises(error, match=message):
659
+ Divider(fig, [0, 0, 1, 1], [Size.Fixed(1)], [Size.Fixed(1)],
660
+ anchor=anchor)
661
+
662
+
663
+ @check_figures_equal(extensions=["png"])
664
+ def test_mark_inset_unstales_viewlim(fig_test, fig_ref):
665
+ inset, full = fig_test.subplots(1, 2)
666
+ full.plot([0, 5], [0, 5])
667
+ inset.set(xlim=(1, 2), ylim=(1, 2))
668
+ # Check that mark_inset unstales full's viewLim before drawing the marks.
669
+ mark_inset(full, inset, 1, 4)
670
+
671
+ inset, full = fig_ref.subplots(1, 2)
672
+ full.plot([0, 5], [0, 5])
673
+ inset.set(xlim=(1, 2), ylim=(1, 2))
674
+ mark_inset(full, inset, 1, 4)
675
+ # Manually unstale the full's viewLim.
676
+ fig_ref.canvas.draw()
677
+
678
+
679
+ def test_auto_adjustable():
680
+ fig = plt.figure()
681
+ ax = fig.add_axes([0, 0, 1, 1])
682
+ pad = 0.1
683
+ make_axes_area_auto_adjustable(ax, pad=pad)
684
+ fig.canvas.draw()
685
+ tbb = ax.get_tightbbox()
686
+ assert tbb.x0 == pytest.approx(pad * fig.dpi)
687
+ assert tbb.x1 == pytest.approx(fig.bbox.width - pad * fig.dpi)
688
+ assert tbb.y0 == pytest.approx(pad * fig.dpi)
689
+ assert tbb.y1 == pytest.approx(fig.bbox.height - pad * fig.dpi)
690
+
691
+
692
+ # Update style when regenerating the test image
693
+ @image_comparison(['rgb_axes.png'], remove_text=True,
694
+ style=('classic', '_classic_test_patch'))
695
+ def test_rgb_axes():
696
+ fig = plt.figure()
697
+ ax = RGBAxes(fig, (0.1, 0.1, 0.8, 0.8), pad=0.1)
698
+ rng = np.random.default_rng(19680801)
699
+ r = rng.random((5, 5))
700
+ g = rng.random((5, 5))
701
+ b = rng.random((5, 5))
702
+ ax.imshow_rgb(r, g, b, interpolation='none')
703
+
704
+
705
+ # Update style when regenerating the test image
706
+ @image_comparison(['insetposition.png'], remove_text=True,
707
+ style=('classic', '_classic_test_patch'))
708
+ def test_insetposition():
709
+ fig, ax = plt.subplots(figsize=(2, 2))
710
+ ax_ins = plt.axes([0, 0, 1, 1])
711
+ with pytest.warns(mpl.MatplotlibDeprecationWarning):
712
+ ip = InsetPosition(ax, [0.2, 0.25, 0.5, 0.4])
713
+ ax_ins.set_axes_locator(ip)
714
+
715
+
716
+ # The original version of this test relied on mpl_toolkits's slightly different
717
+ # colorbar implementation; moving to matplotlib's own colorbar implementation
718
+ # caused the small image comparison error.
719
+ @image_comparison(['imagegrid_cbar_mode.png'],
720
+ remove_text=True, style='mpl20', tol=0.3)
721
+ def test_imagegrid_cbar_mode_edge():
722
+ arr = np.arange(16).reshape((4, 4))
723
+
724
+ fig = plt.figure(figsize=(18, 9))
725
+
726
+ positions = (241, 242, 243, 244, 245, 246, 247, 248)
727
+ directions = ['row']*4 + ['column']*4
728
+ cbar_locations = ['left', 'right', 'top', 'bottom']*2
729
+
730
+ for position, direction, location in zip(
731
+ positions, directions, cbar_locations):
732
+ grid = ImageGrid(fig, position,
733
+ nrows_ncols=(2, 2),
734
+ direction=direction,
735
+ cbar_location=location,
736
+ cbar_size='20%',
737
+ cbar_mode='edge')
738
+ ax1, ax2, ax3, ax4 = grid
739
+
740
+ ax1.imshow(arr, cmap='nipy_spectral')
741
+ ax2.imshow(arr.T, cmap='hot')
742
+ ax3.imshow(np.hypot(arr, arr.T), cmap='jet')
743
+ ax4.imshow(np.arctan2(arr, arr.T), cmap='hsv')
744
+
745
+ # In each row/column, the "first" colorbars must be overwritten by the
746
+ # "second" ones. To achieve this, clear out the axes first.
747
+ for ax in grid:
748
+ ax.cax.cla()
749
+ cb = ax.cax.colorbar(ax.images[0])
750
+
751
+
752
+ def test_imagegrid():
753
+ fig = plt.figure()
754
+ grid = ImageGrid(fig, 111, nrows_ncols=(1, 1))
755
+ ax = grid[0]
756
+ im = ax.imshow([[1, 2]], norm=mpl.colors.LogNorm())
757
+ cb = ax.cax.colorbar(im)
758
+ assert isinstance(cb.locator, mticker.LogLocator)
759
+
760
+
761
+ def test_removal():
762
+ import matplotlib.pyplot as plt
763
+ import mpl_toolkits.axisartist as AA
764
+ fig = plt.figure()
765
+ ax = host_subplot(111, axes_class=AA.Axes, figure=fig)
766
+ col = ax.fill_between(range(5), 0, range(5))
767
+ fig.canvas.draw()
768
+ col.remove()
769
+ fig.canvas.draw()
770
+
771
+
772
+ @image_comparison(['anchored_locator_base_call.png'], style="mpl20")
773
+ def test_anchored_locator_base_call():
774
+ fig = plt.figure(figsize=(3, 3))
775
+ fig1, fig2 = fig.subfigures(nrows=2, ncols=1)
776
+
777
+ ax = fig1.subplots()
778
+ ax.set(aspect=1, xlim=(-15, 15), ylim=(-20, 5))
779
+ ax.set(xticks=[], yticks=[])
780
+
781
+ Z = cbook.get_sample_data("axes_grid/bivariate_normal.npy")
782
+ extent = (-3, 4, -4, 3)
783
+
784
+ axins = zoomed_inset_axes(ax, zoom=2, loc="upper left")
785
+ axins.set(xticks=[], yticks=[])
786
+
787
+ axins.imshow(Z, extent=extent, origin="lower")
788
+
789
+
790
+ def test_grid_with_axes_class_not_overriding_axis():
791
+ Grid(plt.figure(), 111, (2, 2), axes_class=mpl.axes.Axes)
792
+ RGBAxes(plt.figure(), 111, axes_class=mpl.axes.Axes)
valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (816 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/axes_divider.cpython-310.pyc ADDED
Binary file (346 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/axis_artist.cpython-310.pyc ADDED
Binary file (34.8 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/axisline_style.cpython-310.pyc ADDED
Binary file (6.89 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/grid_helper_curvelinear.cpython-310.pyc ADDED
Binary file (11.3 kB). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/axisartist/__pycache__/parasite_axes.cpython-310.pyc ADDED
Binary file (407 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from .axes3d import Axes3D
2
+
3
+ __all__ = ['Axes3D']
valley/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (231 Bytes). View file
 
valley/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/art3d.cpython-310.pyc ADDED
Binary file (40.9 kB). View file