ZTWHHH commited on
Commit
44984d9
·
verified ·
1 Parent(s): b6b40f8

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +2 -0
  2. parrot/lib/libtinfow.so.6 +3 -0
  3. parrot/lib/python3.10/site-packages/aiohappyeyeballs/__init__.py +13 -0
  4. parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-310.pyc +0 -0
  5. parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-310.pyc +0 -0
  6. parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/staggered.cpython-310.pyc +0 -0
  7. parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/types.cpython-310.pyc +0 -0
  8. parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/utils.cpython-310.pyc +0 -0
  9. parrot/lib/python3.10/site-packages/aiohappyeyeballs/_staggered.py +101 -0
  10. parrot/lib/python3.10/site-packages/aiohappyeyeballs/impl.py +204 -0
  11. parrot/lib/python3.10/site-packages/aiohappyeyeballs/py.typed +0 -0
  12. parrot/lib/python3.10/site-packages/aiohappyeyeballs/staggered.py +9 -0
  13. parrot/lib/python3.10/site-packages/aiohappyeyeballs/types.py +12 -0
  14. parrot/lib/python3.10/site-packages/aiohappyeyeballs/utils.py +97 -0
  15. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc +0 -0
  16. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/base_protocol.cpython-310.pyc +0 -0
  17. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc +0 -0
  18. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc +0 -0
  19. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc +0 -0
  20. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc +0 -0
  21. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc +0 -0
  22. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc +0 -0
  23. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc +0 -0
  24. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc +0 -0
  25. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc +0 -0
  26. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc +0 -0
  27. parrot/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc +0 -0
  28. parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-310.pyc +3 -0
  29. parrot/lib/python3.10/site-packages/mdit_py_plugins/myst_blocks/__pycache__/index.cpython-310.pyc +0 -0
  30. parrot/lib/python3.10/site-packages/mdit_py_plugins/myst_blocks/index.py +153 -0
  31. parrot/lib/python3.10/site-packages/mdit_py_plugins/texmath/README.md +137 -0
  32. parrot/lib/python3.10/site-packages/mdit_py_plugins/texmath/__init__.py +1 -0
  33. parrot/lib/python3.10/site-packages/simple_parsing/__init__.py +60 -0
  34. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/__init__.cpython-310.pyc +0 -0
  35. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/conflicts.cpython-310.pyc +0 -0
  36. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/decorators.cpython-310.pyc +0 -0
  37. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/docstring.cpython-310.pyc +0 -0
  38. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/help_formatter.cpython-310.pyc +0 -0
  39. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/parsing.cpython-310.pyc +0 -0
  40. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/replace.cpython-310.pyc +0 -0
  41. parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/utils.cpython-310.pyc +0 -0
  42. parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/__init__.py +0 -0
  43. parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/__pycache__/__init__.cpython-310.pyc +0 -0
  44. parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/__pycache__/get_field_annotations.cpython-310.pyc +0 -0
  45. parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/get_field_annotations.py +251 -0
  46. parrot/lib/python3.10/site-packages/simple_parsing/conflicts.py +389 -0
  47. parrot/lib/python3.10/site-packages/simple_parsing/decorators.py +139 -0
  48. parrot/lib/python3.10/site-packages/simple_parsing/docstring.py +385 -0
  49. parrot/lib/python3.10/site-packages/simple_parsing/help_formatter.py +88 -0
  50. parrot/lib/python3.10/site-packages/simple_parsing/helpers/__init__.py +15 -0
.gitattributes CHANGED
@@ -100,3 +100,5 @@ parrot/lib/libncursesw.a filter=lfs diff=lfs merge=lfs -text
100
  parrot/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
101
  parrot/lib/libncursesw.so.6.4 filter=lfs diff=lfs merge=lfs -text
102
  parrot/lib/libtinfow.so filter=lfs diff=lfs merge=lfs -text
 
 
 
100
  parrot/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
101
  parrot/lib/libncursesw.so.6.4 filter=lfs diff=lfs merge=lfs -text
102
  parrot/lib/libtinfow.so filter=lfs diff=lfs merge=lfs -text
103
+ parrot/lib/libtinfow.so.6 filter=lfs diff=lfs merge=lfs -text
104
+ parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
parrot/lib/libtinfow.so.6 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ff9b333bc4b796b31c188c2dadd7840788cb963dbf4f34567deb3f326326b02
3
+ size 287080
parrot/lib/python3.10/site-packages/aiohappyeyeballs/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""aiohappyeyeballs package: Happy Eyeballs connection helpers for asyncio."""

__version__ = "2.4.2"

from .impl import start_connection
from .types import AddrInfoType
from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos

# Public API re-exported at the package level.
__all__ = (
    "start_connection",
    "AddrInfoType",
    "remove_addr_infos",
    "pop_addr_infos_interleave",
    "addr_to_addr_infos",
)
parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (435 Bytes). View file
 
parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-310.pyc ADDED
Binary file (5.18 kB). View file
 
parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/staggered.cpython-310.pyc ADDED
Binary file (333 Bytes). View file
 
parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/types.cpython-310.pyc ADDED
Binary file (384 Bytes). View file
 
parrot/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/utils.cpython-310.pyc ADDED
Binary file (2.53 kB). View file
 
parrot/lib/python3.10/site-packages/aiohappyeyeballs/_staggered.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import asyncio
import contextlib
from typing import Awaitable, Callable, Iterable, List, Optional, Tuple, TypeVar


class _Done(Exception):
    # Internal control-flow signal: raised by the winning task so the
    # enclosing TaskGroup is torn down (cancelling the other competitors).
    pass


_T = TypeVar("_T")


async def staggered_race(
    coro_fns: Iterable[Callable[[], Awaitable[_T]]], delay: Optional[float]
) -> Tuple[Optional[_T], Optional[int], List[Optional[BaseException]]]:
    """
    Run coroutines with staggered start times and take the first to finish.

    This method takes an iterable of coroutine functions. The first one is
    started immediately. From then on, whenever the immediately preceding one
    fails (raises an exception), or when *delay* seconds has passed, the next
    coroutine is started. This continues until one of the coroutines complete
    successfully, in which case all others are cancelled, or until all
    coroutines fail.

    The coroutines provided should be well-behaved in the following way:

    * They should only ``return`` if completed successfully.

    * They should always raise an exception if they did not complete
      successfully. In particular, if they handle cancellation, they should
      probably reraise, like this::

        try:
            # do work
        except asyncio.CancelledError:
            # undo partially completed work
            raise

    Args:
        coro_fns: an iterable of coroutine functions, i.e. callables that
            return a coroutine object when called. Use ``functools.partial`` or
            lambdas to pass arguments.

        delay: amount of time, in seconds, between starting coroutines. If
            ``None``, the coroutines will run sequentially.

    Returns:
        tuple *(winner_result, winner_index, exceptions)* where

        - *winner_result*: the result of the winning coroutine, or ``None``
          if no coroutines won.

        - *winner_index*: the index of the winning coroutine in
          ``coro_fns``, or ``None`` if no coroutines won. If the winning
          coroutine may return None on success, *winner_index* can be used
          to definitively determine whether any coroutine won.

        - *exceptions*: list of exceptions returned by the coroutines.
          ``len(exceptions)`` is equal to the number of coroutines actually
          started, and the order is the same as in ``coro_fns``. The winning
          coroutine's entry is ``None``.

    """
    # TODO: when we have aiter() and anext(), allow async iterables in coro_fns.
    winner_result = None
    winner_index = None
    exceptions: List[Optional[BaseException]] = []

    async def run_one_coro(
        this_index: int,
        coro_fn: Callable[[], Awaitable[_T]],
        this_failed: asyncio.Event,
    ) -> None:
        # Run one competitor: record its failure (and wake the scheduler via
        # `this_failed`), or record the win and abort the group via `_Done`.
        try:
            result = await coro_fn()
        except (SystemExit, KeyboardInterrupt):
            raise
        except BaseException as e:
            exceptions[this_index] = e
            this_failed.set()  # Kickstart the next coroutine
        else:
            # Store winner's results
            nonlocal winner_index, winner_result
            assert winner_index is None  # noqa: S101
            winner_index = this_index
            winner_result = result
            raise _Done

    try:
        # NOTE: asyncio.TaskGroup and `except*` require Python 3.11+.
        async with asyncio.TaskGroup() as tg:
            for this_index, coro_fn in enumerate(coro_fns):
                this_failed = asyncio.Event()
                exceptions.append(None)
                tg.create_task(run_one_coro(this_index, coro_fn, this_failed))
                # Start the next competitor as soon as this one fails or after
                # `delay` seconds, whichever comes first.  With delay=None,
                # wait_for waits indefinitely, i.e. run strictly sequentially.
                with contextlib.suppress(TimeoutError):
                    await asyncio.wait_for(this_failed.wait(), delay)
    except* _Done:
        pass

    return winner_result, winner_index, exceptions
parrot/lib/python3.10/site-packages/aiohappyeyeballs/impl.py ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base implementation."""
2
+
3
+ import asyncio
4
+ import collections
5
+ import functools
6
+ import itertools
7
+ import socket
8
+ import sys
9
+ from typing import List, Optional, Sequence
10
+
11
+ from . import staggered
12
+ from .types import AddrInfoType
13
+
14
+ if sys.version_info < (3, 8, 2): # noqa: UP036
15
+ # asyncio.staggered is broken in Python 3.8.0 and 3.8.1
16
+ # so it must be patched:
17
+ # https://github.com/aio-libs/aiohttp/issues/8556
18
+ # https://bugs.python.org/issue39129
19
+ # https://github.com/python/cpython/pull/17693
20
+ import asyncio.futures
21
+
22
+ asyncio.futures.TimeoutError = asyncio.TimeoutError # type: ignore[attr-defined]
23
+
24
+
25
async def start_connection(
    addr_infos: Sequence[AddrInfoType],
    *,
    local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
    happy_eyeballs_delay: Optional[float] = None,
    interleave: Optional[int] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> socket.socket:
    """
    Connect to a TCP server.

    Create a socket connection to a specified destination. The
    destination is specified as a list of AddrInfoType tuples as
    returned from getaddrinfo().

    The arguments are, in order:

    * ``family``: the address family, e.g. ``socket.AF_INET`` or
        ``socket.AF_INET6``.
    * ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or
        ``socket.SOCK_DGRAM``.
    * ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or
        ``socket.IPPROTO_UDP``.
    * ``canonname``: the canonical name of the address, e.g.
        ``"www.python.org"``.
    * ``sockaddr``: the socket address

    This method is a coroutine which will try to establish the connection
    in the background. When successful, the coroutine returns a
    socket.

    Keyword arguments:

    * ``local_addr_infos``: addr_info tuples to bind the local end of the
        socket to (forwarded to ``_connect_sock``).
    * ``happy_eyeballs_delay``: stagger delay, in seconds, between attempts;
        ``None`` disables Happy Eyeballs and tries addresses sequentially.
    * ``interleave``: how many leading addresses per family to keep when
        reordering ``addr_infos``; defaults to 1 when Happy Eyeballs is on.
    * ``loop``: event loop to use; defaults to the running loop.

    Raises ``OSError`` when every attempt fails (a single exception when
    possible, otherwise a combined one).

    The expected use case is to use this method in conjunction with
    loop.create_connection() to establish a connection to a server::

        socket = await start_connection(addr_infos)
        transport, protocol = await loop.create_connection(
            MyProtocol, sock=socket, ...)
    """
    if not (current_loop := loop):
        current_loop = asyncio.get_running_loop()

    single_addr_info = len(addr_infos) == 1

    if happy_eyeballs_delay is not None and interleave is None:
        # If using happy eyeballs, default to interleave addresses by family
        interleave = 1

    if interleave and not single_addr_info:
        addr_infos = _interleave_addrinfos(addr_infos, interleave)

    sock: Optional[socket.socket] = None
    # Each _connect_sock call appends its own sub-list of OSErrors here.
    exceptions: List[List[OSError]] = []
    if happy_eyeballs_delay is None or single_addr_info:
        # not using happy eyeballs
        for addrinfo in addr_infos:
            try:
                sock = await _connect_sock(
                    current_loop, exceptions, addrinfo, local_addr_infos
                )
                break
            except OSError:
                continue
    else:  # using happy eyeballs
        # Race the per-address connection attempts, staggered by the delay.
        sock, _, _ = await staggered.staggered_race(
            (
                functools.partial(
                    _connect_sock, current_loop, exceptions, addrinfo, local_addr_infos
                )
                for addrinfo in addr_infos
            ),
            happy_eyeballs_delay,
        )

    if sock is None:
        # No attempt succeeded: flatten the per-attempt exception lists and
        # raise the most informative single OSError we can.
        all_exceptions = [exc for sub in exceptions for exc in sub]
        try:
            first_exception = all_exceptions[0]
            if len(all_exceptions) == 1:
                raise first_exception
            else:
                # If they all have the same str(), raise one.
                model = str(first_exception)
                if all(str(exc) == model for exc in all_exceptions):
                    raise first_exception
                # Raise a combined exception so the user can see all
                # the various error messages.
                msg = "Multiple exceptions: {}".format(
                    ", ".join(str(exc) for exc in all_exceptions)
                )
                # If the errno is the same for all exceptions, raise
                # an OSError with that errno.
                first_errno = first_exception.errno
                if all(
                    isinstance(exc, OSError) and exc.errno == first_errno
                    for exc in all_exceptions
                ):
                    raise OSError(first_errno, msg)
                raise OSError(msg)
        finally:
            # Drop local references to the collected exceptions
            # (presumably to avoid reference cycles via tracebacks — confirm).
            all_exceptions = None  # type: ignore[assignment]
            exceptions = None  # type: ignore[assignment]

    return sock
128
+
129
+
130
async def _connect_sock(
    loop: asyncio.AbstractEventLoop,
    exceptions: List[List[OSError]],
    addr_info: AddrInfoType,
    local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
) -> socket.socket:
    """Create, bind and connect one socket.

    Any OSError raised during binding or connecting is appended to a fresh
    sub-list inside *exceptions* before being re-raised, so the caller can
    aggregate failures across attempts.
    """
    my_exceptions: list[OSError] = []
    exceptions.append(my_exceptions)
    family, type_, proto, _, address = addr_info
    sock = None
    try:
        sock = socket.socket(family=family, type=type_, proto=proto)
        # Non-blocking so loop.sock_connect can drive the connect.
        sock.setblocking(False)
        if local_addr_infos is not None:
            for lfamily, _, _, _, laddr in local_addr_infos:
                # skip local addresses of different family
                if lfamily != family:
                    continue
                try:
                    sock.bind(laddr)
                    break
                except OSError as exc:
                    msg = (
                        f"error while attempting to bind on "
                        f"address {laddr!r}: "
                        f"{exc.strerror.lower()}"
                    )
                    # Wrap with a more descriptive message, same errno.
                    exc = OSError(exc.errno, msg)
                    my_exceptions.append(exc)
            else:  # all bind attempts failed
                if my_exceptions:
                    raise my_exceptions.pop()
                else:
                    raise OSError(f"no matching local address with {family=} found")
        await loop.sock_connect(sock, address)
        return sock
    except OSError as exc:
        my_exceptions.append(exc)
        if sock is not None:
            sock.close()
        raise
    except:
        # Non-OSError (e.g. cancellation): close the socket but do not record.
        if sock is not None:
            sock.close()
        raise
    finally:
        # Drop local references to the exception lists
        # (presumably to break reference cycles — confirm).
        exceptions = my_exceptions = None  # type: ignore[assignment]
178
+
179
+
180
+ def _interleave_addrinfos(
181
+ addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1
182
+ ) -> List[AddrInfoType]:
183
+ """Interleave list of addrinfo tuples by family."""
184
+ # Group addresses by family
185
+ addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = (
186
+ collections.OrderedDict()
187
+ )
188
+ for addr in addrinfos:
189
+ family = addr[0]
190
+ if family not in addrinfos_by_family:
191
+ addrinfos_by_family[family] = []
192
+ addrinfos_by_family[family].append(addr)
193
+ addrinfos_lists = list(addrinfos_by_family.values())
194
+
195
+ reordered: List[AddrInfoType] = []
196
+ if first_address_family_count > 1:
197
+ reordered.extend(addrinfos_lists[0][: first_address_family_count - 1])
198
+ del addrinfos_lists[0][: first_address_family_count - 1]
199
+ reordered.extend(
200
+ a
201
+ for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists))
202
+ if a is not None
203
+ )
204
+ return reordered
parrot/lib/python3.10/site-packages/aiohappyeyeballs/py.typed ADDED
File without changes
parrot/lib/python3.10/site-packages/aiohappyeyeballs/staggered.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
import sys

# Version (3, 11, x) compares greater than (3, 11), so this selects every
# 3.11+ interpreter; older interpreters fall back to the stdlib version.
if sys.version_info > (3, 11):
    # Use the vendored reimplementation (built on asyncio.TaskGroup) to avoid
    # a bug in the stdlib asyncio.staggered:
    # https://github.com/python/cpython/issues/124639#issuecomment-2378129834
    from ._staggered import staggered_race
else:
    from asyncio.staggered import staggered_race

__all__ = ["staggered_race"]
parrot/lib/python3.10/site-packages/aiohappyeyeballs/types.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Types for aiohappyeyeballs."""
2
+
3
+ import socket
4
+ from typing import Tuple, Union
5
+
6
+ AddrInfoType = Tuple[
7
+ Union[int, socket.AddressFamily],
8
+ Union[int, socket.SocketKind],
9
+ int,
10
+ str,
11
+ Tuple, # type: ignore[type-arg]
12
+ ]
parrot/lib/python3.10/site-packages/aiohappyeyeballs/utils.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Utility functions for aiohappyeyeballs."""
2
+
3
+ import ipaddress
4
+ import socket
5
+ from typing import Dict, List, Optional, Tuple, Union
6
+
7
+ from .types import AddrInfoType
8
+
9
+
10
def addr_to_addr_infos(
    addr: Optional[
        Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]]
    ]
) -> Optional[List[AddrInfoType]]:
    """Build a single-entry addr_info list from a ``(host, port[, ...])`` tuple.

    Returns ``None`` when *addr* is ``None``.  A host containing ``:`` is
    treated as IPv6 and yields a 4-tuple sockaddr (flowinfo and scopeid
    default to 0 when not supplied); otherwise the sockaddr is ``(host, port)``.
    """
    if addr is None:
        return None
    host, port = addr[0], addr[1]
    if ":" not in host:
        # IPv4: plain two-element sockaddr.
        sockaddr = (host, port)
        family = socket.AF_INET
    else:
        # IPv6: carry flowinfo / scopeid through when present in *addr*.
        flowinfo = addr[2] if len(addr) >= 3 else 0  # type: ignore[misc]
        scopeid = addr[3] if len(addr) >= 4 else 0  # type: ignore[misc]
        sockaddr = (host, port, flowinfo, scopeid)
        family = socket.AF_INET6
    return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", sockaddr)]
35
+
36
+
37
def pop_addr_infos_interleave(
    addr_infos: List[AddrInfoType], interleave: Optional[int] = None
) -> None:
    """
    Pop addr_info from the list of addr_infos by family up to interleave times.

    Removes, in place, up to *interleave* (default 1) leading entries of
    each address family from *addr_infos*.
    """
    limit = 1 if interleave is None else interleave
    popped_per_family: Dict[int, int] = {}
    doomed: List[AddrInfoType] = []
    for info in addr_infos:
        family = info[0]
        already = popped_per_family.get(family, 0)
        if already < limit:
            doomed.append(info)
            popped_per_family[family] = already + 1
    # Mutate the caller's list in place.
    for info in doomed:
        addr_infos.remove(info)
59
+
60
+
61
+ def _addr_tuple_to_ip_address(
62
+ addr: Union[Tuple[str, int], Tuple[str, int, int, int]]
63
+ ) -> Union[
64
+ Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int]
65
+ ]:
66
+ """Convert an address tuple to an IPv4Address."""
67
+ return (ipaddress.ip_address(addr[0]), *addr[1:])
68
+
69
+
70
+ def remove_addr_infos(
71
+ addr_infos: List[AddrInfoType],
72
+ addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
73
+ ) -> None:
74
+ """
75
+ Remove an address from the list of addr_infos.
76
+
77
+ The addr value is typically the return value of
78
+ sock.getpeername().
79
+ """
80
+ bad_addrs_infos: List[AddrInfoType] = []
81
+ for addr_info in addr_infos:
82
+ if addr_info[-1] == addr:
83
+ bad_addrs_infos.append(addr_info)
84
+ if bad_addrs_infos:
85
+ for bad_addr_info in bad_addrs_infos:
86
+ addr_infos.remove(bad_addr_info)
87
+ return
88
+ # Slow path in case addr is formatted differently
89
+ match_addr = _addr_tuple_to_ip_address(addr)
90
+ for addr_info in addr_infos:
91
+ if match_addr == _addr_tuple_to_ip_address(addr_info[-1]):
92
+ bad_addrs_infos.append(addr_info)
93
+ if bad_addrs_infos:
94
+ for bad_addr_info in bad_addrs_infos:
95
+ addr_infos.remove(bad_addr_info)
96
+ return
97
+ raise ValueError(f"Address {addr} not found in addr_infos")
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4.5 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/base_protocol.cpython-310.pyc ADDED
Binary file (2.92 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc ADDED
Binary file (20.1 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc ADDED
Binary file (15.9 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc ADDED
Binary file (1.63 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc ADDED
Binary file (439 Bytes). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc ADDED
Binary file (28.7 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-310.pyc ADDED
Binary file (1.13 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc ADDED
Binary file (14.3 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/typedefs.cpython-310.pyc ADDED
Binary file (2.04 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_app.cpython-310.pyc ADDED
Binary file (17.3 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc ADDED
Binary file (12 kB). View file
 
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc ADDED
Binary file (6.51 kB). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-310.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8f7411f196ab7af30cb03dea4c904d1b55601b6c1dce9a01c94ca0b7fb77aa2d
3
+ size 101191
parrot/lib/python3.10/site-packages/mdit_py_plugins/myst_blocks/__pycache__/index.cpython-310.pyc ADDED
Binary file (3.43 kB). View file
 
parrot/lib/python3.10/site-packages/mdit_py_plugins/myst_blocks/index.py ADDED
@@ -0,0 +1,153 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+
3
+ from markdown_it import MarkdownIt
4
+ from markdown_it.common.utils import escapeHtml, isSpace
5
+ from markdown_it.rules_block import StateBlock
6
+
7
+
8
def myst_block_plugin(md: MarkdownIt):
    """Parse MyST targets (``(name)=``), line comments (``% comment``) and block breaks (``+++``)."""
    # Register each block rule ahead of the built-in rule it could be
    # mistaken for; "alt" lists the rule chains per the markdown-it ruler API.
    md.block.ruler.before(
        "blockquote",
        "myst_line_comment",
        line_comment,
        {"alt": ["paragraph", "reference", "blockquote", "list", "footnote_def"]},
    )
    md.block.ruler.before(
        "hr",
        "myst_block_break",
        block_break,
        {"alt": ["paragraph", "reference", "blockquote", "list", "footnote_def"]},
    )
    md.block.ruler.before(
        "hr",
        "myst_target",
        target,
        {"alt": ["paragraph", "reference", "blockquote", "list", "footnote_def"]},
    )
    # HTML renderers for the two token types that are not rendered as "hr".
    md.add_render_rule("myst_target", render_myst_target)
    md.add_render_rule("myst_line_comment", render_myst_line_comment)
+
31
+
32
+ def line_comment(state: StateBlock, startLine: int, endLine: int, silent: bool):
33
+
34
+ pos = state.bMarks[startLine] + state.tShift[startLine]
35
+ maximum = state.eMarks[startLine]
36
+
37
+ # if it's indented more than 3 spaces, it should be a code block
38
+ if state.sCount[startLine] - state.blkIndent >= 4:
39
+ return False
40
+
41
+ if state.src[pos] != "%":
42
+ return False
43
+
44
+ if silent:
45
+ return True
46
+
47
+ token = state.push("myst_line_comment", "", 0)
48
+ token.attrSet("class", "myst-line-comment")
49
+ token.content = state.src[pos + 1 : maximum].rstrip()
50
+ token.markup = "%"
51
+
52
+ # search end of block while appending lines to `token.content`
53
+ for nextLine in itertools.count(startLine + 1):
54
+ if nextLine >= endLine:
55
+ break
56
+ pos = state.bMarks[nextLine] + state.tShift[nextLine]
57
+ maximum = state.eMarks[nextLine]
58
+
59
+ if state.src[pos] != "%":
60
+ break
61
+ token.content += "\n" + state.src[pos + 1 : maximum].rstrip()
62
+
63
+ state.line = nextLine
64
+ token.map = [startLine, nextLine]
65
+
66
+ return True
67
+
68
+
69
def block_break(state: StateBlock, startLine: int, endLine: int, silent: bool):
    """Block rule: parse a MyST block break (three or more ``+`` markers).

    Emits a ``myst_block_break`` token rendered with the ``hr`` tag; any
    trailing text on the line becomes the token content.
    """
    pos = state.bMarks[startLine] + state.tShift[startLine]
    maximum = state.eMarks[startLine]

    # if it's indented more than 3 spaces, it should be a code block
    if state.sCount[startLine] - state.blkIndent >= 4:
        return False

    marker = state.srcCharCode[pos]
    pos += 1

    # Check block marker /* + */  (0x2B is "+")
    if marker != 0x2B:
        return False

    # markers can be mixed with spaces, but there should be at least 3 of them

    cnt = 1
    while pos < maximum:
        ch = state.srcCharCode[pos]
        if ch != marker and not isSpace(ch):
            break
        if ch == marker:
            cnt += 1
        pos += 1

    if cnt < 3:
        return False

    # Validation-only run: just report that the rule matches.
    if silent:
        return True

    state.line = startLine + 1

    token = state.push("myst_block_break", "hr", 0)
    token.attrSet("class", "myst-block")
    # Text remaining after the marker run (if any) becomes the content.
    token.content = state.src[pos:maximum].strip()
    token.map = [startLine, state.line]
    token.markup = chr(marker) * cnt

    return True
111
+
112
+
113
def target(state: StateBlock, startLine: int, endLine: int, silent: bool):
    """Block rule: parse a MyST target line of the form ``(name)=``.

    The whole (stripped) line must be ``(...)=`` with a non-empty name;
    the name becomes the content of a ``myst_target`` token.
    """
    pos = state.bMarks[startLine] + state.tShift[startLine]
    maximum = state.eMarks[startLine]

    # if it's indented more than 3 spaces, it should be a code block
    if state.sCount[startLine] - state.blkIndent >= 4:
        return False

    text = state.src[pos:maximum].strip()
    if not text.startswith("("):
        return False
    if not text.endswith(")="):
        return False
    # Reject empty targets: "()=".
    if not text[1:-2]:
        return False

    # Validation-only run: just report that the rule matches.
    if silent:
        return True

    state.line = startLine + 1

    token = state.push("myst_target", "", 0)
    token.attrSet("class", "myst-target")
    token.content = text[1:-2]
    token.map = [startLine, state.line]

    return True
141
+
142
+
143
def render_myst_target(self, tokens, idx, options, env):
    """Render a ``myst_target`` token as a div containing a self-link."""
    label = tokens[idx].content
    link = '<a href="#{0}">({0})=</a>'.format(label)
    return '<div class="myst-target">' + link + "</div>"
148
+
149
+
150
def render_myst_line_comment(self, tokens, idx, options, env):
    """Render a ``myst_line_comment`` token as an HTML comment."""
    # Strip leading whitespace from all lines
    raw_lines = tokens[idx].content.split("\n")
    content = "\n".join(map(str.lstrip, raw_lines))
    return "<!-- " + escapeHtml(content) + " -->"
parrot/lib/python3.10/site-packages/mdit_py_plugins/texmath/README.md ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [![License](https://img.shields.io/github/license/goessner/markdown-it-texmath.svg)](https://github.com/goessner/markdown-it-texmath/blob/master/licence.txt)
2
+ [![npm](https://img.shields.io/npm/v/markdown-it-texmath.svg)](https://www.npmjs.com/package/markdown-it-texmath)
3
+ [![npm](https://img.shields.io/npm/dt/markdown-it-texmath.svg)](https://www.npmjs.com/package/markdown-it-texmath)
4
+
5
+ # markdown-it-texmath
6
+
7
+ Add TeX math equations to your Markdown documents rendered by [markdown-it](https://github.com/markdown-it/markdown-it) parser. [KaTeX](https://github.com/Khan/KaTeX) is used as a fast math renderer.
8
+
9
+ ## Features
10
+ Simplify the process of authoring markdown documents containing math formulas.
11
+ This extension is a convenient tool for scientists, engineers and students who use Markdown as their first-choice document format.
12
+
13
+ * Macro support
14
+ * Simple formula numbering
15
+ * Inline math with tables, lists and blockquote.
16
+ * User setting delimiters:
17
+ * `'dollars'` (default)
18
+ * inline: `$...$`
19
+ * display: `$$...$$`
20
+ * display + equation number: `$$...$$ (1)`
21
+ * `'brackets'`
22
+ * inline: `\(...\)`
23
+ * display: `\[...\]`
24
+ * display + equation number: `\[...\] (1)`
25
+ * `'gitlab'`
26
+ * inline: ``$`...`$``
27
+ * display: `` ```math ... ``` ``
28
+ * display + equation number: `` ```math ... ``` (1)``
29
+ * `'julia'`
30
+ * inline: `$...$` or ``` ``...`` ```
31
+ * display: `` ```math ... ``` ``
32
+ * display + equation number: `` ```math ... ``` (1)``
33
+ * `'kramdown'`
34
+ * inline: ``$$...$$``
35
+ * display: `$$...$$`
36
+ * display + equation number: `$$...$$ (1)`
37
+
38
+ ## Show me
39
+
40
+ View a [test table](https://goessner.github.io/markdown-it-texmath/index.html).
41
+
42
+ [try it out ...](https://goessner.github.io/markdown-it-texmath/markdown-it-texmath-demo.html)
43
+
44
+ ## Use with `node.js`
45
+
46
+ Install the extension. Make sure `markdown-it` and `katex` are already installed.
47
+ ```
48
+ npm install markdown-it-texmath
49
+ ```
50
+ Use it with JavaScript.
51
+ ```js
52
+ let kt = require('katex'),
53
+ tm = require('markdown-it-texmath').use(kt),
54
+ md = require('markdown-it')().use(tm,{delimiters:'dollars',macros:{"\\RR": "\\mathbb{R}"}});
55
+
56
+ md.render('Euler\'s identity \(e^{i\pi}+1=0\) is a beautiful formula in $\\RR 2$.')
57
+ ```
58
+
59
+ ## Use in Browser
60
+ ```html
61
+ <html>
62
+ <head>
63
+ <meta charset='utf-8'>
64
+ <link rel="stylesheet" href="katex.min.css">
65
+ <link rel="stylesheet" href="texmath.css">
66
+ <script src="markdown-it.min.js"></script>
67
+ <script src="katex.min.js"></script>
68
+ <script src="texmath.js"></script>
69
+ </head>
70
+ <body>
71
+ <div id="out"></div>
72
+ <script>
73
+ let md;
74
+ document.addEventListener("DOMContentLoaded", () => {
75
+ const tm = texmath.use(katex);
76
+ md = markdownit().use(tm,{delimiters:'dollars',macros:{"\\RR": "\\mathbb{R}"}});
77
+ out.innerHTML = md.render('Euler\'s identity $e^{i\pi}+1=0$ is a beautiful formula in //RR 2.');
78
+ })
79
+ </script>
80
+ </body>
81
+ </html>
82
+ ```
83
+ ## CDN
84
+
85
+ Use following links for `texmath.js` and `texmath.css`
86
+ * `https://gitcdn.xyz/cdn/goessner/markdown-it-texmath/master/texmath.js`
87
+ * `https://gitcdn.xyz/cdn/goessner/markdown-it-texmath/master/texmath.css`
88
+
89
+ ## Dependencies
90
+
91
+ * [`markdown-it`](https://github.com/markdown-it/markdown-it): Markdown parser done right. Fast and easy to extend.
92
+ * [`katex`](https://github.com/Khan/KaTeX): This is where credits for fast rendering TeX math in HTML go to.
93
+
94
+ ## ToDo
95
+
96
+ nothing yet
97
+
98
+ ## FAQ
99
+
100
+ * __`markdown-it-texmath` with React Native does not work, why ?__
101
+ * `markdown-it-texmath` is using regular expressions with `y` [(sticky) property](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/sticky) and cannot avoid this. The use of the `y` flag in regular expressions means the plugin is not compatible with React Native (which as of now doesn't support it and throws an error `Invalid flags supplied to RegExp constructor`).
102
+
103
+ ## CHANGELOG
104
+
105
+ ### [0.6.0] on October 04, 2019
106
+ * Add support for [Julia Markdown](https://docs.julialang.org/en/v1/stdlib/Markdown/) on [request](https://github.com/goessner/markdown-it-texmath/issues/15).
107
+
108
+ ### [0.5.5] on February 07, 2019
109
+ * Remove [rendering bug with brackets delimiters](https://github.com/goessner/markdown-it-texmath/issues/9).
110
+
111
+ ### [0.5.4] on January 20, 2019
112
+ * Remove pathological [bug within blockquotes](https://github.com/goessner/mdmath/issues/50).
113
+
114
+ ### [0.5.3] on November 11, 2018
115
+ * Add support for Tex macros (https://katex.org/docs/supported.html#macros) .
116
+ * Bug with [brackets delimiters](https://github.com/goessner/markdown-it-texmath/issues/9) .
117
+
118
+ ### [0.5.2] on September 07, 2018
119
+ * Add support for [Kramdown](https://kramdown.gettalong.org/) .
120
+
121
+ ### [0.5.0] on August 15, 2018
122
+ * Fatal blockquote bug investigated. Implemented workaround to vscode bug, which has finally gone with vscode 1.26.0 .
123
+
124
+ ### [0.4.6] on January 05, 2018
125
+ * Escaped underscore bug removed.
126
+
127
+ ### [0.4.5] on November 06, 2017
128
+ * Backslash bug removed.
129
+
130
+ ### [0.4.4] on September 27, 2017
131
+ * Modifying the `block` mode regular expression with `gitlab` delimiters, so removing the `newline` bug.
132
+
133
+ ## License
134
+
135
+ `markdown-it-texmath` is licensed under the [MIT License](./license.txt)
136
+
137
+ © [Stefan Gössner](https://github.com/goessner)
parrot/lib/python3.10/site-packages/mdit_py_plugins/texmath/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ from .index import texmath_plugin # noqa F401
parrot/lib/python3.10/site-packages/simple_parsing/__init__.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Simple, Elegant Argument parsing.
2
+
3
+ @author: Fabrice Normandin
4
+ """
5
+ from . import helpers, utils, wrappers
6
+ from .conflicts import ConflictResolution
7
+ from .decorators import main
8
+ from .help_formatter import SimpleHelpFormatter
9
+ from .helpers import (
10
+ Partial,
11
+ Serializable,
12
+ choice,
13
+ config_for,
14
+ field,
15
+ flag,
16
+ list_field,
17
+ mutable_field,
18
+ subgroups,
19
+ subparsers,
20
+ )
21
+ from .parsing import (
22
+ ArgumentGenerationMode,
23
+ ArgumentParser,
24
+ DashVariant,
25
+ NestedMode,
26
+ ParsingError,
27
+ parse,
28
+ parse_known_args,
29
+ )
30
+ from .replace import replace, replace_subgroups
31
+ from .utils import InconsistentArgumentError
32
+
33
+ __all__ = [
34
+ "ArgumentGenerationMode",
35
+ "ArgumentParser",
36
+ "choice",
37
+ "config_for",
38
+ "ConflictResolution",
39
+ "DashVariant",
40
+ "field",
41
+ "flag",
42
+ "helpers",
43
+ "InconsistentArgumentError",
44
+ "list_field",
45
+ "main",
46
+ "mutable_field",
47
+ "NestedMode",
48
+ "parse_known_args",
49
+ "parse",
50
+ "ParsingError",
51
+ "Partial",
52
+ "replace",
53
+ "replace_subgroups",
54
+ "Serializable",
55
+ "SimpleHelpFormatter",
56
+ "subgroups",
57
+ "subparsers",
58
+ "utils",
59
+ "wrappers",
60
+ ]
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.11 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/conflicts.cpython-310.pyc ADDED
Binary file (13.6 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/decorators.cpython-310.pyc ADDED
Binary file (3.89 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/docstring.cpython-310.pyc ADDED
Binary file (8.7 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/help_formatter.cpython-310.pyc ADDED
Binary file (3.57 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/parsing.cpython-310.pyc ADDED
Binary file (30.5 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/replace.cpython-310.pyc ADDED
Binary file (6.19 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/__pycache__/utils.cpython-310.pyc ADDED
Binary file (29.5 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/__init__.py ADDED
File without changes
parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (183 Bytes). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/__pycache__/get_field_annotations.cpython-310.pyc ADDED
Binary file (7.63 kB). View file
 
parrot/lib/python3.10/site-packages/simple_parsing/annotation_utils/get_field_annotations.py ADDED
@@ -0,0 +1,251 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import inspect
3
+ import sys
4
+ import types
5
+ import typing
6
+ from contextlib import contextmanager
7
+ from dataclasses import InitVar
8
+ from itertools import dropwhile
9
+ from logging import getLogger as get_logger
10
+ from typing import Any, Dict, Iterator, Optional, get_type_hints
11
+
12
+ logger = get_logger(__name__)
13
+
14
+ # NOTE: This dict is used to enable forward compatibility with things such as `tuple[int, str]`,
15
+ # `list[float]`, etc. when using `from __future__ import annotations`.
16
+ forward_refs_to_types = {
17
+ "tuple": typing.Tuple,
18
+ "set": typing.Set,
19
+ "dict": typing.Dict,
20
+ "list": typing.List,
21
+ "type": typing.Type,
22
+ }
23
+
24
+
25
+ @contextmanager
26
+ def _initvar_patcher() -> Iterator[None]:
27
+ """Patch InitVar to not fail when annotations are postponed.
28
+
29
+ `TypeVar('Forward references must evaluate to types. Got dataclasses.InitVar[tp].')` is raised
30
+ when postponed annotations are enabled and `get_type_hints` is called
31
+ Bug is mentioned here https://github.com/python/cpython/issues/88962
32
+ In python 3.11 this is fixed, but backport fix is not planned for old releases
33
+
34
+ Workaround is mentioned here https://stackoverflow.com/q/70400639
35
+ """
36
+ if sys.version_info[:2] < (3, 11):
37
+ InitVar.__call__ = lambda *args: None
38
+ yield
39
+ if sys.version_info[:2] < (3, 11):
40
+ del InitVar.__call__
41
+
42
+
43
+ def evaluate_string_annotation(annotation: str, containing_class: Optional[type] = None) -> type:
44
+ """Attempts to evaluate the given annotation string, to get a 'live' type annotation back.
45
+
46
+ Any exceptions that are raised when evaluating are raised directly as-is.
47
+
48
+ NOTE: This is probably not 100% safe. I mean, if the user code puts urls and stuff in their
49
+ type annotations, and then uses simple-parsing, then sure, that code might get executed. But
50
+ I don't think it's my job to prevent them from shooting themselves in the foot, you know what I
51
+ mean?
52
+ """
53
+ # The type of the field might be a string when using `from __future__ import annotations`.
54
+ # Get the local and global namespaces to pass to the `get_type_hints` function.
55
+ local_ns: Dict[str, Any] = {"typing": typing, **vars(typing)}
56
+ local_ns.update(forward_refs_to_types)
57
+ global_ns = {}
58
+ if containing_class:
59
+ # Get the globals in the module where the class was defined.
60
+ global_ns = sys.modules[containing_class.__module__].__dict__
61
+
62
+ if "|" in annotation:
63
+ annotation = _get_old_style_annotation(annotation)
64
+ evaluated_t: type = eval(annotation, local_ns, global_ns)
65
+ return evaluated_t
66
+
67
+
68
+ def _replace_UnionType_with_typing_Union(annotation):
69
+ from simple_parsing.utils import builtin_types, is_dict, is_list, is_tuple
70
+
71
+ if sys.version_info[:2] < (3, 10):
72
+ # This is only useful for python 3.10+ (where UnionTypes exist).
73
+ # Therefore just return the annotation as-is.
74
+ return annotation
75
+
76
+ if isinstance(annotation, types.UnionType): # type: ignore
77
+ union_args = typing.get_args(annotation)
78
+ new_union_args = tuple(_replace_UnionType_with_typing_Union(arg) for arg in union_args)
79
+ return typing.Union[new_union_args] # type: ignore
80
+ if is_list(annotation):
81
+ item_annotation = typing.get_args(annotation)[0]
82
+ new_item_annotation = _replace_UnionType_with_typing_Union(item_annotation)
83
+ return typing.List[new_item_annotation]
84
+ if is_tuple(annotation):
85
+ item_annotations = typing.get_args(annotation)
86
+ new_item_annotations = tuple(
87
+ _replace_UnionType_with_typing_Union(arg) for arg in item_annotations
88
+ )
89
+ return typing.Tuple[new_item_annotations] # type: ignore
90
+ if is_dict(annotation):
91
+ annotations = typing.get_args(annotation)
92
+ if not annotations:
93
+ return typing.Dict
94
+ assert len(annotations) == 2
95
+ key_annotation = annotations[0]
96
+ value_annotation = annotations[1]
97
+ new_key_annotation = _replace_UnionType_with_typing_Union(key_annotation)
98
+ new_value_annotation = _replace_UnionType_with_typing_Union(value_annotation)
99
+ return typing.Dict[new_key_annotation, new_value_annotation]
100
+ if annotation in builtin_types:
101
+ return annotation
102
+ if inspect.isclass(annotation):
103
+ return annotation
104
+ raise NotImplementedError(annotation)
105
+
106
+
107
+ # # return forward_refs_to_types.get(ann, local_ns.get(ann, global_ns.get(ann, getattr(builtins, ann, ann))))
108
+
109
+
110
+ def _not_supported(annotation) -> typing.NoReturn:
111
+ raise NotImplementedError(f"Don't yet support annotations like this: {annotation}")
112
+
113
+
114
+ def _get_old_style_annotation(annotation: str) -> str:
115
+ """Replaces A | B with Union[A,B] in the annotation."""
116
+ # TODO: Add proper support for things like `list[int | float]`, which isn't currently
117
+ # working, even without the new-style union.
118
+ if "|" not in annotation:
119
+ return annotation
120
+
121
+ annotation = annotation.strip()
122
+ if "[" not in annotation:
123
+ assert "]" not in annotation
124
+ return "Union[" + ", ".join(v.strip() for v in annotation.split("|")) + "]"
125
+
126
+ before, lsep, rest = annotation.partition("[")
127
+ middle, rsep, after = rest.rpartition("]")
128
+ # BUG: Need to handle things like bob[int] | None
129
+ assert (
130
+ not after.strip()
131
+ ), f"can't have text at HERE in <something>[<something>]<HERE>!: {annotation}"
132
+
133
+ if "|" in before or "|" in after:
134
+ _not_supported(annotation)
135
+ assert "|" in middle
136
+
137
+ if "," in middle:
138
+ parts = [v.strip() for v in middle.split(",")]
139
+ parts = [_get_old_style_annotation(part) for part in parts]
140
+ middle = ", ".join(parts)
141
+
142
+ new_middle = _get_old_style_annotation(annotation=middle)
143
+ new_annotation = before + lsep + new_middle + rsep + after
144
+ return new_annotation
145
+
146
+
147
+ def _replace_new_union_syntax_with_old_union_syntax(
148
+ annotations_dict: Dict[str, str], context: collections.ChainMap
149
+ ) -> Dict[str, Any]:
150
+ new_annotations = annotations_dict.copy()
151
+ for field, annotation_str in annotations_dict.items():
152
+ updated_annotation = _get_old_style_annotation(annotation_str)
153
+ new_annotations[field] = updated_annotation
154
+
155
+ return new_annotations
156
+
157
+
158
+ def get_field_type_from_annotations(some_class: type, field_name: str) -> type:
159
+ """Get the annotation for the given field, in the 'old-style' format with types from
160
+ typing.List, typing.Union, etc.
161
+
162
+ If the script uses `from __future__ import annotations`, and we are in python<3.9,
163
+ Then we need to actually first make this forward-compatibility 'patch' so that we
164
+ don't run into a "`type` object is not subscriptable" error.
165
+
166
+ NOTE: If you get errors of this kind from the function below, then you might want to add an
167
+ entry to the `forward_refs_to_types` dict above.
168
+ """
169
+
170
+ # Pretty hacky: Modify the type annotations of the class (preferably a copy of the class
171
+ # if possible, to avoid modifying things in-place), and replace the `a | b`-type
172
+ # expressions with `Union[a, b]`, so that `get_type_hints` doesn't raise an error.
173
+ # The type of the field might be a string when using `from __future__ import annotations`.
174
+
175
+ # The type of the field might be a string when using `from __future__ import annotations`.
176
+ # Get the local and global namespaces to pass to the `get_type_hints` function.
177
+ local_ns: Dict[str, Any] = {"typing": typing, **vars(typing)}
178
+ local_ns.update(forward_refs_to_types)
179
+
180
+ # NOTE: Get the local namespace of the calling function / module where this class is defined,
181
+ # and use it to get the correct type of the field, if it is a forward reference.
182
+ frame = inspect.currentframe()
183
+ # stack = []
184
+ while frame.f_back is not None and frame.f_locals.get(some_class.__name__) is not some_class:
185
+ # stack.append(frame)
186
+ frame = frame.f_back
187
+ # Found the frame with the dataclass definition. Update the locals. This makes it possible to
188
+ # use dataclasses defined in local scopes!
189
+ if frame is not None:
190
+ local_ns.update(frame.f_locals)
191
+
192
+ # Get the global_ns in the module starting from the deepest base until the module with the field_name last definition.
193
+ global_ns = {}
194
+ classes_to_iterate = list(
195
+ dropwhile(
196
+ lambda cls: field_name not in getattr(cls, "__annotations__", {}), some_class.mro()
197
+ )
198
+ )
199
+ for base_cls in reversed(classes_to_iterate):
200
+ global_ns.update(sys.modules[base_cls.__module__].__dict__)
201
+
202
+ try:
203
+ with _initvar_patcher():
204
+ annotations_dict = get_type_hints(some_class, localns=local_ns, globalns=global_ns)
205
+ except TypeError:
206
+ annotations_dict = collections.ChainMap(
207
+ *[getattr(cls, "__annotations__", {}) for cls in some_class.mro()]
208
+ )
209
+
210
+ if field_name not in annotations_dict:
211
+ raise ValueError(f"Field {field_name} not found in annotations of class {some_class}")
212
+
213
+ field_type = annotations_dict[field_name]
214
+
215
+ if sys.version_info[:2] >= (3, 7) and isinstance(field_type, typing.ForwardRef):
216
+ # Weird bug happens when mixing postponed evaluation of type annotations + forward
217
+ # references: The ForwardRefs are left as-is, and not evaluated!
218
+ forward_arg = field_type.__forward_arg__
219
+ field_type = forward_arg
220
+
221
+ if sys.version_info >= (3, 10) and isinstance(field_type, types.UnionType):
222
+ # In python >= 3.10, int | float is allowed. Therefore, just to be consistent, we want
223
+ # to convert those into the corresponding typing.Union type.
224
+ # This is necessary for the rest of the code to work, since it's all based on typing.Union.
225
+ field_type = _replace_UnionType_with_typing_Union(field_type)
226
+
227
+ if isinstance(field_type, str) and "|" in field_type:
228
+ field_type = _get_old_style_annotation(field_type)
229
+
230
+ # Pretty hacky:
231
+ # In order to use `get_type_hints`, we need to pass it a class. We can't just ask it to
232
+ # evaluate a single annotation. Therefore, we create a temporary class and set it's
233
+ # __annotation__ attribute, which is introspected by `get_type_hints`.
234
+
235
+ try:
236
+
237
+ class Temp_:
238
+ pass
239
+
240
+ Temp_.__annotations__ = {field_name: field_type}
241
+ with _initvar_patcher():
242
+ annotations_dict = get_type_hints(Temp_, globalns=global_ns, localns=local_ns)
243
+ field_type = annotations_dict[field_name]
244
+ except Exception:
245
+ logger.warning(
246
+ f"Unable to evaluate forward reference {field_type} for field '{field_name}'.\n"
247
+ f"Leaving it as-is."
248
+ )
249
+ field_type = field_type
250
+
251
+ return field_type
parrot/lib/python3.10/site-packages/simple_parsing/conflicts.py ADDED
@@ -0,0 +1,389 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import enum
4
+ from collections import defaultdict
5
+ from logging import getLogger
6
+ from typing import NamedTuple
7
+
8
+ from .wrappers import DataclassWrapper, FieldWrapper
9
+
10
+ logger = getLogger(__name__)
11
+
12
+
13
+ class ConflictResolution(enum.Enum):
14
+ """Determines prefixing when adding the same dataclass more than once.
15
+
16
+ - NONE:
17
+ Disallow using the same dataclass in two different destinations without
18
+ explicitly setting a distinct prefix for at least one of them.
19
+
20
+ - EXPLICIT:
21
+ When adding arguments for a dataclass that is already present, the
22
+ argparse arguments for each class will use their full absolute path as a
23
+ prefix.
24
+
25
+ - ALWAYS_MERGE:
26
+ When adding arguments for a dataclass that has previously been added,
27
+ the arguments for both the old and new destinations will be set using
28
+ the same option_string, and the passed values for the old and new
29
+ destinations will correspond to the first and second values,
30
+ respectively.
31
+ NOTE: This changes the argparse type for that argument into a list of
32
+ the original item type.
33
+
34
+ - AUTO (default):
35
+ Prefixes for each destination are created automatically, using the first
36
+ discriminative prefix that can differentiate between all the conflicting
37
+ arguments.
38
+ """
39
+
40
+ NONE = -1
41
+ EXPLICIT = 0
42
+ ALWAYS_MERGE = 1
43
+ AUTO = 2
44
+
45
+
46
+ class ConflictResolutionError(Exception):
47
+ def __init__(self, *args, **kwargs):
48
+ super().__init__(*args, **kwargs)
49
+
50
+
51
+ class Conflict(NamedTuple):
52
+ option_string: str
53
+ wrappers: list[FieldWrapper]
54
+
55
+
56
+ def unflatten(possibly_related_wrappers: list[DataclassWrapper]) -> list[DataclassWrapper]:
57
+ return [wrapper for wrapper in possibly_related_wrappers if wrapper.parent is None]
58
+
59
+
60
+ class ConflictResolver:
61
+ def __init__(self, conflict_resolution=ConflictResolution.AUTO):
62
+ self.conflict_resolution = conflict_resolution
63
+ self.max_attempts = 50
64
+
65
+ def resolve_and_flatten(self, wrappers: list[DataclassWrapper]) -> list[DataclassWrapper]:
66
+ """Given the list of all dataclass wrappers, find and resolve any conflicts between fields.
67
+
68
+ Returns the new list of (possibly mutated in-place) dataclass wrappers. This returned list
69
+ is flattened, i.e. it contains all the dataclass wrappers and their children.
70
+ """
71
+ from simple_parsing.parsing import _assert_no_duplicates, _flatten_wrappers
72
+
73
+ wrappers = wrappers.copy()
74
+
75
+ _assert_no_duplicates(wrappers)
76
+ wrappers_flat = _flatten_wrappers(wrappers)
77
+
78
+ dests = [w.dest for w in wrappers_flat]
79
+ assert len(dests) == len(set(dests)), f"shouldn't be any duplicates: {wrappers_flat}"
80
+
81
+ conflict = self.get_conflict(wrappers_flat)
82
+
83
+ # current and maximum number of attempts. When reached, raises an error.
84
+ cur_attempts = 0
85
+ while conflict:
86
+ message: str = (
87
+ "The following wrappers are in conflict, as they share the "
88
+ + f"'{conflict.option_string}' option string:"
89
+ + ("\n".join(str(w) for w in conflict.wrappers))
90
+ + f"(Conflict Resolution mode is {self.conflict_resolution})"
91
+ )
92
+ logger.debug(message)
93
+
94
+ if self.conflict_resolution == ConflictResolution.NONE:
95
+ raise ConflictResolutionError(message)
96
+
97
+ elif self.conflict_resolution == ConflictResolution.EXPLICIT:
98
+ self._fix_conflict_explicit(conflict)
99
+
100
+ elif self.conflict_resolution == ConflictResolution.ALWAYS_MERGE:
101
+ wrappers_flat = self._fix_conflict_merge(conflict, wrappers_flat)
102
+
103
+ elif self.conflict_resolution == ConflictResolution.AUTO:
104
+ self._fix_conflict_auto(conflict)
105
+
106
+ conflict = self.get_conflict(wrappers_flat)
107
+ cur_attempts += 1
108
+ if cur_attempts == self.max_attempts:
109
+ raise ConflictResolutionError(
110
+ f"Reached maximum number of attempts ({self.max_attempts}) "
111
+ "while trying to solve the conflicting argument names. "
112
+ "This is either a bug, or there is something weird going "
113
+ "on with your class hierarchy/argument names... \n"
114
+ "In any case, Please help us by submitting an issue on "
115
+ "the Github repo at "
116
+ "https://github.com/lebrice/SimpleParsing/issues, "
117
+ "or by using the following link: "
118
+ "https://github.com/lebrice/SimpleParsing/issues/new?"
119
+ "assignees=lebrice&"
120
+ "labels=bug"
121
+ "&template=bug_report.md"
122
+ "&title=BUG: ConflictResolutionError"
123
+ )
124
+
125
+ assert not self._conflict_exists(wrappers_flat)
126
+ return wrappers_flat
127
+
128
+ def resolve(self, wrappers: list[DataclassWrapper]) -> list[DataclassWrapper]:
129
+ return unflatten(self.resolve_and_flatten(wrappers))
130
+
131
+ def get_conflict(
132
+ self, wrappers: list[DataclassWrapper] | list[FieldWrapper]
133
+ ) -> Conflict | None:
134
+ field_wrappers: list[FieldWrapper] = []
135
+ for w in wrappers:
136
+ if isinstance(w, DataclassWrapper):
137
+ field_wrappers.extend(w.fields)
138
+ # logger.debug(f"Wrapper {w.dest} has fields {w.fields}")
139
+ else:
140
+ field_wrappers.append(w)
141
+
142
+ assert len(field_wrappers) == len(set(field_wrappers)), "duplicates?"
143
+
144
+ # TODO: #49: Also consider the conflicts with regular argparse arguments.
145
+ conflicts: dict[str, list[FieldWrapper]] = defaultdict(list)
146
+ for field_wrapper in field_wrappers:
147
+ for option_string in field_wrapper.option_strings:
148
+ conflicts[option_string].append(field_wrapper)
149
+ # logger.debug(f"conflicts[{option_string}].append({repr(field_wrapper)})")
150
+
151
+ for option_string, field_wrappers in conflicts.items():
152
+ if len(field_wrappers) > 1:
153
+ return Conflict(option_string, field_wrappers)
154
+ return None
155
+
156
+ def _add(
157
+ self,
158
+ wrapper: DataclassWrapper | FieldWrapper,
159
+ wrappers: list[DataclassWrapper],
160
+ ) -> list[DataclassWrapper]:
161
+ """Add the given wrapper and all its descendants to the list of wrappers."""
162
+ if isinstance(wrapper, FieldWrapper):
163
+ wrapper = wrapper.parent
164
+ assert isinstance(wrapper, DataclassWrapper)
165
+ logger.debug(f"Adding new DataclassWrapper: {wrapper}")
166
+ wrappers.append(wrapper)
167
+ wrappers.extend(wrapper.descendants)
168
+
169
+ return wrappers
170
+
171
+ def _remove(
172
+ self,
173
+ wrapper: DataclassWrapper | FieldWrapper,
174
+ wrappers: list[DataclassWrapper],
175
+ ):
176
+ """Remove the given wrapper and all its descendants from the list of wrappers."""
177
+ if isinstance(wrapper, FieldWrapper):
178
+ wrapper = wrapper.parent
179
+ assert isinstance(wrapper, DataclassWrapper)
180
+ logger.debug(f"Removing DataclassWrapper {wrapper}")
181
+ wrappers.remove(wrapper)
182
+ for child in wrapper.descendants:
183
+ logger.debug(f"\tAlso removing Child DataclassWrapper {child}")
184
+ wrappers.remove(child)
185
+ # TODO: Should we also remove the reference to this wrapper from its parent?
186
+ for other_wrapper in wrappers:
187
+ if wrapper in other_wrapper._children:
188
+ other_wrapper._children.remove(wrapper)
189
+ return wrappers
190
+
191
+ def _fix_conflict_explicit(self, conflict: Conflict):
192
+ """Fixes conflicts between arguments following the "Explicit" approach.
193
+
194
+ The Explicit approach gives a prefix to each argument which points to
195
+ exactly where the argument is stored in the resulting Namespace. There
196
+ can therefore not be any confusion between arguments, at the cost of
197
+ having lengthy option strings.
198
+
199
+ Parameters
200
+ ----------
201
+ - conflict : Conflict
202
+
203
+ The conflict to hangle/fix.
204
+
205
+ Raises
206
+ ------
207
+ ConflictResolutionError
208
+ If its impossibe to fix the conflict.
209
+ """
210
+ logger.debug(f"fixing explicit conflict: {conflict}")
211
+
212
+ if any(w.prefix for w in conflict.wrappers):
213
+ raise ConflictResolutionError(
214
+ "When in 'Explicit' mode, there shouldn't be a need for any user-set prefixes."
215
+ "Just let the ArgumentParser set the explicit prefixes for all fields, and there won't be a conflict."
216
+ )
217
+ # TODO: Only set an explicit prefix on the fields that are in conflict
218
+
219
+ # Check that there is no conflict between the fields after setting the explicit prefix.
220
+ # If there is, that means that this conflict can't be fixed automatically, and a manual prefix should be set by the user.
221
+ for field_wrapper in conflict.wrappers:
222
+ explicit_prefix = field_wrapper.parent.dest + "."
223
+ field_wrapper.prefix = explicit_prefix
224
+
225
+ another_conflict = self.get_conflict(conflict.wrappers)
226
+ if another_conflict and another_conflict.option_string == conflict.option_string:
227
+ raise ConflictResolutionError(
228
+ f"There is a conflict over the '{conflict.option_string}' "
229
+ "option string, even after adding an explicit prefix to all "
230
+ "the conflicting fields! \n"
231
+ "To solve this, You can either use a different argument name, "
232
+ "a different destination, or pass a differentiating prefix to "
233
+ "`parser.add_arguments(<dataclass>, dest=destination, "
234
+ "prefix=prefix)`"
235
+ )
236
+
237
+ def _fix_conflict_auto(self, conflict: Conflict):
238
+ """Fixes a conflict using the AUTO method.
239
+
240
+ Tries to find a discriminating prefix of minimal length for all the conflicting fields, using roughly the following pseudocode:
241
+
242
+ 1. Sort the field wrappers by ascending nesting level.
243
+ ("parent/root" wrappers first, children "leaf" wrappers last)
244
+ 2. If the first wrapper is less nested than the others, remove it from the list (don't change its prefix)
245
+ 3. For all the remaining wrappers, add one more "word" from their lineage (dest attribute) to their prefix,
246
+ starting from the end and moving towards the parent.
247
+ 4. If there is no conflict left, exit, else, return to step 1 with the new conflict.
248
+ (This is performed implicitly by the method that calls this function, since it loops while there is a conflict).
249
+
250
+ Parameters
251
+ ----------
252
+ - conflict : Conflict
253
+
254
+ The Conflict NamedTuple containing the conflicting option_string, as well as the conflicting `FieldWrapper`s.
255
+
256
+ Raises
257
+ ------
258
+ ConflictResolutionError
259
+ If its impossibe to fix the conflict.
260
+ """
261
+ field_wrappers = sorted(conflict.wrappers, key=lambda w: w.nesting_level)
262
+ logger.debug(f"Conflict with options string '{conflict.option_string}':")
263
+ for i, field in enumerate(field_wrappers):
264
+ logger.debug(f"Field wrapper #{i+1}: {field} nesting level: {field.nesting_level}.")
265
+
266
+ assert (
267
+ len(set(field_wrappers)) >= 2
268
+ ), "Need at least 2 (distinct) FieldWrappers to have a conflict..."
269
+
270
+ first_wrapper = field_wrappers[0]
271
+ second_wrapper = field_wrappers[1]
272
+ if first_wrapper.nesting_level < second_wrapper.nesting_level:
273
+ # IF the first field_wrapper is a 'parent' of the following field_wrappers, then it maybe doesn't need an additional prefix.
274
+ logger.debug(
275
+ f"The first FieldWrapper is less nested than the others, removing it. ({first_wrapper})"
276
+ )
277
+ field_wrappers.remove(first_wrapper)
278
+
279
+ # add one more word to each of the remaining field_wrappers.
280
+ for field_wrapper in field_wrappers:
281
+ # Get the current and explicit (maximum) prefix:
282
+ current_prefix = field_wrapper.prefix
283
+ explicit_prefix = field_wrapper.parent.dest + "."
284
+
285
+ logger.debug(f"current prefix: {current_prefix}, explicit prefix: {explicit_prefix}")
286
+ if current_prefix == explicit_prefix:
287
+ # We can't add any more words to the prefix of this FieldWrapper,
288
+ # as it has already a prefix equivalent to its full destination...
289
+ raise ConflictResolutionError(
290
+ " ".join(
291
+ [
292
+ f"Cannot fix the conflict for the Options string {conflict.option_string},",
293
+ f"as the field {field_wrapper} already has the most explicit",
294
+ "prefix possible, and thus we can't add an additional",
295
+ "discriminating word to its prefix.",
296
+ "\n Consider modifying either the destination or the prefix",
297
+ "passed to `parser.add_arguments(<dataclass>, dest=destination, prefix=prefix)",
298
+ ]
299
+ )
300
+ )
301
+
302
+ # find the next 'word' to add to the prefix.
303
+ available_words = list(filter(bool, explicit_prefix.split(".")))
304
+ used_words = list(filter(bool, current_prefix.split(".")))
305
+ assert len(available_words) > len(
306
+ used_words
307
+ ), "There should at least one word we haven't used yet!"
308
+ logger.debug(f"Available words: {available_words}, used_words: {used_words}")
309
+
310
+ n_available_words = len(available_words)
311
+ n_used_words = len(used_words)
312
+ word_to_add = available_words[(n_available_words - 1) - n_used_words]
313
+ logger.debug(f"Word to be added: {word_to_add}")
314
+ field_wrapper.prefix = word_to_add + "." + current_prefix
315
+ logger.debug(f"New prefix: {field_wrapper.prefix}")
316
+
317
+ def _fix_conflict_merge(self, conflict: Conflict, wrappers_flat: list[DataclassWrapper]):
318
+ """Fix conflicts using the merging approach.
319
+
320
+ The first wrapper is kept, and the rest of the wrappers are absorbed
321
+ into the first wrapper.
322
+
323
+ # TODO: check that the ordering of arguments is still preserved!
324
+
325
+ Parameters
326
+ ----------
327
+ conflict : Conflict
328
+ The conflict NamedTuple.
329
+ """
330
+ fields = sorted(conflict.wrappers, key=lambda w: w.nesting_level)
331
+ logger.debug(f"Conflict with options string '{conflict.option_string}':")
332
+ for field in fields:
333
+ logger.debug(f"Field wrapper: {field} nesting level: {field.nesting_level}.")
334
+
335
+ assert len(conflict.wrappers) > 1
336
+
337
+ # Merge all the fields into the first one.
338
+ first_wrapper: FieldWrapper = fields[0]
339
+ wrappers = wrappers_flat.copy()
340
+
341
+ first_containing_dataclass: DataclassWrapper = first_wrapper.parent
342
+ original_parent = first_containing_dataclass.parent
343
+ wrappers = self._remove(first_containing_dataclass, wrappers)
344
+
345
+ for wrapper in conflict.wrappers[1:]:
346
+ containing_dataclass = wrapper.parent
347
+ wrappers = self._remove(containing_dataclass, wrappers)
348
+ first_containing_dataclass.merge(containing_dataclass)
349
+
350
+ assert first_containing_dataclass.multiple
351
+ wrappers = self._add(first_containing_dataclass, wrappers)
352
+ if original_parent:
353
+ original_parent._children.append(first_containing_dataclass)
354
+ return wrappers
355
+
356
+ def _get_conflicting_group(self, all_wrappers: list[DataclassWrapper]) -> Conflict | None:
357
+ """Return the conflicting DataclassWrappers which share argument names.
358
+
359
+ TODO: maybe return the list of fields, rather than the dataclasses?
360
+ """
361
+ conflicts: dict[str, list[FieldWrapper]] = defaultdict(list)
362
+ for wrapper in all_wrappers:
363
+ for field in wrapper.fields:
364
+ for option in field.option_strings:
365
+ conflicts[option].append(field)
366
+
367
+ for option_string, fields in conflicts.items():
368
+ if len(fields) > 1:
369
+ # the dataclasses of the fields that share the same name.
370
+ # wrappers: List[DataclassWrapper] = [f.parent for f in fields]
371
+ # dataclasses = [wrapper.dataclass for wrapper in wrappers]
372
+ # prefixes = [wrapper.prefix for wrapper in wrappers]
373
+ # return Conflict(dataclasses[0], prefixes[0], wrappers)
374
+ return Conflict(option_string, fields)
375
+ return None
376
+
377
+ def _conflict_exists(self, all_wrappers: list[DataclassWrapper]) -> bool:
378
+ """Return True whenever a conflict exists.
379
+
380
+ (option strings overlap).
381
+ """
382
+ arg_names: set[str] = set()
383
+ for wrapper in all_wrappers:
384
+ for field in wrapper.fields:
385
+ for option in field.option_strings:
386
+ if option in arg_names:
387
+ return True
388
+ arg_names.add(option)
389
+ return False
parrot/lib/python3.10/site-packages/simple_parsing/decorators.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import collections
4
+ import dataclasses
5
+ import functools
6
+ import inspect
7
+ import typing
8
+ from typing import Any, Callable, NamedTuple
9
+
10
+ import docstring_parser as dp
11
+
12
+ from simple_parsing.docstring import dp_parse, inspect_getdoc
13
+
14
+ from . import helpers, parsing
15
+
16
+
17
class _Field(NamedTuple):
    """Intermediate record describing one function parameter.

    Bundles the parameter's name, its (possibly inferred) type annotation and
    the field object built for it, in the (name, type, field) shape expected
    by `dataclasses.make_dataclass`.
    """

    name: str
    annotation: type
    field: dataclasses.Field
21
+
22
+
23
+ def _description_from_docstring(docstring: dp.Docstring) -> str:
24
+ """Construct a description from the short and long description of a docstring."""
25
+ description = ""
26
+ if docstring.short_description:
27
+ description += f"{docstring.short_description}\n"
28
+ if docstring.blank_after_short_description:
29
+ description += "\n"
30
+ if docstring.long_description:
31
+ description += f"{docstring.long_description}\n"
32
+ if docstring.blank_after_long_description:
33
+ description += "\n"
34
+ return description
35
+
36
+
37
# Typing overloads so that `main` type-checks both as a bare decorator
# (@main) and as a decorator factory (@main(...)) applied later.
@typing.overload
def main(original_function: None = None, **sp_kwargs) -> Callable[..., Callable[..., Any]]:
    ...


@typing.overload
def main(original_function: Callable[..., Any], **sp_kwargs) -> Callable[..., Any]:
    ...
45
+
46
+
47
def main(original_function=None, **sp_kwargs):
    """Parse a function's arguments using simple-parsing from type annotations.

    Usable both as a bare decorator (``@main``) and as a decorator factory
    (``@main(**sp_kwargs)``). Extra keyword arguments are forwarded to
    `parsing.parse` when the wrapped function is called.
    """

    def _decorate_with_cli_args(function: Callable[..., Any]) -> Callable[..., Any]:
        """Decorate `function` by binding its arguments obtained from simple-parsing."""

        @functools.wraps(function)
        def _wrapper(*other_args, **other_kwargs) -> Any:
            # Parse signature and parameters
            signature = inspect.signature(function, follow_wrapped=True)
            parameters = signature.parameters

            # Parse docstring to use as help strings
            docstring = dp_parse(inspect_getdoc(function) or "")
            docstring_param_description = {
                param.arg_name: param.description for param in docstring.params
            }

            # Build one dataclass field per function parameter.
            fields = []
            for name, parameter in parameters.items():
                # Replace empty annotation with Any
                if parameter.annotation == inspect.Parameter.empty:
                    parameter = parameter.replace(annotation=Any)

                # Parse default or default_factory if the default is callable.
                # NOTE(review): only plain functions (lambdas included) become a
                # `default_factory`; other callables (e.g. functools.partial or
                # classes) are kept as regular default values — confirm intended.
                default, default_factory = dataclasses.MISSING, dataclasses.MISSING
                if parameter.default != inspect.Parameter.empty:
                    if inspect.isfunction(parameter.default):
                        default_factory = parameter.default
                    else:
                        default = parameter.default

                field = _Field(
                    name,
                    parameter.annotation,
                    helpers.field(
                        name=name,
                        default=default,
                        default_factory=default_factory,
                        help=docstring_param_description.get(name, ""),
                        # Positional-only parameters become positional CLI args.
                        positional=parameter.kind == inspect.Parameter.POSITIONAL_ONLY,
                    ),
                )
                fields.append(field)

            # We can have positional arguments with no defaults that come out of order
            # when parsing the function signature. Therefore, before we construct
            # the dataclass we have to sort fields according to their default value.
            # We query fields by name so there's no need to worry about the order.
            def _field_has_default(field: _Field) -> bool:
                """Return True when the field carries a default or a default_factory."""
                return (
                    field.field.default is not dataclasses.MISSING
                    or field.field.default_factory is not dataclasses.MISSING
                )

            # Stable sort: fields without defaults first (False sorts before True).
            fields = sorted(fields, key=_field_has_default)

            # Create the dataclass using the fields derived from the function's signature
            FunctionArgs = dataclasses.make_dataclass(function.__qualname__, fields)
            FunctionArgs.__doc__ = _description_from_docstring(docstring) or None
            function_args = parsing.parse(
                FunctionArgs,
                dest="args",
                add_config_path_arg=False,
                **sp_kwargs,
            )

            # Construct both positional and keyword arguments.
            args, kwargs = [], {}
            for field in dataclasses.fields(function_args):
                value = getattr(function_args, field.name)
                if field.metadata.get("positional", False):
                    args.append(value)
                else:
                    # TODO: py39: use union operator (|=)
                    kwargs.update({field.name: value})

            # Construct positional arguments with CLI and runtime args
            positionals = (*args, *other_args)
            # Construct keyword arguments so it can override arguments
            # so we don't receive multiple value errors.
            # NOTE: ChainMap looks `kwargs` (CLI values) up first, so runtime
            # keyword args in `other_kwargs` cannot cause duplicate-value errors.
            keywords = collections.ChainMap(kwargs, other_kwargs)

            # Call the function
            return function(*positionals, **keywords)

        return _wrapper

    if original_function:
        return _decorate_with_cli_args(original_function)

    return _decorate_with_cli_args
parrot/lib/python3.10/site-packages/simple_parsing/docstring.py ADDED
@@ -0,0 +1,385 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Utility for retrieveing the docstring of a dataclass's attributes.
2
+
3
+ @author: Fabrice Normandin
4
+ """
5
+ from __future__ import annotations
6
+
7
+ import functools
8
+ import inspect
9
+
10
+ # from inspect import
11
+ from dataclasses import dataclass
12
+ from logging import getLogger
13
+
14
+ import docstring_parser as dp
15
+ from docstring_parser.common import Docstring
16
+
17
+ dp_parse = functools.lru_cache(2048)(dp.parse)
18
+ inspect_getsource = functools.lru_cache(2048)(inspect.getsource)
19
+ inspect_getdoc = functools.lru_cache(2048)(inspect.getdoc)
20
+ logger = getLogger(__name__)
21
+
22
+
23
@dataclass
class AttributeDocString:
    """Simple dataclass for holding the comments of a given field."""

    comment_above: str = ""
    comment_inline: str = ""
    docstring_below: str = ""
    # Description of this field parsed out of the class docstring.
    desc_from_cls_docstring: str = ""

    @property
    def help_string(self) -> str:
        """Return the value used for the "--help" string.

        The first non-empty component wins, in this priority order:
        docstring below the field, comment above, inline comment, then the
        description from the class docstring.
        """
        for candidate in (
            self.docstring_below,
            self.comment_above,
            self.comment_inline,
            self.desc_from_cls_docstring,
        ):
            if candidate:
                return candidate
        return ""
44
+
45
+
46
def get_attribute_docstring(
    dataclass: type, field_name: str, accumulate_from_bases: bool = True
) -> AttributeDocString:
    """Returns the docstrings of a dataclass field.

    NOTE: a docstring can either be:
    - An inline comment, starting with <#>
    - A Comment on the preceding line, starting with <#>
    - A docstring on the following line, starting with either <\"\"\"> or <'''>
    - The description of a field in the classes's docstring.

    Arguments:
        dataclass: a dataclass
        field_name: the name of the field.
        accumulate_from_bases: Whether to accumulate the docstring components by looking
            through the base classes. When set to `False`, the result from the first class
            (in MRO order) that defines the field is returned directly. Otherwise, any
            component still missing is filled in from later base classes.

    Returns:
        AttributeDocString -- an object holding the string descriptions of the field.
    """
    created_docstring: AttributeDocString | None = None

    mro = inspect.getmro(dataclass)
    assert mro[0] is dataclass
    assert mro[-1] is object
    mro = mro[:-1]
    for base_class in mro:
        attribute_docstring = _get_attribute_docstring(base_class, field_name)
        if not attribute_docstring:
            continue
        if not created_docstring:
            # BUGFIX: `_get_attribute_docstring` is lru_cache'd, so mutating its
            # return value (as the accumulation below does) would pollute the
            # cache for every later call. Accumulate into a fresh copy instead.
            created_docstring = AttributeDocString(
                comment_above=attribute_docstring.comment_above,
                comment_inline=attribute_docstring.comment_inline,
                docstring_below=attribute_docstring.docstring_below,
                desc_from_cls_docstring=attribute_docstring.desc_from_cls_docstring,
            )
            if not accumulate_from_bases:
                # We found a definition for that field in that class, so return it directly.
                return created_docstring
        else:
            # Fill in any components that are still missing with the ones found
            # in this base class (first non-empty value wins).
            created_docstring.comment_above = (
                created_docstring.comment_above or attribute_docstring.comment_above
            )
            created_docstring.comment_inline = (
                created_docstring.comment_inline or attribute_docstring.comment_inline
            )
            created_docstring.docstring_below = (
                created_docstring.docstring_below or attribute_docstring.docstring_below
            )
            created_docstring.desc_from_cls_docstring = (
                created_docstring.desc_from_cls_docstring
                or attribute_docstring.desc_from_cls_docstring
            )
    if not created_docstring:
        logger.debug(
            RuntimeWarning(
                f"Couldn't find the definition for field '{field_name}' within the dataclass "
                f"{dataclass} or any of its base classes {','.join(t.__name__ for t in mro[1:])}."
            )
        )
        return AttributeDocString()
    return created_docstring
104
+
105
+
106
@functools.lru_cache(2048)
def _get_attribute_docstring(dataclass: type, field_name: str) -> AttributeDocString | None:
    """Gets the AttributeDocString of the given field in the given dataclass.

    Doesn't inspect base classes. Returns None when the source of `dataclass`
    cannot be retrieved, or when no definition of `field_name` is found in it.

    NOTE: the result is cached, so callers must not mutate the returned object.
    """
    try:
        source = inspect_getsource(dataclass)
    except (TypeError, OSError) as e:
        # e.g. classes defined interactively or in extension modules have no
        # retrievable Python source.
        logger.debug(
            UserWarning(
                f"Couldn't retrieve the source code of class {dataclass} "
                f"(in order to retrieve the docstring of field {field_name}): {e}"
            )
        )
        return None

    # Parse docstring to use as help strings
    desc_from_cls_docstring = ""
    cls_docstring = inspect_getdoc(dataclass)
    if cls_docstring:
        docstring: Docstring = dp_parse(cls_docstring)
        for param in docstring.params:
            if param.arg_name == field_name:
                desc_from_cls_docstring = param.description or ""

    # NOTE: We want to skip the docstring lines.
    # NOTE: Currently, we just remove the __doc__ from the source. It's perhaps a bit crude,
    # but it works.
    if dataclass.__doc__ and dataclass.__doc__ in source:
        source = source.replace(dataclass.__doc__, "\n", 1)
        # note: does this remove the whitespace though?

    code_lines: list[str] = source.splitlines()
    # BUGFIX(cleanup): removed a dead scan that advanced a `start_line_index`
    # past the class docstring — its result was never used anywhere below.

    lines_with_field_defs = [
        (index, line) for index, line in enumerate(code_lines) if _contains_field_definition(line)
    ]
    for i, line in lines_with_field_defs:
        if _line_contains_definition_for(line, field_name):
            # we found the line with the definition of this field.
            comment_above = _get_comment_ending_at_line(code_lines, i - 1)
            comment_inline = _get_inline_comment_at_line(code_lines, i)
            docstring_below = _get_docstring_starting_at_line(code_lines, i + 1)
            return AttributeDocString(
                comment_above,
                comment_inline,
                docstring_below,
                desc_from_cls_docstring=desc_from_cls_docstring,
            )
    return None
165
+
166
+
167
+ def _contains_field_definition(line: str) -> bool:
168
+ """Returns whether or not a line contains a an dataclass field definition.
169
+
170
+ Arguments:
171
+ line_str {str} -- the line content
172
+
173
+ Returns:
174
+ bool -- True if there is an attribute definition in the line.
175
+
176
+ >>> _contains_field_definition("a: int = 0")
177
+ True
178
+ >>> _contains_field_definition("a: int")
179
+ True
180
+ >>> _contains_field_definition("a: int # comment")
181
+ True
182
+ >>> _contains_field_definition("a: int = 0 # comment")
183
+ True
184
+ >>> _contains_field_definition("class FooBaz(Foo, Baz):")
185
+ False
186
+ >>> _contains_field_definition("a = 4")
187
+ False
188
+ >>> _contains_field_definition("fooooooooobar.append(123)")
189
+ False
190
+ >>> _contains_field_definition("{a: int}")
191
+ False
192
+ >>> _contains_field_definition(" foobaz: int = 123 #: The foobaz property")
193
+ True
194
+ >>> _contains_field_definition("a #:= 3")
195
+ False
196
+ """
197
+ # Get rid of any comments first.
198
+ line, _, _ = line.partition("#")
199
+
200
+ if ":" not in line:
201
+ return False
202
+
203
+ if "=" in line:
204
+ attribute_and_type, _, _ = line.partition("=")
205
+ else:
206
+ attribute_and_type = line
207
+
208
+ field_name, _, type = attribute_and_type.partition(":")
209
+ field_name = field_name.strip()
210
+ if ":" in type:
211
+ # weird annotation or dictionary?
212
+ return False
213
+ if not field_name:
214
+ # Empty attribute name?
215
+ return False
216
+ return field_name.isidentifier()
217
+
218
+
219
def _line_contains_definition_for(line: str, field_name: str) -> bool:
    """Return True when `line` defines exactly the field named `field_name`."""
    stripped = line.strip()
    if not _contains_field_definition(stripped):
        return False
    # Everything left of the first ":" is the attribute name.
    attribute = stripped.partition(":")[0].strip()
    return attribute.isidentifier() and attribute == field_name
226
+
227
+
228
+ def _is_empty(line_str: str) -> bool:
229
+ return line_str.strip() == ""
230
+
231
+
232
+ def _is_comment(line_str: str) -> bool:
233
+ return line_str.strip().startswith("#")
234
+
235
+
236
def _get_comment_at_line(code_lines: list[str], line: int) -> str:
    """Gets the comment at line `line` in `code_lines`.

    Arguments:
        line {int} -- the index of the line in code_lines

    Returns:
        str -- the comment at the given line. empty string if not present.
    """
    line_str = code_lines[line]
    assert not _contains_field_definition(line_str)
    # Everything after the first "#" (if any) is the comment text.
    _, sep, after = line_str.partition("#")
    if not sep:
        return ""
    return after.strip()
252
+
253
+
254
def _get_inline_comment_at_line(code_lines: list[str], line: int) -> str:
    """Gets the inline comment at line `line`.

    Arguments:
        line {int} -- the index of the line in code_lines

    Returns:
        str -- the inline comment at the given line, else an empty string.
    """
    assert 0 <= line < len(code_lines)
    assert _contains_field_definition(code_lines[line])
    # The inline comment is whatever follows the first "#" on the line.
    _, sep, after = code_lines[line].partition("#")
    return after.strip() if sep else ""
271
+
272
+
273
def _get_comment_ending_at_line(code_lines: list[str], line: int) -> str:
    """Return the (possibly multi-line) comment block ending at index `line`.

    Walks upward from `line` until the top of the source, a field definition,
    or a docstring delimiter is reached, then joins the comment text of every
    non-empty line in that span.
    """
    start_line = line
    end_line = line
    # move up the code, one line at a time, while we don't hit the start,
    # an attribute definition, or the end of a docstring.
    while start_line > 0:
        line_str = code_lines[start_line]
        if _contains_field_definition(line_str):
            break  # previous line is an assignment
        if '"""' in line_str or "'''" in line_str:
            break  # previous line has a docstring
        start_line -= 1
    # We overshot by one line when breaking (or hit index 0); step back down.
    start_line += 1

    lines = []
    for i in range(start_line, end_line + 1):
        if _is_empty(code_lines[i]):
            continue
        assert not _contains_field_definition(code_lines[i])
        comment = _get_comment_at_line(code_lines, i)
        lines.append(comment)
    return "\n".join(lines).strip()
296
+
297
+
298
def _get_docstring_starting_at_line(code_lines: list[str], line: int) -> str:
    """Return the docstring starting at index `line` of `code_lines`, or "".

    Skips blank lines, then expects a triple-quoted string. Encountering a
    field definition or a comment before any triple quote means there is no
    docstring below the field.
    """
    i = line
    # The triple-quote delimiter that opened the docstring (None until found).
    token: str | None = None
    triple_single = "'''"
    triple_double = '"""'

    # if we are looking further down than the end of the code, there is no
    # docstring.
    if line >= len(code_lines):
        return ""
    # the list of lines making up the docstring.
    docstring_contents: list[str] = []

    while i < len(code_lines):
        line_str = code_lines[i]

        # we haven't identified the starting line yet.
        if token is None:
            if _is_empty(line_str):
                i += 1
                continue

            elif _contains_field_definition(line_str) or _is_comment(line_str):
                # we haven't reached the start of a docstring yet (since token
                # is None), and we reached a line with an attribute definition,
                # or a comment, hence the docstring is empty.
                return ""

            elif triple_single in line_str and triple_double in line_str:
                # Both delimiters appear on the same line; whichever comes
                # first opens the docstring. This handles something stupid like:
                # @dataclass
                # class Bob:
                #     a: int
                #     """ hello '''
                #     bob
                #     ''' bye
                #     """
                triple_single_index = line_str.index(triple_single)
                triple_double_index = line_str.index(triple_double)
                if triple_single_index < triple_double_index:
                    token = triple_single
                else:
                    token = triple_double
            elif triple_double in line_str:
                token = triple_double
            elif triple_single in line_str:
                token = triple_single
            else:
                # Non-empty, non-comment, non-definition line with no triple
                # quote: give up on parsing a docstring here.
                logger.debug(f"Warning: Unable to parse attribute docstring: {line_str}")
                return ""

            # get the string portion of the line (after a token or possibly
            # between two tokens).
            parts = line_str.split(token, maxsplit=2)
            if len(parts) == 3:
                # Single-line docstring; this takes care of cases like:
                # @dataclass
                # class Bob:
                #     a: int
                #     """ hello """
                between_tokens = parts[1].strip()
                docstring_contents.append(between_tokens)
                break

            elif len(parts) == 2:
                # Opening line: keep whatever follows the opening delimiter.
                after_token = parts[1].strip()
                docstring_contents.append(after_token)
        else:
            if token in line_str:
                # Closing line: keep the text before the closing delimiter.
                before = line_str.split(token, maxsplit=1)[0]
                docstring_contents.append(before.strip())
                break
            else:
                # intermediate line without the token.
                docstring_contents.append(line_str.strip())
        i += 1
    return "\n".join(docstring_contents)
parrot/lib/python3.10/site-packages/simple_parsing/help_formatter.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ from argparse import ONE_OR_MORE, OPTIONAL, PARSER, REMAINDER, ZERO_OR_MORE, Action
3
+ from logging import getLogger
4
+ from typing import Optional, Type
5
+
6
+ from .wrappers.field_metavar import get_metavar
7
+
8
# Placeholder stripped out of help strings by
# SimpleHelpFormatter._get_help_string before display.
# (presumably inserted elsewhere in the package — confirm against callers.)
TEMPORARY_TOKEN = "<__TEMP__>"
logger = getLogger(__name__)
10
+
11
+
12
class SimpleHelpFormatter(
    argparse.ArgumentDefaultsHelpFormatter,
    argparse.MetavarTypeHelpFormatter,
    argparse.RawDescriptionHelpFormatter,
):
    """Little shorthand for using some useful HelpFormatters from argparse.

    This class inherits from argparse's `ArgumentDefaultHelpFormatter`,
    `MetavarTypeHelpFormatter` and `RawDescriptionHelpFormatter` classes.

    This produces the following resulting actions:
    - adds a "(default: xyz)" for each argument with a default
    - uses the name of the argument type as the metavar. For example, gives
    "-n int" instead of "-n N" in the usage and description of the arguments.
    - Conserves the formatting of the class and argument docstrings, if given.
    """

    def _format_args(self, action: Action, default_metavar: str):
        """Format the args part of the usage/help string for `action`.

        Prefers an explicit metavar (or one derived from the action's type via
        `get_metavar`) when the action has no `choices`; otherwise falls back
        to argparse's standard nargs-based formatting.
        """
        _get_metavar = self._metavar_formatter(action, default_metavar)
        action_type = action.type

        metavar = action.metavar or get_metavar(action_type)
        if metavar and not action.choices:
            result = metavar
        elif action.nargs is None:
            result = "%s" % _get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = "[%s]" % _get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = "[%s [%s ...]]" % _get_metavar(2)  # noqa: UP031
        elif action.nargs == ONE_OR_MORE:
            result = "%s [%s ...]" % _get_metavar(2)  # noqa: UP031
        elif action.nargs == REMAINDER:
            result = "..."
        elif action.nargs == PARSER:
            result = "%s ..." % _get_metavar(1)
        else:
            # Fixed integer nargs: one metavar slot per value.
            formats = ["%s" for _ in range(action.nargs)]
            result = " ".join(formats) % _get_metavar(action.nargs)
        return result

    def _get_default_metavar_for_optional(self, action: argparse.Action):
        # BUGFIX: narrowed `except BaseException` to `except Exception` so
        # KeyboardInterrupt/SystemExit are no longer swallowed here. The base
        # MetavarTypeHelpFormatter reads `action.type.__name__`, which raises
        # when `action.type` is None or not a class — hence the fallback.
        try:
            return super()._get_default_metavar_for_optional(action)
        except Exception:
            logger.debug(f"Getting metavar for action with dest {action.dest}.")
            metavar = self._get_metavar_for_action(action)
            logger.debug(f"Result metavar: {metavar}")
            return metavar

    def _get_default_metavar_for_positional(self, action: argparse.Action):
        # Same fallback (and same BUGFIX narrowing) as for optionals above.
        try:
            return super()._get_default_metavar_for_positional(action)
        except Exception:
            logger.debug(f"Getting metavar for action with dest {action.dest}.")
            metavar = self._get_metavar_for_action(action)
            logger.debug(f"Result metavar: {metavar}")
            return metavar

    def _get_metavar_for_action(self, action: argparse.Action) -> str:
        """Derive a metavar from the action's `type`."""
        return self._get_metavar_for_type(action.type)

    def _get_metavar_for_type(self, t: Type) -> str:
        """Derive a metavar for a type, falling back to `str(t)`."""
        return get_metavar(t) or str(t)

    def _get_help_string(self, action: Action) -> Optional[str]:
        """Strip the internal TEMPORARY_TOKEN placeholder from the help text."""
        help_str = super()._get_help_string(action=action)
        if help_str is not None:
            help_str = help_str.replace(TEMPORARY_TOKEN, "")
        return help_str
+
87
+
88
# Short alias for SimpleHelpFormatter.
Formatter = SimpleHelpFormatter
parrot/lib/python3.10/site-packages/simple_parsing/helpers/__init__.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Collection of helper classes and functions to reduce boilerplate code."""
2
+ from .fields import *
3
+ from .flatten import FlattenedAccess
4
+ from .hparams import HyperParameters
5
+ from .partial import Partial, config_for
6
+ from .serialization import FrozenSerializable, Serializable, SimpleJsonEncoder, encode
7
+
8
+ try:
9
+ from .serialization import YamlSerializable
10
+ except ImportError:
11
+ pass
12
+
13
+ # For backward compatibility purposes
14
+ JsonSerializable = Serializable
15
+ SimpleEncoder = SimpleJsonEncoder