Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/METADATA +186 -0
- .venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/RECORD +10 -0
- .venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/WHEEL +4 -0
- .venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/LICENSE +201 -0
- .venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/METADATA +477 -0
- .venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/RECORD +12 -0
- .venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/WHEEL +8 -0
- .venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/top_level.txt +1 -0
- .venv/lib/python3.11/site-packages/pyparsing/__init__.py +326 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/actions.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/common.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/exceptions.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/helpers.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/results.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/testing.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/unicode.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/__pycache__/util.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/actions.py +219 -0
- .venv/lib/python3.11/site-packages/pyparsing/common.py +434 -0
- .venv/lib/python3.11/site-packages/pyparsing/core.py +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/diagram/__init__.py +744 -0
- .venv/lib/python3.11/site-packages/pyparsing/diagram/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/exceptions.py +314 -0
- .venv/lib/python3.11/site-packages/pyparsing/helpers.py +1102 -0
- .venv/lib/python3.11/site-packages/pyparsing/py.typed +0 -0
- .venv/lib/python3.11/site-packages/pyparsing/results.py +816 -0
- .venv/lib/python3.11/site-packages/pyparsing/testing.py +362 -0
- .venv/lib/python3.11/site-packages/pyparsing/unicode.py +356 -0
- .venv/lib/python3.11/site-packages/pyparsing/util.py +398 -0
- .venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/LICENSE +29 -0
- .venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/METADATA +146 -0
- .venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/RECORD +384 -0
- .venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/WHEEL +5 -0
- .venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/top_level.txt +1 -0
- .venv/lib/python3.11/site-packages/torio/_extension/__init__.py +13 -0
- .venv/lib/python3.11/site-packages/torio/_extension/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/torio/_extension/__pycache__/utils.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/torio/_extension/utils.py +147 -0
- .venv/lib/python3.11/site-packages/torio/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/torio/utils/__pycache__/ffmpeg_utils.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/virtualenv/__init__.py +10 -0
- .venv/lib/python3.11/site-packages/virtualenv/__main__.py +73 -0
- .venv/lib/python3.11/site-packages/virtualenv/info.py +66 -0
- .venv/lib/python3.11/site-packages/virtualenv/report.py +50 -0
- .venv/lib/python3.11/site-packages/virtualenv/run/__init__.py +164 -0
- .venv/lib/python3.11/site-packages/virtualenv/run/__pycache__/__init__.cpython-311.pyc +0 -0
.venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.3
|
| 2 |
+
Name: cloudpickle
|
| 3 |
+
Version: 3.1.1
|
| 4 |
+
Summary: Pickler class to extend the standard pickle.Pickler functionality
|
| 5 |
+
Home-page: https://github.com/cloudpipe/cloudpickle
|
| 6 |
+
License: BSD-3-Clause
|
| 7 |
+
Author: The cloudpickle developer team
|
| 8 |
+
Author-email: cloudpipe@googlegroups.com
|
| 9 |
+
Requires-Python: >=3.8
|
| 10 |
+
Description-Content-Type: text/markdown
|
| 11 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 14 |
+
Classifier: Operating System :: POSIX
|
| 15 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 16 |
+
Classifier: Operating System :: MacOS :: MacOS X
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 24 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 25 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 26 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 27 |
+
Classifier: Topic :: Scientific/Engineering
|
| 28 |
+
Classifier: Topic :: System :: Distributed Computing
|
| 29 |
+
|
| 30 |
+
# cloudpickle
|
| 31 |
+
|
| 32 |
+
[](https://github.com/cloudpipe/cloudpickle/actions)
|
| 33 |
+
[](https://codecov.io/github/cloudpipe/cloudpickle?branch=master)
|
| 34 |
+
|
| 35 |
+
`cloudpickle` makes it possible to serialize Python constructs not supported
|
| 36 |
+
by the default `pickle` module from the Python standard library.
|
| 37 |
+
|
| 38 |
+
`cloudpickle` is especially useful for **cluster computing** where Python
|
| 39 |
+
code is shipped over the network to execute on remote hosts, possibly close
|
| 40 |
+
to the data.
|
| 41 |
+
|
| 42 |
+
Among other things, `cloudpickle` supports pickling for **lambda functions**
|
| 43 |
+
along with **functions and classes defined interactively** in the
|
| 44 |
+
`__main__` module (for instance in a script, a shell or a Jupyter notebook).
|
| 45 |
+
|
| 46 |
+
Cloudpickle can only be used to send objects between the **exact same version
|
| 47 |
+
of Python**.
|
| 48 |
+
|
| 49 |
+
Using `cloudpickle` for **long-term object storage is not supported and
|
| 50 |
+
strongly discouraged.**
|
| 51 |
+
|
| 52 |
+
**Security notice**: one should **only load pickle data from trusted sources** as
|
| 53 |
+
otherwise `pickle.load` can lead to arbitrary code execution resulting in a critical
|
| 54 |
+
security vulnerability.
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
Installation
|
| 58 |
+
------------
|
| 59 |
+
|
| 60 |
+
The latest release of `cloudpickle` is available from
|
| 61 |
+
[pypi](https://pypi.python.org/pypi/cloudpickle):
|
| 62 |
+
|
| 63 |
+
pip install cloudpickle
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
Examples
|
| 67 |
+
--------
|
| 68 |
+
|
| 69 |
+
Pickling a lambda expression:
|
| 70 |
+
|
| 71 |
+
```python
|
| 72 |
+
>>> import cloudpickle
|
| 73 |
+
>>> squared = lambda x: x ** 2
|
| 74 |
+
>>> pickled_lambda = cloudpickle.dumps(squared)
|
| 75 |
+
|
| 76 |
+
>>> import pickle
|
| 77 |
+
>>> new_squared = pickle.loads(pickled_lambda)
|
| 78 |
+
>>> new_squared(2)
|
| 79 |
+
4
|
| 80 |
+
```
|
| 81 |
+
|
| 82 |
+
Pickling a function interactively defined in a Python shell session
|
| 83 |
+
(in the `__main__` module):
|
| 84 |
+
|
| 85 |
+
```python
|
| 86 |
+
>>> CONSTANT = 42
|
| 87 |
+
>>> def my_function(data: int) -> int:
|
| 88 |
+
... return data + CONSTANT
|
| 89 |
+
...
|
| 90 |
+
>>> pickled_function = cloudpickle.dumps(my_function)
|
| 91 |
+
>>> depickled_function = pickle.loads(pickled_function)
|
| 92 |
+
>>> depickled_function
|
| 93 |
+
<function __main__.my_function(data:int) -> int>
|
| 94 |
+
>>> depickled_function(43)
|
| 95 |
+
85
|
| 96 |
+
```
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
Overriding pickle's serialization mechanism for importable constructs:
|
| 100 |
+
----------------------------------------------------------------------
|
| 101 |
+
|
| 102 |
+
An important difference between `cloudpickle` and `pickle` is that
|
| 103 |
+
`cloudpickle` can serialize a function or class **by value**, whereas `pickle`
|
| 104 |
+
can only serialize it **by reference**. Serialization by reference treats
|
| 105 |
+
functions and classes as attributes of modules, and pickles them through
|
| 106 |
+
instructions that trigger the import of their module at load time.
|
| 107 |
+
Serialization by reference is thus limited in that it assumes that the module
|
| 108 |
+
containing the function or class is available/importable in the unpickling
|
| 109 |
+
environment. This assumption breaks when pickling constructs defined in an
|
| 110 |
+
interactive session, a case that is automatically detected by `cloudpickle`,
|
| 111 |
+
that pickles such constructs **by value**.
|
| 112 |
+
|
| 113 |
+
Another case where the importability assumption is expected to break is when
|
| 114 |
+
developing a module in a distributed execution environment: the worker
|
| 115 |
+
processes may not have access to the said module, for example if they live on a
|
| 116 |
+
different machine than the process in which the module is being developed. By
|
| 117 |
+
itself, `cloudpickle` cannot detect such "locally importable" modules and
|
| 118 |
+
switch to serialization by value; instead, it relies on its default mode, which
|
| 119 |
+
is serialization by reference. However, since `cloudpickle 2.0.0`, one can
|
| 120 |
+
explicitly specify modules for which serialization by value should be used,
|
| 121 |
+
using the
|
| 122 |
+
`register_pickle_by_value(module)`/`/unregister_pickle_by_value(module)` API:
|
| 123 |
+
|
| 124 |
+
```python
|
| 125 |
+
>>> import cloudpickle
|
| 126 |
+
>>> import my_module
|
| 127 |
+
>>> cloudpickle.register_pickle_by_value(my_module)
|
| 128 |
+
>>> cloudpickle.dumps(my_module.my_function) # my_function is pickled by value
|
| 129 |
+
>>> cloudpickle.unregister_pickle_by_value(my_module)
|
| 130 |
+
>>> cloudpickle.dumps(my_module.my_function) # my_function is pickled by reference
|
| 131 |
+
```
|
| 132 |
+
|
| 133 |
+
Using this API, there is no need to re-install the new version of the module on
|
| 134 |
+
all the worker nodes nor to restart the workers: restarting the client Python
|
| 135 |
+
process with the new source code is enough.
|
| 136 |
+
|
| 137 |
+
Note that this feature is still **experimental**, and may fail in the following
|
| 138 |
+
situations:
|
| 139 |
+
|
| 140 |
+
- If the body of a function/class pickled by value contains an `import` statement:
|
| 141 |
+
```python
|
| 142 |
+
>>> def f():
|
| 143 |
+
>>> ... from another_module import g
|
| 144 |
+
>>> ... # calling f in the unpickling environment may fail if another_module
|
| 145 |
+
>>> ... # is unavailable
|
| 146 |
+
>>> ... return g() + 1
|
| 147 |
+
```
|
| 148 |
+
|
| 149 |
+
- If a function pickled by reference uses a function pickled by value during its execution.
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
Running the tests
|
| 153 |
+
-----------------
|
| 154 |
+
|
| 155 |
+
- With `tox`, to test run the tests for all the supported versions of
|
| 156 |
+
Python and PyPy:
|
| 157 |
+
|
| 158 |
+
pip install tox
|
| 159 |
+
tox
|
| 160 |
+
|
| 161 |
+
or alternatively for a specific environment:
|
| 162 |
+
|
| 163 |
+
tox -e py312
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
- With `pytest` to only run the tests for your current version of
|
| 167 |
+
Python:
|
| 168 |
+
|
| 169 |
+
pip install -r dev-requirements.txt
|
| 170 |
+
PYTHONPATH='.:tests' pytest
|
| 171 |
+
|
| 172 |
+
History
|
| 173 |
+
-------
|
| 174 |
+
|
| 175 |
+
`cloudpickle` was initially developed by [picloud.com](http://web.archive.org/web/20140721022102/http://blog.picloud.com/2013/11/17/picloud-has-joined-dropbox/) and shipped as part of
|
| 176 |
+
the client SDK.
|
| 177 |
+
|
| 178 |
+
A copy of `cloudpickle.py` was included as part of PySpark, the Python
|
| 179 |
+
interface to [Apache Spark](https://spark.apache.org/). Davies Liu, Josh
|
| 180 |
+
Rosen, Thom Neale and other Apache Spark developers improved it significantly,
|
| 181 |
+
most notably to add support for PyPy and Python 3.
|
| 182 |
+
|
| 183 |
+
The aim of the `cloudpickle` project is to make that work available to a wider
|
| 184 |
+
audience outside of the Spark ecosystem and to make it easier to improve it
|
| 185 |
+
further notably with the help of a dedicated non-regression test suite.
|
| 186 |
+
|
.venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cloudpickle-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
cloudpickle-3.1.1.dist-info/METADATA,sha256=_vO13j0fIhPIGD1IjHUTCy1PUJiFYAtR8PMD0KLbMB8,7057
|
| 3 |
+
cloudpickle-3.1.1.dist-info/RECORD,,
|
| 4 |
+
cloudpickle-3.1.1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
| 5 |
+
cloudpickle/__init__.py,sha256=IzKm9MzljfhH-QmN_o-zP5QimTwbtgJeRja8nrGFanQ,308
|
| 6 |
+
cloudpickle/__pycache__/__init__.cpython-311.pyc,,
|
| 7 |
+
cloudpickle/__pycache__/cloudpickle.cpython-311.pyc,,
|
| 8 |
+
cloudpickle/__pycache__/cloudpickle_fast.cpython-311.pyc,,
|
| 9 |
+
cloudpickle/cloudpickle.py,sha256=cNEBKdjBDlzFce_tvZL889uv71AnXTz1XBzkjKASSTo,58466
|
| 10 |
+
cloudpickle/cloudpickle_fast.py,sha256=AI5ZKf2AbLNxD8lXyLDpKZyzeZ2ofFtdK1ZWFq_ec1c,323
|
.venv/lib/python3.11/site-packages/cloudpickle-3.1.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.10.1
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
.venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 179 |
+
|
| 180 |
+
To apply the Apache License to your work, attach the following
|
| 181 |
+
boilerplate notice, with the fields enclosed by brackets "{}"
|
| 182 |
+
replaced with your own identifying information. (Don't include
|
| 183 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 184 |
+
comment syntax for the file format. We also recommend that a
|
| 185 |
+
file or class name and description of purpose be included on the
|
| 186 |
+
same "printed page" as the copyright notice for easier
|
| 187 |
+
identification within third-party archives.
|
| 188 |
+
|
| 189 |
+
Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
|
| 190 |
+
|
| 191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 192 |
+
you may not use this file except in compliance with the License.
|
| 193 |
+
You may obtain a copy of the License at
|
| 194 |
+
|
| 195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 196 |
+
|
| 197 |
+
Unless required by applicable law or agreed to in writing, software
|
| 198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 200 |
+
See the License for the specific language governing permissions and
|
| 201 |
+
limitations under the License.
|
.venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,477 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: frozenlist
|
| 3 |
+
Version: 1.5.0
|
| 4 |
+
Summary: A list-like structure which implements collections.abc.MutableSequence
|
| 5 |
+
Home-page: https://github.com/aio-libs/frozenlist
|
| 6 |
+
Maintainer: aiohttp team <team@aiohttp.org>
|
| 7 |
+
Maintainer-email: team@aiohttp.org
|
| 8 |
+
License: Apache 2
|
| 9 |
+
Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
|
| 10 |
+
Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
|
| 11 |
+
Project-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions
|
| 12 |
+
Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
|
| 13 |
+
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist
|
| 14 |
+
Project-URL: Docs: Changelog, https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst#changelog
|
| 15 |
+
Project-URL: Docs: RTD, https://frozenlist.aio-libs.org
|
| 16 |
+
Project-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues
|
| 17 |
+
Project-URL: GitHub: repo, https://github.com/aio-libs/frozenlist
|
| 18 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 19 |
+
Classifier: Intended Audience :: Developers
|
| 20 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 21 |
+
Classifier: Operating System :: POSIX
|
| 22 |
+
Classifier: Operating System :: MacOS :: MacOS X
|
| 23 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 24 |
+
Classifier: Programming Language :: Cython
|
| 25 |
+
Classifier: Programming Language :: Python
|
| 26 |
+
Classifier: Programming Language :: Python :: 3
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 29 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 30 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 31 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 32 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 33 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 34 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 35 |
+
Requires-Python: >=3.8
|
| 36 |
+
Description-Content-Type: text/x-rst
|
| 37 |
+
License-File: LICENSE
|
| 38 |
+
|
| 39 |
+
frozenlist
|
| 40 |
+
==========
|
| 41 |
+
|
| 42 |
+
.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg
|
| 43 |
+
:target: https://github.com/aio-libs/frozenlist/actions
|
| 44 |
+
:alt: GitHub status for master branch
|
| 45 |
+
|
| 46 |
+
.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg
|
| 47 |
+
:target: https://codecov.io/gh/aio-libs/frozenlist
|
| 48 |
+
:alt: codecov.io status for master branch
|
| 49 |
+
|
| 50 |
+
.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white
|
| 51 |
+
:target: https://pypi.org/project/frozenlist
|
| 52 |
+
:alt: frozenlist @ PyPI
|
| 53 |
+
|
| 54 |
+
.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest
|
| 55 |
+
:target: https://frozenlist.aio-libs.org
|
| 56 |
+
:alt: Read The Docs build status badge
|
| 57 |
+
|
| 58 |
+
.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
|
| 59 |
+
:target: https://matrix.to/#/%23aio-libs:matrix.org
|
| 60 |
+
:alt: Matrix Room — #aio-libs:matrix.org
|
| 61 |
+
|
| 62 |
+
.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
|
| 63 |
+
:target: https://matrix.to/#/%23aio-libs-space:matrix.org
|
| 64 |
+
:alt: Matrix Space — #aio-libs-space:matrix.org
|
| 65 |
+
|
| 66 |
+
Introduction
|
| 67 |
+
------------
|
| 68 |
+
|
| 69 |
+
``frozenlist.FrozenList`` is a list-like structure which implements
|
| 70 |
+
``collections.abc.MutableSequence``. The list is *mutable* until ``FrozenList.freeze``
|
| 71 |
+
is called, after which list modifications raise ``RuntimeError``:
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
>>> from frozenlist import FrozenList
|
| 75 |
+
>>> fl = FrozenList([17, 42])
|
| 76 |
+
>>> fl.append('spam')
|
| 77 |
+
>>> fl.append('Vikings')
|
| 78 |
+
>>> fl
|
| 79 |
+
<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>
|
| 80 |
+
>>> fl.freeze()
|
| 81 |
+
>>> fl
|
| 82 |
+
<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>
|
| 83 |
+
>>> fl.frozen
|
| 84 |
+
True
|
| 85 |
+
>>> fl.append("Monty")
|
| 86 |
+
Traceback (most recent call last):
|
| 87 |
+
File "<stdin>", line 1, in <module>
|
| 88 |
+
File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append
|
| 89 |
+
self._check_frozen()
|
| 90 |
+
File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen
|
| 91 |
+
raise RuntimeError("Cannot modify frozen list.")
|
| 92 |
+
RuntimeError: Cannot modify frozen list.
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
>>> fl = FrozenList([17, 42, 'spam'])
|
| 99 |
+
>>> hash(fl)
|
| 100 |
+
Traceback (most recent call last):
|
| 101 |
+
File "<stdin>", line 1, in <module>
|
| 102 |
+
File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__
|
| 103 |
+
raise RuntimeError("Cannot hash unfrozen list.")
|
| 104 |
+
RuntimeError: Cannot hash unfrozen list.
|
| 105 |
+
>>> fl.freeze()
|
| 106 |
+
>>> hash(fl)
|
| 107 |
+
3713081631934410656
|
| 108 |
+
>>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key
|
| 109 |
+
>>> dictionary
|
| 110 |
+
{<FrozenList(frozen=True, [1, 2])>: 'Vikings'}
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
Installation
|
| 114 |
+
------------
|
| 115 |
+
|
| 116 |
+
::
|
| 117 |
+
|
| 118 |
+
$ pip install frozenlist
|
| 119 |
+
|
| 120 |
+
The library requires Python 3.8 or newer.
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
Documentation
|
| 124 |
+
-------------
|
| 125 |
+
|
| 126 |
+
https://frozenlist.aio-libs.org
|
| 127 |
+
|
| 128 |
+
Communication channels
|
| 129 |
+
----------------------
|
| 130 |
+
|
| 131 |
+
We have a *Matrix Space* `#aio-libs-space:matrix.org
|
| 132 |
+
<https://matrix.to/#/%23aio-libs-space:matrix.org>`_ which is
|
| 133 |
+
also accessible via Gitter.
|
| 134 |
+
|
| 135 |
+
Requirements
|
| 136 |
+
------------
|
| 137 |
+
|
| 138 |
+
- Python >= 3.8
|
| 139 |
+
|
| 140 |
+
License
|
| 141 |
+
-------
|
| 142 |
+
|
| 143 |
+
``frozenlist`` is offered under the Apache 2 license.
|
| 144 |
+
|
| 145 |
+
Source code
|
| 146 |
+
-----------
|
| 147 |
+
|
| 148 |
+
The project is hosted on GitHub_
|
| 149 |
+
|
| 150 |
+
Please file an issue in the `bug tracker
|
| 151 |
+
<https://github.com/aio-libs/frozenlist/issues>`_ if you have found a bug
|
| 152 |
+
or have some suggestions to improve the library.
|
| 153 |
+
|
| 154 |
+
.. _GitHub: https://github.com/aio-libs/frozenlist
|
| 155 |
+
|
| 156 |
+
=========
|
| 157 |
+
Changelog
|
| 158 |
+
=========
|
| 159 |
+
|
| 160 |
+
..
|
| 161 |
+
You should *NOT* be adding new change log entries to this file, this
|
| 162 |
+
file is managed by towncrier. You *may* edit previous change logs to
|
| 163 |
+
fix problems like typo corrections or such.
|
| 164 |
+
To add a new change log entry, please see
|
| 165 |
+
https://pip.pypa.io/en/latest/development/contributing/#news-entries
|
| 166 |
+
we named the news folder "changes".
|
| 167 |
+
|
| 168 |
+
WARNING: Don't drop the next directive!
|
| 169 |
+
|
| 170 |
+
.. towncrier release notes start
|
| 171 |
+
|
| 172 |
+
1.5.0 (2024-10-22)
|
| 173 |
+
==================
|
| 174 |
+
|
| 175 |
+
Bug fixes
|
| 176 |
+
---------
|
| 177 |
+
|
| 178 |
+
- An incorrect signature of the ``__class_getitem__`` class method
|
| 179 |
+
has been fixed, adding a missing ``class_item`` argument under
|
| 180 |
+
Python 3.8 and older.
|
| 181 |
+
|
| 182 |
+
This change also improves the code coverage of this method that
|
| 183 |
+
was previously missing -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
*Related issues and pull requests on GitHub:*
|
| 187 |
+
`#567 <https://github.com/aio-libs/frozenlist/issues/567>`__, `#571 <https://github.com/aio-libs/frozenlist/issues/571>`__.
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
Improved documentation
|
| 191 |
+
----------------------
|
| 192 |
+
|
| 193 |
+
- Rendered issue, PR, and commit links now lead to
|
| 194 |
+
``frozenlist``'s repo instead of ``yarl``'s repo.
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
*Related issues and pull requests on GitHub:*
|
| 198 |
+
`#573 <https://github.com/aio-libs/frozenlist/issues/573>`__.
|
| 199 |
+
|
| 200 |
+
- On the ``Contributing docs`` page,
|
| 201 |
+
a link to the ``Towncrier philosophy`` has been fixed.
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
*Related issues and pull requests on GitHub:*
|
| 205 |
+
`#574 <https://github.com/aio-libs/frozenlist/issues/574>`__.
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
Packaging updates and notes for downstreams
|
| 209 |
+
-------------------------------------------
|
| 210 |
+
|
| 211 |
+
- A name of a temporary building directory now reflects
|
| 212 |
+
that it's related to ``frozenlist``, not ``yarl``.
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
*Related issues and pull requests on GitHub:*
|
| 216 |
+
`#573 <https://github.com/aio-libs/frozenlist/issues/573>`__.
|
| 217 |
+
|
| 218 |
+
- Declared Python 3.13 supported officially in the distribution package metadata.
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
*Related issues and pull requests on GitHub:*
|
| 222 |
+
`#595 <https://github.com/aio-libs/frozenlist/issues/595>`__.
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
----
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
1.4.1 (2023-12-15)
|
| 229 |
+
==================
|
| 230 |
+
|
| 231 |
+
Packaging updates and notes for downstreams
|
| 232 |
+
-------------------------------------------
|
| 233 |
+
|
| 234 |
+
- Declared Python 3.12 and PyPy 3.8-3.10 supported officially
|
| 235 |
+
in the distribution package metadata.
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
*Related issues and pull requests on GitHub:*
|
| 239 |
+
`#553 <https://github.com/aio-libs/frozenlist/issues/553>`__.
|
| 240 |
+
|
| 241 |
+
- Replaced the packaging is replaced from an old-fashioned ``setup.py`` to an
|
| 242 |
+
in-tree `PEP 517 <https://peps.python.org/pep-517>`__ build backend -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
|
| 243 |
+
|
| 244 |
+
Whenever the end-users or downstream packagers need to build ``frozenlist``
|
| 245 |
+
from source (a Git checkout or an sdist), they may pass a ``config_settings``
|
| 246 |
+
flag ``pure-python``. If this flag is not set, a C-extension will be built
|
| 247 |
+
and included into the distribution.
|
| 248 |
+
|
| 249 |
+
Here is how this can be done with ``pip``:
|
| 250 |
+
|
| 251 |
+
.. code-block:: console
|
| 252 |
+
|
| 253 |
+
$ python3 -m pip install . --config-settings=pure-python=
|
| 254 |
+
|
| 255 |
+
This will also work with ``-e | --editable``.
|
| 256 |
+
|
| 257 |
+
The same can be achieved via ``pypa/build``:
|
| 258 |
+
|
| 259 |
+
.. code-block:: console
|
| 260 |
+
|
| 261 |
+
$ python3 -m build --config-setting=pure-python=
|
| 262 |
+
|
| 263 |
+
Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source
|
| 264 |
+
directly, as opposed to building an ``sdist`` and then building from it.
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
*Related issues and pull requests on GitHub:*
|
| 268 |
+
`#560 <https://github.com/aio-libs/frozenlist/issues/560>`__.
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
Contributor-facing changes
|
| 272 |
+
--------------------------
|
| 273 |
+
|
| 274 |
+
- It is now possible to request line tracing in Cython builds using the
|
| 275 |
+
``with-cython-tracing`` `PEP 517 <https://peps.python.org/pep-517>`__ config setting
|
| 276 |
+
-- `@webknjaz <https://github.com/sponsors/webknjaz>`__.
|
| 277 |
+
|
| 278 |
+
This can be used in CI and development environment to measure coverage
|
| 279 |
+
on Cython modules, but is not normally useful to the end-users or
|
| 280 |
+
downstream packagers.
|
| 281 |
+
|
| 282 |
+
Here's a usage example:
|
| 283 |
+
|
| 284 |
+
.. code-block:: console
|
| 285 |
+
|
| 286 |
+
$ python3 -Im pip install . --config-settings=with-cython-tracing=true
|
| 287 |
+
|
| 288 |
+
For editable installs, this setting is on by default. Otherwise, it's
|
| 289 |
+
off unless requested explicitly.
|
| 290 |
+
|
| 291 |
+
The following produces C-files required for the Cython coverage
|
| 292 |
+
plugin to map the measurements back to the PYX-files:
|
| 293 |
+
|
| 294 |
+
.. code-block:: console
|
| 295 |
+
|
| 296 |
+
$ python -Im pip install -e .
|
| 297 |
+
|
| 298 |
+
Alternatively, the ``FROZENLIST_CYTHON_TRACING=1`` environment variable
|
| 299 |
+
can be set to do the same as the `PEP 517 <https://peps.python.org/pep-517>`__ config setting.
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
*Related issues and pull requests on GitHub:*
|
| 303 |
+
`#560 <https://github.com/aio-libs/frozenlist/issues/560>`__.
|
| 304 |
+
|
| 305 |
+
- Coverage collection has been implemented for the Cython modules
|
| 306 |
+
-- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
|
| 307 |
+
|
| 308 |
+
It will also be reported to Codecov from any non-release CI jobs.
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
*Related issues and pull requests on GitHub:*
|
| 312 |
+
`#561 <https://github.com/aio-libs/frozenlist/issues/561>`__.
|
| 313 |
+
|
| 314 |
+
- A step-by-step ``Release Guide`` guide has
|
| 315 |
+
been added, describing how to release *frozenlist* -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
|
| 316 |
+
|
| 317 |
+
This is primarily targeting the maintainers.
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
*Related issues and pull requests on GitHub:*
|
| 321 |
+
`#563 <https://github.com/aio-libs/frozenlist/issues/563>`__.
|
| 322 |
+
|
| 323 |
+
- Detailed ``Contributing Guidelines`` on
|
| 324 |
+
authoring the changelog fragments have been published in the
|
| 325 |
+
documentation -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
*Related issues and pull requests on GitHub:*
|
| 329 |
+
`#564 <https://github.com/aio-libs/frozenlist/issues/564>`__.
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
----
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
1.4.0 (2023-07-12)
|
| 336 |
+
==================
|
| 337 |
+
|
| 338 |
+
The published source distribution package became buildable
|
| 339 |
+
under Python 3.12.
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
----
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
Bugfixes
|
| 346 |
+
--------
|
| 347 |
+
|
| 348 |
+
- Removed an unused ``typing.Tuple`` import
|
| 349 |
+
`#411 <https://github.com/aio-libs/frozenlist/issues/411>`_
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
Deprecations and Removals
|
| 353 |
+
-------------------------
|
| 354 |
+
|
| 355 |
+
- Dropped Python 3.7 support.
|
| 356 |
+
`#413 <https://github.com/aio-libs/frozenlist/issues/413>`_
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
Misc
|
| 360 |
+
----
|
| 361 |
+
|
| 362 |
+
- `#410 <https://github.com/aio-libs/frozenlist/issues/410>`_, `#433 <https://github.com/aio-libs/frozenlist/issues/433>`_
|
| 363 |
+
|
| 364 |
+
|
| 365 |
+
----
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
1.3.3 (2022-11-08)
|
| 369 |
+
==================
|
| 370 |
+
|
| 371 |
+
- Fixed CI runs when creating a new release, where new towncrier versions
|
| 372 |
+
fail when the current version section is already present.
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
----
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
1.3.2 (2022-11-08)
|
| 379 |
+
==================
|
| 380 |
+
|
| 381 |
+
Misc
|
| 382 |
+
----
|
| 383 |
+
|
| 384 |
+
- Updated the CI runs to better check for test results and to avoid deprecated syntax. `#327 <https://github.com/aio-libs/frozenlist/issues/327>`_
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
----
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
1.3.1 (2022-08-02)
|
| 391 |
+
==================
|
| 392 |
+
|
| 393 |
+
The published source distribution package became buildable
|
| 394 |
+
under Python 3.11.
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
----
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
1.3.0 (2022-01-18)
|
| 401 |
+
==================
|
| 402 |
+
|
| 403 |
+
Bugfixes
|
| 404 |
+
--------
|
| 405 |
+
|
| 406 |
+
- Do not install C sources with binary distributions.
|
| 407 |
+
`#250 <https://github.com/aio-libs/frozenlist/issues/250>`_
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
Deprecations and Removals
|
| 411 |
+
-------------------------
|
| 412 |
+
|
| 413 |
+
- Dropped Python 3.6 support
|
| 414 |
+
`#274 <https://github.com/aio-libs/frozenlist/issues/274>`_
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
----
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
1.2.0 (2021-10-16)
|
| 421 |
+
==================
|
| 422 |
+
|
| 423 |
+
Features
|
| 424 |
+
--------
|
| 425 |
+
|
| 426 |
+
- ``FrozenList`` now supports being used as a generic type as per PEP 585, e.g. ``frozen_int_list: FrozenList[int]`` (requires Python 3.9 or newer).
|
| 427 |
+
`#172 <https://github.com/aio-libs/frozenlist/issues/172>`_
|
| 428 |
+
- Added support for Python 3.10.
|
| 429 |
+
`#227 <https://github.com/aio-libs/frozenlist/issues/227>`_
|
| 430 |
+
- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes.
|
| 431 |
+
`#227 <https://github.com/aio-libs/frozenlist/issues/227>`_
|
| 432 |
+
- Started shipping platform-specific arm64 wheels for Apple Silicon.
|
| 433 |
+
`#227 <https://github.com/aio-libs/frozenlist/issues/227>`_
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
----
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
1.1.1 (2020-11-14)
|
| 440 |
+
==================
|
| 441 |
+
|
| 442 |
+
Bugfixes
|
| 443 |
+
--------
|
| 444 |
+
|
| 445 |
+
- Provide x86 Windows wheels.
|
| 446 |
+
`#169 <https://github.com/aio-libs/frozenlist/issues/169>`_
|
| 447 |
+
|
| 448 |
+
|
| 449 |
+
----
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
1.1.0 (2020-10-13)
|
| 453 |
+
==================
|
| 454 |
+
|
| 455 |
+
Features
|
| 456 |
+
--------
|
| 457 |
+
|
| 458 |
+
- Add support for hashing of a frozen list.
|
| 459 |
+
`#136 <https://github.com/aio-libs/frozenlist/issues/136>`_
|
| 460 |
+
|
| 461 |
+
- Support Python 3.8 and 3.9.
|
| 462 |
+
|
| 463 |
+
- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on
|
| 464 |
+
Linux as well as ``x86_64``.
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
----
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
1.0.0 (2019-11-09)
|
| 471 |
+
==================
|
| 472 |
+
|
| 473 |
+
Deprecations and Removals
|
| 474 |
+
-------------------------
|
| 475 |
+
|
| 476 |
+
- Dropped support for Python 3.5; only 3.6, 3.7 and 3.8 are supported going forward.
|
| 477 |
+
`#24 <https://github.com/aio-libs/frozenlist/issues/24>`_
|
.venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
frozenlist-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
frozenlist-1.5.0.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
|
| 3 |
+
frozenlist-1.5.0.dist-info/METADATA,sha256=BpQvB7z2NbU3f4XTQDvhAZ9L08WR4XiYajilj9IY6Yk,13762
|
| 4 |
+
frozenlist-1.5.0.dist-info/RECORD,,
|
| 5 |
+
frozenlist-1.5.0.dist-info/WHEEL,sha256=g2F1VBkM0jT4VUJ6zw4l3h6A4vsUecrEFGycpjwnlV0,224
|
| 6 |
+
frozenlist-1.5.0.dist-info/top_level.txt,sha256=jivtxsPXA3nK3WBWW2LW5Mtu_GHt8UZA13NeCs2cKuA,11
|
| 7 |
+
frozenlist/__init__.py,sha256=ymVtnW3MinO-Ux3cBj_PLEpXnmLawk45el8vcX6IkWY,2371
|
| 8 |
+
frozenlist/__init__.pyi,sha256=vMEoES1xGegPtVXoCi9XydEeHsyuIq-KdeXwP5PdsaA,1470
|
| 9 |
+
frozenlist/__pycache__/__init__.cpython-311.pyc,,
|
| 10 |
+
frozenlist/_frozenlist.cpython-311-x86_64-linux-gnu.so,sha256=i8wUceqj_Nyr0hb7D8kyPPUbtAtLl4J7MtPQYTzhRug,923584
|
| 11 |
+
frozenlist/_frozenlist.pyx,sha256=4YturclNF7wioO7YX3Vzl7Ldb2-iswe6UrjJOMKSswU,2993
|
| 12 |
+
frozenlist/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7
|
.venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (75.2.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp311-cp311-manylinux_2_5_x86_64
|
| 5 |
+
Tag: cp311-cp311-manylinux1_x86_64
|
| 6 |
+
Tag: cp311-cp311-manylinux_2_17_x86_64
|
| 7 |
+
Tag: cp311-cp311-manylinux2014_x86_64
|
| 8 |
+
|
.venv/lib/python3.11/site-packages/frozenlist-1.5.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
frozenlist
|
.venv/lib/python3.11/site-packages/pyparsing/__init__.py
ADDED
|
@@ -0,0 +1,326 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# module pyparsing.py
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2003-2022 Paul T. McGuire
|
| 4 |
+
#
|
| 5 |
+
# Permission is hereby granted, free of charge, to any person obtaining
|
| 6 |
+
# a copy of this software and associated documentation files (the
|
| 7 |
+
# "Software"), to deal in the Software without restriction, including
|
| 8 |
+
# without limitation the rights to use, copy, modify, merge, publish,
|
| 9 |
+
# distribute, sublicense, and/or sell copies of the Software, and to
|
| 10 |
+
# permit persons to whom the Software is furnished to do so, subject to
|
| 11 |
+
# the following conditions:
|
| 12 |
+
#
|
| 13 |
+
# The above copyright notice and this permission notice shall be
|
| 14 |
+
# included in all copies or substantial portions of the Software.
|
| 15 |
+
#
|
| 16 |
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 17 |
+
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 18 |
+
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 19 |
+
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
| 20 |
+
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
| 21 |
+
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
| 22 |
+
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 23 |
+
#
|
| 24 |
+
|
| 25 |
+
__doc__ = """
|
| 26 |
+
pyparsing module - Classes and methods to define and execute parsing grammars
|
| 27 |
+
=============================================================================
|
| 28 |
+
|
| 29 |
+
The pyparsing module is an alternative approach to creating and
|
| 30 |
+
executing simple grammars, vs. the traditional lex/yacc approach, or the
|
| 31 |
+
use of regular expressions. With pyparsing, you don't need to learn
|
| 32 |
+
a new syntax for defining grammars or matching expressions - the parsing
|
| 33 |
+
module provides a library of classes that you use to construct the
|
| 34 |
+
grammar directly in Python.
|
| 35 |
+
|
| 36 |
+
Here is a program to parse "Hello, World!" (or any greeting of the form
|
| 37 |
+
``"<salutation>, <addressee>!"``), built up using :class:`Word`,
|
| 38 |
+
:class:`Literal`, and :class:`And` elements
|
| 39 |
+
(the :meth:`'+'<ParserElement.__add__>` operators create :class:`And` expressions,
|
| 40 |
+
and the strings are auto-converted to :class:`Literal` expressions)::
|
| 41 |
+
|
| 42 |
+
from pyparsing import Word, alphas
|
| 43 |
+
|
| 44 |
+
# define grammar of a greeting
|
| 45 |
+
greet = Word(alphas) + "," + Word(alphas) + "!"
|
| 46 |
+
|
| 47 |
+
hello = "Hello, World!"
|
| 48 |
+
print(hello, "->", greet.parse_string(hello))
|
| 49 |
+
|
| 50 |
+
The program outputs the following::
|
| 51 |
+
|
| 52 |
+
Hello, World! -> ['Hello', ',', 'World', '!']
|
| 53 |
+
|
| 54 |
+
The Python representation of the grammar is quite readable, owing to the
|
| 55 |
+
self-explanatory class names, and the use of :class:`'+'<And>`,
|
| 56 |
+
:class:`'|'<MatchFirst>`, :class:`'^'<Or>` and :class:`'&'<Each>` operators.
|
| 57 |
+
|
| 58 |
+
The :class:`ParseResults` object returned from
|
| 59 |
+
:class:`ParserElement.parse_string` can be
|
| 60 |
+
accessed as a nested list, a dictionary, or an object with named
|
| 61 |
+
attributes.
|
| 62 |
+
|
| 63 |
+
The pyparsing module handles some of the problems that are typically
|
| 64 |
+
vexing when writing text parsers:
|
| 65 |
+
|
| 66 |
+
- extra or missing whitespace (the above program will also handle
|
| 67 |
+
"Hello,World!", "Hello , World !", etc.)
|
| 68 |
+
- quoted strings
|
| 69 |
+
- embedded comments
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
Getting Started -
|
| 73 |
+
-----------------
|
| 74 |
+
Visit the classes :class:`ParserElement` and :class:`ParseResults` to
|
| 75 |
+
see the base classes that most other pyparsing
|
| 76 |
+
classes inherit from. Use the docstrings for examples of how to:
|
| 77 |
+
|
| 78 |
+
- construct literal match expressions from :class:`Literal` and
|
| 79 |
+
:class:`CaselessLiteral` classes
|
| 80 |
+
- construct character word-group expressions using the :class:`Word`
|
| 81 |
+
class
|
| 82 |
+
- see how to create repetitive expressions using :class:`ZeroOrMore`
|
| 83 |
+
and :class:`OneOrMore` classes
|
| 84 |
+
- use :class:`'+'<And>`, :class:`'|'<MatchFirst>`, :class:`'^'<Or>`,
|
| 85 |
+
and :class:`'&'<Each>` operators to combine simple expressions into
|
| 86 |
+
more complex ones
|
| 87 |
+
- associate names with your parsed results using
|
| 88 |
+
:class:`ParserElement.set_results_name`
|
| 89 |
+
- access the parsed data, which is returned as a :class:`ParseResults`
|
| 90 |
+
object
|
| 91 |
+
- find some helpful expression short-cuts like :class:`DelimitedList`
|
| 92 |
+
and :class:`one_of`
|
| 93 |
+
- find more useful common expressions in the :class:`pyparsing_common`
|
| 94 |
+
namespace class
|
| 95 |
+
"""
|
| 96 |
+
from typing import NamedTuple
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class version_info(NamedTuple):
|
| 100 |
+
major: int
|
| 101 |
+
minor: int
|
| 102 |
+
micro: int
|
| 103 |
+
releaselevel: str
|
| 104 |
+
serial: int
|
| 105 |
+
|
| 106 |
+
@property
|
| 107 |
+
def __version__(self):
|
| 108 |
+
return (
|
| 109 |
+
f"{self.major}.{self.minor}.{self.micro}"
|
| 110 |
+
+ (
|
| 111 |
+
f"{'r' if self.releaselevel[0] == 'c' else ''}{self.releaselevel[0]}{self.serial}",
|
| 112 |
+
"",
|
| 113 |
+
)[self.releaselevel == "final"]
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
def __str__(self):
|
| 117 |
+
return f"{__name__} {self.__version__} / {__version_time__}"
|
| 118 |
+
|
| 119 |
+
def __repr__(self):
|
| 120 |
+
return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})"
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
__version_info__ = version_info(3, 2, 1, "final", 1)
|
| 124 |
+
__version_time__ = "31 Dec 2024 20:41 UTC"
|
| 125 |
+
__version__ = __version_info__.__version__
|
| 126 |
+
__versionTime__ = __version_time__
|
| 127 |
+
__author__ = "Paul McGuire <ptmcg.gm+pyparsing@gmail.com>"
|
| 128 |
+
|
| 129 |
+
from .util import *
|
| 130 |
+
from .exceptions import *
|
| 131 |
+
from .actions import *
|
| 132 |
+
from .core import __diag__, __compat__
|
| 133 |
+
from .results import *
|
| 134 |
+
from .core import *
|
| 135 |
+
from .core import _builtin_exprs as core_builtin_exprs
|
| 136 |
+
from .helpers import *
|
| 137 |
+
from .helpers import _builtin_exprs as helper_builtin_exprs
|
| 138 |
+
|
| 139 |
+
from .unicode import unicode_set, UnicodeRangeList, pyparsing_unicode as unicode
|
| 140 |
+
from .testing import pyparsing_test as testing
|
| 141 |
+
from .common import (
|
| 142 |
+
pyparsing_common as common,
|
| 143 |
+
_builtin_exprs as common_builtin_exprs,
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
# Compatibility synonyms
|
| 147 |
+
if "pyparsing_unicode" not in globals():
|
| 148 |
+
pyparsing_unicode = unicode # type: ignore[misc]
|
| 149 |
+
if "pyparsing_common" not in globals():
|
| 150 |
+
pyparsing_common = common
|
| 151 |
+
if "pyparsing_test" not in globals():
|
| 152 |
+
pyparsing_test = testing
|
| 153 |
+
|
| 154 |
+
core_builtin_exprs += common_builtin_exprs + helper_builtin_exprs
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
__all__ = [
|
| 158 |
+
"__version__",
|
| 159 |
+
"__version_time__",
|
| 160 |
+
"__author__",
|
| 161 |
+
"__compat__",
|
| 162 |
+
"__diag__",
|
| 163 |
+
"And",
|
| 164 |
+
"AtLineStart",
|
| 165 |
+
"AtStringStart",
|
| 166 |
+
"CaselessKeyword",
|
| 167 |
+
"CaselessLiteral",
|
| 168 |
+
"CharsNotIn",
|
| 169 |
+
"CloseMatch",
|
| 170 |
+
"Combine",
|
| 171 |
+
"DelimitedList",
|
| 172 |
+
"Dict",
|
| 173 |
+
"Each",
|
| 174 |
+
"Empty",
|
| 175 |
+
"FollowedBy",
|
| 176 |
+
"Forward",
|
| 177 |
+
"GoToColumn",
|
| 178 |
+
"Group",
|
| 179 |
+
"IndentedBlock",
|
| 180 |
+
"Keyword",
|
| 181 |
+
"LineEnd",
|
| 182 |
+
"LineStart",
|
| 183 |
+
"Literal",
|
| 184 |
+
"Located",
|
| 185 |
+
"PrecededBy",
|
| 186 |
+
"MatchFirst",
|
| 187 |
+
"NoMatch",
|
| 188 |
+
"NotAny",
|
| 189 |
+
"OneOrMore",
|
| 190 |
+
"OnlyOnce",
|
| 191 |
+
"OpAssoc",
|
| 192 |
+
"Opt",
|
| 193 |
+
"Optional",
|
| 194 |
+
"Or",
|
| 195 |
+
"ParseBaseException",
|
| 196 |
+
"ParseElementEnhance",
|
| 197 |
+
"ParseException",
|
| 198 |
+
"ParseExpression",
|
| 199 |
+
"ParseFatalException",
|
| 200 |
+
"ParseResults",
|
| 201 |
+
"ParseSyntaxException",
|
| 202 |
+
"ParserElement",
|
| 203 |
+
"PositionToken",
|
| 204 |
+
"QuotedString",
|
| 205 |
+
"RecursiveGrammarException",
|
| 206 |
+
"Regex",
|
| 207 |
+
"SkipTo",
|
| 208 |
+
"StringEnd",
|
| 209 |
+
"StringStart",
|
| 210 |
+
"Suppress",
|
| 211 |
+
"Tag",
|
| 212 |
+
"Token",
|
| 213 |
+
"TokenConverter",
|
| 214 |
+
"White",
|
| 215 |
+
"Word",
|
| 216 |
+
"WordEnd",
|
| 217 |
+
"WordStart",
|
| 218 |
+
"ZeroOrMore",
|
| 219 |
+
"Char",
|
| 220 |
+
"alphanums",
|
| 221 |
+
"alphas",
|
| 222 |
+
"alphas8bit",
|
| 223 |
+
"any_close_tag",
|
| 224 |
+
"any_open_tag",
|
| 225 |
+
"autoname_elements",
|
| 226 |
+
"c_style_comment",
|
| 227 |
+
"col",
|
| 228 |
+
"common_html_entity",
|
| 229 |
+
"condition_as_parse_action",
|
| 230 |
+
"counted_array",
|
| 231 |
+
"cpp_style_comment",
|
| 232 |
+
"dbl_quoted_string",
|
| 233 |
+
"dbl_slash_comment",
|
| 234 |
+
"delimited_list",
|
| 235 |
+
"dict_of",
|
| 236 |
+
"empty",
|
| 237 |
+
"hexnums",
|
| 238 |
+
"html_comment",
|
| 239 |
+
"identchars",
|
| 240 |
+
"identbodychars",
|
| 241 |
+
"infix_notation",
|
| 242 |
+
"java_style_comment",
|
| 243 |
+
"line",
|
| 244 |
+
"line_end",
|
| 245 |
+
"line_start",
|
| 246 |
+
"lineno",
|
| 247 |
+
"make_html_tags",
|
| 248 |
+
"make_xml_tags",
|
| 249 |
+
"match_only_at_col",
|
| 250 |
+
"match_previous_expr",
|
| 251 |
+
"match_previous_literal",
|
| 252 |
+
"nested_expr",
|
| 253 |
+
"null_debug_action",
|
| 254 |
+
"nums",
|
| 255 |
+
"one_of",
|
| 256 |
+
"original_text_for",
|
| 257 |
+
"printables",
|
| 258 |
+
"punc8bit",
|
| 259 |
+
"pyparsing_common",
|
| 260 |
+
"pyparsing_test",
|
| 261 |
+
"pyparsing_unicode",
|
| 262 |
+
"python_style_comment",
|
| 263 |
+
"quoted_string",
|
| 264 |
+
"remove_quotes",
|
| 265 |
+
"replace_with",
|
| 266 |
+
"replace_html_entity",
|
| 267 |
+
"rest_of_line",
|
| 268 |
+
"sgl_quoted_string",
|
| 269 |
+
"srange",
|
| 270 |
+
"string_end",
|
| 271 |
+
"string_start",
|
| 272 |
+
"token_map",
|
| 273 |
+
"trace_parse_action",
|
| 274 |
+
"ungroup",
|
| 275 |
+
"unicode_set",
|
| 276 |
+
"unicode_string",
|
| 277 |
+
"with_attribute",
|
| 278 |
+
"with_class",
|
| 279 |
+
# pre-PEP8 compatibility names
|
| 280 |
+
"__versionTime__",
|
| 281 |
+
"anyCloseTag",
|
| 282 |
+
"anyOpenTag",
|
| 283 |
+
"cStyleComment",
|
| 284 |
+
"commonHTMLEntity",
|
| 285 |
+
"conditionAsParseAction",
|
| 286 |
+
"countedArray",
|
| 287 |
+
"cppStyleComment",
|
| 288 |
+
"dblQuotedString",
|
| 289 |
+
"dblSlashComment",
|
| 290 |
+
"delimitedList",
|
| 291 |
+
"dictOf",
|
| 292 |
+
"htmlComment",
|
| 293 |
+
"indentedBlock",
|
| 294 |
+
"infixNotation",
|
| 295 |
+
"javaStyleComment",
|
| 296 |
+
"lineEnd",
|
| 297 |
+
"lineStart",
|
| 298 |
+
"locatedExpr",
|
| 299 |
+
"makeHTMLTags",
|
| 300 |
+
"makeXMLTags",
|
| 301 |
+
"matchOnlyAtCol",
|
| 302 |
+
"matchPreviousExpr",
|
| 303 |
+
"matchPreviousLiteral",
|
| 304 |
+
"nestedExpr",
|
| 305 |
+
"nullDebugAction",
|
| 306 |
+
"oneOf",
|
| 307 |
+
"opAssoc",
|
| 308 |
+
"originalTextFor",
|
| 309 |
+
"pythonStyleComment",
|
| 310 |
+
"quotedString",
|
| 311 |
+
"removeQuotes",
|
| 312 |
+
"replaceHTMLEntity",
|
| 313 |
+
"replaceWith",
|
| 314 |
+
"restOfLine",
|
| 315 |
+
"sglQuotedString",
|
| 316 |
+
"stringEnd",
|
| 317 |
+
"stringStart",
|
| 318 |
+
"tokenMap",
|
| 319 |
+
"traceParseAction",
|
| 320 |
+
"unicodeString",
|
| 321 |
+
"withAttribute",
|
| 322 |
+
"withClass",
|
| 323 |
+
"common",
|
| 324 |
+
"unicode",
|
| 325 |
+
"testing",
|
| 326 |
+
]
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (8.22 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/actions.cpython-311.pyc
ADDED
|
Binary file (9.5 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/common.cpython-311.pyc
ADDED
|
Binary file (15.5 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/exceptions.cpython-311.pyc
ADDED
|
Binary file (14.4 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/helpers.cpython-311.pyc
ADDED
|
Binary file (52.9 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/results.cpython-311.pyc
ADDED
|
Binary file (38.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/testing.cpython-311.pyc
ADDED
|
Binary file (21 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/unicode.cpython-311.pyc
ADDED
|
Binary file (15.3 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/__pycache__/util.cpython-311.pyc
ADDED
|
Binary file (21.5 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/actions.py
ADDED
|
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# actions.py
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
from typing import Union, Callable, Any
|
| 5 |
+
|
| 6 |
+
from .exceptions import ParseException
|
| 7 |
+
from .util import col, replaced_by_pep8
|
| 8 |
+
from .results import ParseResults
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
ParseAction = Union[
|
| 12 |
+
Callable[[], Any],
|
| 13 |
+
Callable[[ParseResults], Any],
|
| 14 |
+
Callable[[int, ParseResults], Any],
|
| 15 |
+
Callable[[str, int, ParseResults], Any],
|
| 16 |
+
]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class OnlyOnce:
|
| 20 |
+
"""
|
| 21 |
+
Wrapper for parse actions, to ensure they are only called once.
|
| 22 |
+
Note: parse action signature must include all 3 arguments.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
def __init__(self, method_call: Callable[[str, int, ParseResults], Any]):
|
| 26 |
+
from .core import _trim_arity
|
| 27 |
+
|
| 28 |
+
self.callable = _trim_arity(method_call)
|
| 29 |
+
self.called = False
|
| 30 |
+
|
| 31 |
+
def __call__(self, s: str, l: int, t: ParseResults) -> ParseResults:
|
| 32 |
+
if not self.called:
|
| 33 |
+
results = self.callable(s, l, t)
|
| 34 |
+
self.called = True
|
| 35 |
+
return results
|
| 36 |
+
raise ParseException(s, l, "OnlyOnce obj called multiple times w/out reset")
|
| 37 |
+
|
| 38 |
+
def reset(self):
|
| 39 |
+
"""
|
| 40 |
+
Allow the associated parse action to be called once more.
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
self.called = False
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def match_only_at_col(n: int) -> ParseAction:
|
| 47 |
+
"""
|
| 48 |
+
Helper method for defining parse actions that require matching at
|
| 49 |
+
a specific column in the input text.
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
def verify_col(strg: str, locn: int, toks: ParseResults) -> None:
|
| 53 |
+
if col(locn, strg) != n:
|
| 54 |
+
raise ParseException(strg, locn, f"matched token not at column {n}")
|
| 55 |
+
|
| 56 |
+
return verify_col
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def replace_with(repl_str: str) -> ParseAction:
|
| 60 |
+
"""
|
| 61 |
+
Helper method for common parse actions that simply return
|
| 62 |
+
a literal value. Especially useful when used with
|
| 63 |
+
:class:`transform_string<ParserElement.transform_string>` ().
|
| 64 |
+
|
| 65 |
+
Example::
|
| 66 |
+
|
| 67 |
+
num = Word(nums).set_parse_action(lambda toks: int(toks[0]))
|
| 68 |
+
na = one_of("N/A NA").set_parse_action(replace_with(math.nan))
|
| 69 |
+
term = na | num
|
| 70 |
+
|
| 71 |
+
term[1, ...].parse_string("324 234 N/A 234") # -> [324, 234, nan, 234]
|
| 72 |
+
"""
|
| 73 |
+
return lambda s, l, t: [repl_str]
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def remove_quotes(s: str, l: int, t: ParseResults) -> Any:
|
| 77 |
+
"""
|
| 78 |
+
Helper parse action for removing quotation marks from parsed
|
| 79 |
+
quoted strings.
|
| 80 |
+
|
| 81 |
+
Example::
|
| 82 |
+
|
| 83 |
+
# by default, quotation marks are included in parsed results
|
| 84 |
+
quoted_string.parse_string("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
|
| 85 |
+
|
| 86 |
+
# use remove_quotes to strip quotation marks from parsed results
|
| 87 |
+
quoted_string.set_parse_action(remove_quotes)
|
| 88 |
+
quoted_string.parse_string("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
|
| 89 |
+
"""
|
| 90 |
+
return t[0][1:-1]
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def with_attribute(*args: tuple[str, str], **attr_dict) -> ParseAction:
|
| 94 |
+
"""
|
| 95 |
+
Helper to create a validating parse action to be used with start
|
| 96 |
+
tags created with :class:`make_xml_tags` or
|
| 97 |
+
:class:`make_html_tags`. Use ``with_attribute`` to qualify
|
| 98 |
+
a starting tag with a required attribute value, to avoid false
|
| 99 |
+
matches on common tags such as ``<TD>`` or ``<DIV>``.
|
| 100 |
+
|
| 101 |
+
Call ``with_attribute`` with a series of attribute names and
|
| 102 |
+
values. Specify the list of filter attributes names and values as:
|
| 103 |
+
|
| 104 |
+
- keyword arguments, as in ``(align="right")``, or
|
| 105 |
+
- as an explicit dict with ``**`` operator, when an attribute
|
| 106 |
+
name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}``
|
| 107 |
+
- a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))``
|
| 108 |
+
|
| 109 |
+
For attribute names with a namespace prefix, you must use the second
|
| 110 |
+
form. Attribute names are matched insensitive to upper/lower case.
|
| 111 |
+
|
| 112 |
+
If just testing for ``class`` (with or without a namespace), use
|
| 113 |
+
:class:`with_class`.
|
| 114 |
+
|
| 115 |
+
To verify that the attribute exists, but without specifying a value,
|
| 116 |
+
pass ``with_attribute.ANY_VALUE`` as the value.
|
| 117 |
+
|
| 118 |
+
Example::
|
| 119 |
+
|
| 120 |
+
html = '''
|
| 121 |
+
<div>
|
| 122 |
+
Some text
|
| 123 |
+
<div type="grid">1 4 0 1 0</div>
|
| 124 |
+
<div type="graph">1,3 2,3 1,1</div>
|
| 125 |
+
<div>this has no type</div>
|
| 126 |
+
</div>
|
| 127 |
+
'''
|
| 128 |
+
div,div_end = make_html_tags("div")
|
| 129 |
+
|
| 130 |
+
# only match div tag having a type attribute with value "grid"
|
| 131 |
+
div_grid = div().set_parse_action(with_attribute(type="grid"))
|
| 132 |
+
grid_expr = div_grid + SkipTo(div | div_end)("body")
|
| 133 |
+
for grid_header in grid_expr.search_string(html):
|
| 134 |
+
print(grid_header.body)
|
| 135 |
+
|
| 136 |
+
# construct a match with any div tag having a type attribute, regardless of the value
|
| 137 |
+
div_any_type = div().set_parse_action(with_attribute(type=with_attribute.ANY_VALUE))
|
| 138 |
+
div_expr = div_any_type + SkipTo(div | div_end)("body")
|
| 139 |
+
for div_header in div_expr.search_string(html):
|
| 140 |
+
print(div_header.body)
|
| 141 |
+
|
| 142 |
+
prints::
|
| 143 |
+
|
| 144 |
+
1 4 0 1 0
|
| 145 |
+
|
| 146 |
+
1 4 0 1 0
|
| 147 |
+
1,3 2,3 1,1
|
| 148 |
+
"""
|
| 149 |
+
attrs_list: list[tuple[str, str]] = []
|
| 150 |
+
if args:
|
| 151 |
+
attrs_list.extend(args)
|
| 152 |
+
else:
|
| 153 |
+
attrs_list.extend(attr_dict.items())
|
| 154 |
+
|
| 155 |
+
def pa(s: str, l: int, tokens: ParseResults) -> None:
|
| 156 |
+
for attrName, attrValue in attrs_list:
|
| 157 |
+
if attrName not in tokens:
|
| 158 |
+
raise ParseException(s, l, "no matching attribute " + attrName)
|
| 159 |
+
if attrValue != with_attribute.ANY_VALUE and tokens[attrName] != attrValue: # type: ignore [attr-defined]
|
| 160 |
+
raise ParseException(
|
| 161 |
+
s,
|
| 162 |
+
l,
|
| 163 |
+
f"attribute {attrName!r} has value {tokens[attrName]!r}, must be {attrValue!r}",
|
| 164 |
+
)
|
| 165 |
+
|
| 166 |
+
return pa
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
with_attribute.ANY_VALUE = object() # type: ignore [attr-defined]
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def with_class(classname: str, namespace: str = "") -> ParseAction:
|
| 173 |
+
"""
|
| 174 |
+
Simplified version of :class:`with_attribute` when
|
| 175 |
+
matching on a div class - made difficult because ``class`` is
|
| 176 |
+
a reserved word in Python.
|
| 177 |
+
|
| 178 |
+
Example::
|
| 179 |
+
|
| 180 |
+
html = '''
|
| 181 |
+
<div>
|
| 182 |
+
Some text
|
| 183 |
+
<div class="grid">1 4 0 1 0</div>
|
| 184 |
+
<div class="graph">1,3 2,3 1,1</div>
|
| 185 |
+
<div>this <div> has no class</div>
|
| 186 |
+
</div>
|
| 187 |
+
|
| 188 |
+
'''
|
| 189 |
+
div,div_end = make_html_tags("div")
|
| 190 |
+
div_grid = div().set_parse_action(with_class("grid"))
|
| 191 |
+
|
| 192 |
+
grid_expr = div_grid + SkipTo(div | div_end)("body")
|
| 193 |
+
for grid_header in grid_expr.search_string(html):
|
| 194 |
+
print(grid_header.body)
|
| 195 |
+
|
| 196 |
+
div_any_type = div().set_parse_action(with_class(withAttribute.ANY_VALUE))
|
| 197 |
+
div_expr = div_any_type + SkipTo(div | div_end)("body")
|
| 198 |
+
for div_header in div_expr.search_string(html):
|
| 199 |
+
print(div_header.body)
|
| 200 |
+
|
| 201 |
+
prints::
|
| 202 |
+
|
| 203 |
+
1 4 0 1 0
|
| 204 |
+
|
| 205 |
+
1 4 0 1 0
|
| 206 |
+
1,3 2,3 1,1
|
| 207 |
+
"""
|
| 208 |
+
classattr = f"{namespace}:class" if namespace else "class"
|
| 209 |
+
return with_attribute(**{classattr: classname})
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
# Compatibility synonyms
|
| 213 |
+
# fmt: off
|
| 214 |
+
replaceWith = replaced_by_pep8("replaceWith", replace_with)
|
| 215 |
+
removeQuotes = replaced_by_pep8("removeQuotes", remove_quotes)
|
| 216 |
+
withAttribute = replaced_by_pep8("withAttribute", with_attribute)
|
| 217 |
+
withClass = replaced_by_pep8("withClass", with_class)
|
| 218 |
+
matchOnlyAtCol = replaced_by_pep8("matchOnlyAtCol", match_only_at_col)
|
| 219 |
+
# fmt: on
|
.venv/lib/python3.11/site-packages/pyparsing/common.py
ADDED
|
@@ -0,0 +1,434 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# common.py
|
| 2 |
+
from .core import *
|
| 3 |
+
from .helpers import DelimitedList, any_open_tag, any_close_tag
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
# some other useful expressions - using lower-case class name since we are really using this as a namespace
|
| 8 |
+
class pyparsing_common:
|
| 9 |
+
"""Here are some common low-level expressions that may be useful in
|
| 10 |
+
jump-starting parser development:
|
| 11 |
+
|
| 12 |
+
- numeric forms (:class:`integers<integer>`, :class:`reals<real>`,
|
| 13 |
+
:class:`scientific notation<sci_real>`)
|
| 14 |
+
- common :class:`programming identifiers<identifier>`
|
| 15 |
+
- network addresses (:class:`MAC<mac_address>`,
|
| 16 |
+
:class:`IPv4<ipv4_address>`, :class:`IPv6<ipv6_address>`)
|
| 17 |
+
- ISO8601 :class:`dates<iso8601_date>` and
|
| 18 |
+
:class:`datetime<iso8601_datetime>`
|
| 19 |
+
- :class:`UUID<uuid>`
|
| 20 |
+
- :class:`comma-separated list<comma_separated_list>`
|
| 21 |
+
- :class:`url`
|
| 22 |
+
|
| 23 |
+
Parse actions:
|
| 24 |
+
|
| 25 |
+
- :class:`convert_to_integer`
|
| 26 |
+
- :class:`convert_to_float`
|
| 27 |
+
- :class:`convert_to_date`
|
| 28 |
+
- :class:`convert_to_datetime`
|
| 29 |
+
- :class:`strip_html_tags`
|
| 30 |
+
- :class:`upcase_tokens`
|
| 31 |
+
- :class:`downcase_tokens`
|
| 32 |
+
|
| 33 |
+
Example::
|
| 34 |
+
|
| 35 |
+
pyparsing_common.number.run_tests('''
|
| 36 |
+
# any int or real number, returned as the appropriate type
|
| 37 |
+
100
|
| 38 |
+
-100
|
| 39 |
+
+100
|
| 40 |
+
3.14159
|
| 41 |
+
6.02e23
|
| 42 |
+
1e-12
|
| 43 |
+
''')
|
| 44 |
+
|
| 45 |
+
pyparsing_common.fnumber.run_tests('''
|
| 46 |
+
# any int or real number, returned as float
|
| 47 |
+
100
|
| 48 |
+
-100
|
| 49 |
+
+100
|
| 50 |
+
3.14159
|
| 51 |
+
6.02e23
|
| 52 |
+
1e-12
|
| 53 |
+
''')
|
| 54 |
+
|
| 55 |
+
pyparsing_common.hex_integer.run_tests('''
|
| 56 |
+
# hex numbers
|
| 57 |
+
100
|
| 58 |
+
FF
|
| 59 |
+
''')
|
| 60 |
+
|
| 61 |
+
pyparsing_common.fraction.run_tests('''
|
| 62 |
+
# fractions
|
| 63 |
+
1/2
|
| 64 |
+
-3/4
|
| 65 |
+
''')
|
| 66 |
+
|
| 67 |
+
pyparsing_common.mixed_integer.run_tests('''
|
| 68 |
+
# mixed fractions
|
| 69 |
+
1
|
| 70 |
+
1/2
|
| 71 |
+
-3/4
|
| 72 |
+
1-3/4
|
| 73 |
+
''')
|
| 74 |
+
|
| 75 |
+
import uuid
|
| 76 |
+
pyparsing_common.uuid.set_parse_action(token_map(uuid.UUID))
|
| 77 |
+
pyparsing_common.uuid.run_tests('''
|
| 78 |
+
# uuid
|
| 79 |
+
12345678-1234-5678-1234-567812345678
|
| 80 |
+
''')
|
| 81 |
+
|
| 82 |
+
prints::
|
| 83 |
+
|
| 84 |
+
# any int or real number, returned as the appropriate type
|
| 85 |
+
100
|
| 86 |
+
[100]
|
| 87 |
+
|
| 88 |
+
-100
|
| 89 |
+
[-100]
|
| 90 |
+
|
| 91 |
+
+100
|
| 92 |
+
[100]
|
| 93 |
+
|
| 94 |
+
3.14159
|
| 95 |
+
[3.14159]
|
| 96 |
+
|
| 97 |
+
6.02e23
|
| 98 |
+
[6.02e+23]
|
| 99 |
+
|
| 100 |
+
1e-12
|
| 101 |
+
[1e-12]
|
| 102 |
+
|
| 103 |
+
# any int or real number, returned as float
|
| 104 |
+
100
|
| 105 |
+
[100.0]
|
| 106 |
+
|
| 107 |
+
-100
|
| 108 |
+
[-100.0]
|
| 109 |
+
|
| 110 |
+
+100
|
| 111 |
+
[100.0]
|
| 112 |
+
|
| 113 |
+
3.14159
|
| 114 |
+
[3.14159]
|
| 115 |
+
|
| 116 |
+
6.02e23
|
| 117 |
+
[6.02e+23]
|
| 118 |
+
|
| 119 |
+
1e-12
|
| 120 |
+
[1e-12]
|
| 121 |
+
|
| 122 |
+
# hex numbers
|
| 123 |
+
100
|
| 124 |
+
[256]
|
| 125 |
+
|
| 126 |
+
FF
|
| 127 |
+
[255]
|
| 128 |
+
|
| 129 |
+
# fractions
|
| 130 |
+
1/2
|
| 131 |
+
[0.5]
|
| 132 |
+
|
| 133 |
+
-3/4
|
| 134 |
+
[-0.75]
|
| 135 |
+
|
| 136 |
+
# mixed fractions
|
| 137 |
+
1
|
| 138 |
+
[1]
|
| 139 |
+
|
| 140 |
+
1/2
|
| 141 |
+
[0.5]
|
| 142 |
+
|
| 143 |
+
-3/4
|
| 144 |
+
[-0.75]
|
| 145 |
+
|
| 146 |
+
1-3/4
|
| 147 |
+
[1.75]
|
| 148 |
+
|
| 149 |
+
# uuid
|
| 150 |
+
12345678-1234-5678-1234-567812345678
|
| 151 |
+
[UUID('12345678-1234-5678-1234-567812345678')]
|
| 152 |
+
"""
|
| 153 |
+
|
| 154 |
+
convert_to_integer = token_map(int)
|
| 155 |
+
"""
|
| 156 |
+
Parse action for converting parsed integers to Python int
|
| 157 |
+
"""
|
| 158 |
+
|
| 159 |
+
convert_to_float = token_map(float)
|
| 160 |
+
"""
|
| 161 |
+
Parse action for converting parsed numbers to Python float
|
| 162 |
+
"""
|
| 163 |
+
|
| 164 |
+
integer = Word(nums).set_name("integer").set_parse_action(convert_to_integer)
|
| 165 |
+
"""expression that parses an unsigned integer, returns an int"""
|
| 166 |
+
|
| 167 |
+
hex_integer = (
|
| 168 |
+
Word(hexnums).set_name("hex integer").set_parse_action(token_map(int, 16))
|
| 169 |
+
)
|
| 170 |
+
"""expression that parses a hexadecimal integer, returns an int"""
|
| 171 |
+
|
| 172 |
+
signed_integer = (
|
| 173 |
+
Regex(r"[+-]?\d+")
|
| 174 |
+
.set_name("signed integer")
|
| 175 |
+
.set_parse_action(convert_to_integer)
|
| 176 |
+
)
|
| 177 |
+
"""expression that parses an integer with optional leading sign, returns an int"""
|
| 178 |
+
|
| 179 |
+
fraction = (
|
| 180 |
+
signed_integer().set_parse_action(convert_to_float)
|
| 181 |
+
+ "/"
|
| 182 |
+
+ signed_integer().set_parse_action(convert_to_float)
|
| 183 |
+
).set_name("fraction")
|
| 184 |
+
"""fractional expression of an integer divided by an integer, returns a float"""
|
| 185 |
+
fraction.add_parse_action(lambda tt: tt[0] / tt[-1])
|
| 186 |
+
|
| 187 |
+
mixed_integer = (
|
| 188 |
+
fraction | signed_integer + Opt(Opt("-").suppress() + fraction)
|
| 189 |
+
).set_name("fraction or mixed integer-fraction")
|
| 190 |
+
"""mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
|
| 191 |
+
mixed_integer.add_parse_action(sum)
|
| 192 |
+
|
| 193 |
+
real = (
|
| 194 |
+
Regex(r"[+-]?(?:\d+\.\d*|\.\d+)")
|
| 195 |
+
.set_name("real number")
|
| 196 |
+
.set_parse_action(convert_to_float)
|
| 197 |
+
)
|
| 198 |
+
"""expression that parses a floating point number and returns a float"""
|
| 199 |
+
|
| 200 |
+
sci_real = (
|
| 201 |
+
Regex(r"[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)")
|
| 202 |
+
.set_name("real number with scientific notation")
|
| 203 |
+
.set_parse_action(convert_to_float)
|
| 204 |
+
)
|
| 205 |
+
"""expression that parses a floating point number with optional
|
| 206 |
+
scientific notation and returns a float"""
|
| 207 |
+
|
| 208 |
+
# streamlining this expression makes the docs nicer-looking
|
| 209 |
+
number = (sci_real | real | signed_integer).set_name("number").streamline()
|
| 210 |
+
"""any numeric expression, returns the corresponding Python type"""
|
| 211 |
+
|
| 212 |
+
fnumber = (
|
| 213 |
+
Regex(r"[+-]?\d+\.?\d*(?:[eE][+-]?\d+)?")
|
| 214 |
+
.set_name("fnumber")
|
| 215 |
+
.set_parse_action(convert_to_float)
|
| 216 |
+
)
|
| 217 |
+
"""any int or real number, returned as float"""
|
| 218 |
+
|
| 219 |
+
ieee_float = (
|
| 220 |
+
Regex(r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))")
|
| 221 |
+
.set_name("ieee_float")
|
| 222 |
+
.set_parse_action(convert_to_float)
|
| 223 |
+
)
|
| 224 |
+
"""any floating-point literal (int, real number, infinity, or NaN), returned as float"""
|
| 225 |
+
|
| 226 |
+
identifier = Word(identchars, identbodychars).set_name("identifier")
|
| 227 |
+
"""typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
|
| 228 |
+
|
| 229 |
+
ipv4_address = Regex(
|
| 230 |
+
r"(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}"
|
| 231 |
+
).set_name("IPv4 address")
|
| 232 |
+
"IPv4 address (``0.0.0.0 - 255.255.255.255``)"
|
| 233 |
+
|
| 234 |
+
_ipv6_part = Regex(r"[0-9a-fA-F]{1,4}").set_name("hex_integer")
|
| 235 |
+
_full_ipv6_address = (_ipv6_part + (":" + _ipv6_part) * 7).set_name(
|
| 236 |
+
"full IPv6 address"
|
| 237 |
+
)
|
| 238 |
+
_short_ipv6_address = (
|
| 239 |
+
Opt(_ipv6_part + (":" + _ipv6_part) * (0, 6))
|
| 240 |
+
+ "::"
|
| 241 |
+
+ Opt(_ipv6_part + (":" + _ipv6_part) * (0, 6))
|
| 242 |
+
).set_name("short IPv6 address")
|
| 243 |
+
_short_ipv6_address.add_condition(
|
| 244 |
+
lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8
|
| 245 |
+
)
|
| 246 |
+
_mixed_ipv6_address = ("::ffff:" + ipv4_address).set_name("mixed IPv6 address")
|
| 247 |
+
ipv6_address = Combine(
|
| 248 |
+
(_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).set_name(
|
| 249 |
+
"IPv6 address"
|
| 250 |
+
)
|
| 251 |
+
).set_name("IPv6 address")
|
| 252 |
+
"IPv6 address (long, short, or mixed form)"
|
| 253 |
+
|
| 254 |
+
mac_address = Regex(
|
| 255 |
+
r"[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}"
|
| 256 |
+
).set_name("MAC address")
|
| 257 |
+
"MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"
|
| 258 |
+
|
| 259 |
+
@staticmethod
|
| 260 |
+
def convert_to_date(fmt: str = "%Y-%m-%d"):
|
| 261 |
+
"""
|
| 262 |
+
Helper to create a parse action for converting parsed date string to Python datetime.date
|
| 263 |
+
|
| 264 |
+
Params -
|
| 265 |
+
- fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``)
|
| 266 |
+
|
| 267 |
+
Example::
|
| 268 |
+
|
| 269 |
+
date_expr = pyparsing_common.iso8601_date.copy()
|
| 270 |
+
date_expr.set_parse_action(pyparsing_common.convert_to_date())
|
| 271 |
+
print(date_expr.parse_string("1999-12-31"))
|
| 272 |
+
|
| 273 |
+
prints::
|
| 274 |
+
|
| 275 |
+
[datetime.date(1999, 12, 31)]
|
| 276 |
+
"""
|
| 277 |
+
|
| 278 |
+
def cvt_fn(ss, ll, tt):
|
| 279 |
+
try:
|
| 280 |
+
return datetime.strptime(tt[0], fmt).date()
|
| 281 |
+
except ValueError as ve:
|
| 282 |
+
raise ParseException(ss, ll, str(ve))
|
| 283 |
+
|
| 284 |
+
return cvt_fn
|
| 285 |
+
|
| 286 |
+
@staticmethod
|
| 287 |
+
def convert_to_datetime(fmt: str = "%Y-%m-%dT%H:%M:%S.%f"):
|
| 288 |
+
"""Helper to create a parse action for converting parsed
|
| 289 |
+
datetime string to Python datetime.datetime
|
| 290 |
+
|
| 291 |
+
Params -
|
| 292 |
+
- fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``)
|
| 293 |
+
|
| 294 |
+
Example::
|
| 295 |
+
|
| 296 |
+
dt_expr = pyparsing_common.iso8601_datetime.copy()
|
| 297 |
+
dt_expr.set_parse_action(pyparsing_common.convert_to_datetime())
|
| 298 |
+
print(dt_expr.parse_string("1999-12-31T23:59:59.999"))
|
| 299 |
+
|
| 300 |
+
prints::
|
| 301 |
+
|
| 302 |
+
[datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
|
| 303 |
+
"""
|
| 304 |
+
|
| 305 |
+
def cvt_fn(s, l, t):
|
| 306 |
+
try:
|
| 307 |
+
return datetime.strptime(t[0], fmt)
|
| 308 |
+
except ValueError as ve:
|
| 309 |
+
raise ParseException(s, l, str(ve))
|
| 310 |
+
|
| 311 |
+
return cvt_fn
|
| 312 |
+
|
| 313 |
+
iso8601_date = Regex(
|
| 314 |
+
r"(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?"
|
| 315 |
+
).set_name("ISO8601 date")
|
| 316 |
+
"ISO8601 date (``yyyy-mm-dd``)"
|
| 317 |
+
|
| 318 |
+
iso8601_datetime = Regex(
|
| 319 |
+
r"(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?"
|
| 320 |
+
).set_name("ISO8601 datetime")
|
| 321 |
+
"ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``"
|
| 322 |
+
|
| 323 |
+
uuid = Regex(r"[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}").set_name("UUID")
|
| 324 |
+
"UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)"
|
| 325 |
+
|
| 326 |
+
_html_stripper = any_open_tag.suppress() | any_close_tag.suppress()
|
| 327 |
+
|
| 328 |
+
@staticmethod
|
| 329 |
+
def strip_html_tags(s: str, l: int, tokens: ParseResults):
|
| 330 |
+
"""Parse action to remove HTML tags from web page HTML source
|
| 331 |
+
|
| 332 |
+
Example::
|
| 333 |
+
|
| 334 |
+
# strip HTML links from normal text
|
| 335 |
+
text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
|
| 336 |
+
td, td_end = make_html_tags("TD")
|
| 337 |
+
table_text = td + SkipTo(td_end).set_parse_action(pyparsing_common.strip_html_tags)("body") + td_end
|
| 338 |
+
print(table_text.parse_string(text).body)
|
| 339 |
+
|
| 340 |
+
Prints::
|
| 341 |
+
|
| 342 |
+
More info at the pyparsing wiki page
|
| 343 |
+
"""
|
| 344 |
+
return pyparsing_common._html_stripper.transform_string(tokens[0])
|
| 345 |
+
|
| 346 |
+
_commasepitem = (
|
| 347 |
+
Combine(
|
| 348 |
+
OneOrMore(
|
| 349 |
+
~Literal(",")
|
| 350 |
+
+ ~LineEnd()
|
| 351 |
+
+ Word(printables, exclude_chars=",")
|
| 352 |
+
+ Opt(White(" \t") + ~FollowedBy(LineEnd() | ","))
|
| 353 |
+
)
|
| 354 |
+
)
|
| 355 |
+
.streamline()
|
| 356 |
+
.set_name("commaItem")
|
| 357 |
+
)
|
| 358 |
+
comma_separated_list = DelimitedList(
|
| 359 |
+
Opt(quoted_string.copy() | _commasepitem, default="")
|
| 360 |
+
).set_name("comma separated list")
|
| 361 |
+
"""Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
|
| 362 |
+
|
| 363 |
+
upcase_tokens = staticmethod(token_map(lambda t: t.upper()))
|
| 364 |
+
"""Parse action to convert tokens to upper case."""
|
| 365 |
+
|
| 366 |
+
downcase_tokens = staticmethod(token_map(lambda t: t.lower()))
|
| 367 |
+
"""Parse action to convert tokens to lower case."""
|
| 368 |
+
|
| 369 |
+
# fmt: off
|
| 370 |
+
url = Regex(
|
| 371 |
+
# https://mathiasbynens.be/demo/url-regex
|
| 372 |
+
# https://gist.github.com/dperini/729294
|
| 373 |
+
r"(?P<url>" +
|
| 374 |
+
# protocol identifier (optional)
|
| 375 |
+
# short syntax // still required
|
| 376 |
+
r"(?:(?:(?P<scheme>https?|ftp):)?\/\/)" +
|
| 377 |
+
# user:pass BasicAuth (optional)
|
| 378 |
+
r"(?:(?P<auth>\S+(?::\S*)?)@)?" +
|
| 379 |
+
r"(?P<host>" +
|
| 380 |
+
# IP address exclusion
|
| 381 |
+
# private & local networks
|
| 382 |
+
r"(?!(?:10|127)(?:\.\d{1,3}){3})" +
|
| 383 |
+
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})" +
|
| 384 |
+
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})" +
|
| 385 |
+
# IP address dotted notation octets
|
| 386 |
+
# excludes loopback network 0.0.0.0
|
| 387 |
+
# excludes reserved space >= 224.0.0.0
|
| 388 |
+
# excludes network & broadcast addresses
|
| 389 |
+
# (first & last IP address of each class)
|
| 390 |
+
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" +
|
| 391 |
+
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}" +
|
| 392 |
+
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" +
|
| 393 |
+
r"|" +
|
| 394 |
+
# host & domain names, may end with dot
|
| 395 |
+
# can be replaced by a shortest alternative
|
| 396 |
+
# (?![-_])(?:[-\w\u00a1-\uffff]{0,63}[^-_]\.)+
|
| 397 |
+
r"(?:" +
|
| 398 |
+
r"(?:" +
|
| 399 |
+
r"[a-z0-9\u00a1-\uffff]" +
|
| 400 |
+
r"[a-z0-9\u00a1-\uffff_-]{0,62}" +
|
| 401 |
+
r")?" +
|
| 402 |
+
r"[a-z0-9\u00a1-\uffff]\." +
|
| 403 |
+
r")+" +
|
| 404 |
+
# TLD identifier name, may end with dot
|
| 405 |
+
r"(?:[a-z\u00a1-\uffff]{2,}\.?)" +
|
| 406 |
+
r")" +
|
| 407 |
+
# port number (optional)
|
| 408 |
+
r"(:(?P<port>\d{2,5}))?" +
|
| 409 |
+
# resource path (optional)
|
| 410 |
+
r"(?P<path>\/[^?# ]*)?" +
|
| 411 |
+
# query string (optional)
|
| 412 |
+
r"(\?(?P<query>[^#]*))?" +
|
| 413 |
+
# fragment (optional)
|
| 414 |
+
r"(#(?P<fragment>\S*))?" +
|
| 415 |
+
r")"
|
| 416 |
+
).set_name("url")
|
| 417 |
+
"""URL (http/https/ftp scheme)"""
|
| 418 |
+
# fmt: on
|
| 419 |
+
|
| 420 |
+
# pre-PEP8 compatibility names
|
| 421 |
+
# fmt: off
|
| 422 |
+
convertToInteger = staticmethod(replaced_by_pep8("convertToInteger", convert_to_integer))
|
| 423 |
+
convertToFloat = staticmethod(replaced_by_pep8("convertToFloat", convert_to_float))
|
| 424 |
+
convertToDate = staticmethod(replaced_by_pep8("convertToDate", convert_to_date))
|
| 425 |
+
convertToDatetime = staticmethod(replaced_by_pep8("convertToDatetime", convert_to_datetime))
|
| 426 |
+
stripHTMLTags = staticmethod(replaced_by_pep8("stripHTMLTags", strip_html_tags))
|
| 427 |
+
upcaseTokens = staticmethod(replaced_by_pep8("upcaseTokens", upcase_tokens))
|
| 428 |
+
downcaseTokens = staticmethod(replaced_by_pep8("downcaseTokens", downcase_tokens))
|
| 429 |
+
# fmt: on
|
| 430 |
+
|
| 431 |
+
|
| 432 |
+
_builtin_exprs = [
|
| 433 |
+
v for v in vars(pyparsing_common).values() if isinstance(v, ParserElement)
|
| 434 |
+
]
|
.venv/lib/python3.11/site-packages/pyparsing/core.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/diagram/__init__.py
ADDED
|
@@ -0,0 +1,744 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# mypy: ignore-errors
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import itertools
|
| 5 |
+
import railroad
|
| 6 |
+
import pyparsing
|
| 7 |
+
import dataclasses
|
| 8 |
+
import typing
|
| 9 |
+
from typing import (
|
| 10 |
+
Generic,
|
| 11 |
+
TypeVar,
|
| 12 |
+
Callable,
|
| 13 |
+
Iterable,
|
| 14 |
+
)
|
| 15 |
+
from jinja2 import Template
|
| 16 |
+
from io import StringIO
|
| 17 |
+
import inspect
|
| 18 |
+
import re
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
jinja2_template_source = """\
|
| 22 |
+
{% if not embed %}
|
| 23 |
+
<!DOCTYPE html>
|
| 24 |
+
<html>
|
| 25 |
+
<head>
|
| 26 |
+
{% endif %}
|
| 27 |
+
{% if not head %}
|
| 28 |
+
<style>
|
| 29 |
+
.railroad-heading {
|
| 30 |
+
font-family: monospace;
|
| 31 |
+
}
|
| 32 |
+
</style>
|
| 33 |
+
{% else %}
|
| 34 |
+
{{ head | safe }}
|
| 35 |
+
{% endif %}
|
| 36 |
+
{% if not embed %}
|
| 37 |
+
</head>
|
| 38 |
+
<body>
|
| 39 |
+
{% endif %}
|
| 40 |
+
<meta charset="UTF-8"/>
|
| 41 |
+
{{ body | safe }}
|
| 42 |
+
{% for diagram in diagrams %}
|
| 43 |
+
<div class="railroad-group">
|
| 44 |
+
<h1 class="railroad-heading" id="{{ diagram.bookmark }}">{{ diagram.title }}</h1>
|
| 45 |
+
<div class="railroad-description">{{ diagram.text }}</div>
|
| 46 |
+
<div class="railroad-svg">
|
| 47 |
+
{{ diagram.svg }}
|
| 48 |
+
</div>
|
| 49 |
+
</div>
|
| 50 |
+
{% endfor %}
|
| 51 |
+
{% if not embed %}
|
| 52 |
+
</body>
|
| 53 |
+
</html>
|
| 54 |
+
{% endif %}
|
| 55 |
+
"""
|
| 56 |
+
|
| 57 |
+
template = Template(jinja2_template_source)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
_bookmark_lookup = {}
|
| 61 |
+
_bookmark_ids = itertools.count(start=1)
|
| 62 |
+
|
| 63 |
+
def _make_bookmark(s: str) -> str:
|
| 64 |
+
"""
|
| 65 |
+
Converts a string into a valid HTML bookmark (ID or anchor name).
|
| 66 |
+
"""
|
| 67 |
+
if s in _bookmark_lookup:
|
| 68 |
+
return _bookmark_lookup[s]
|
| 69 |
+
|
| 70 |
+
# Replace invalid characters with hyphens and ensure only valid characters
|
| 71 |
+
bookmark = re.sub(r'[^a-zA-Z0-9-]+', '-', s)
|
| 72 |
+
|
| 73 |
+
# Ensure it starts with a letter by adding 'z' if necessary
|
| 74 |
+
if not bookmark[:1].isalpha():
|
| 75 |
+
bookmark = f"z{bookmark}"
|
| 76 |
+
|
| 77 |
+
# Convert to lowercase and strip hyphens
|
| 78 |
+
bookmark = bookmark.lower().strip('-')
|
| 79 |
+
|
| 80 |
+
_bookmark_lookup[s] = bookmark = f"{bookmark}-{next(_bookmark_ids):04d}"
|
| 81 |
+
|
| 82 |
+
return bookmark
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def _collapse_verbose_regex(regex_str: str) -> str:
|
| 86 |
+
if "\n" not in regex_str:
|
| 87 |
+
return regex_str
|
| 88 |
+
collapsed = pyparsing.Regex(r"#.*$").suppress().transform_string(regex_str)
|
| 89 |
+
collapsed = re.sub(r"\s*\n\s*", "", collapsed)
|
| 90 |
+
return collapsed
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
@dataclasses.dataclass
|
| 94 |
+
class NamedDiagram:
|
| 95 |
+
"""
|
| 96 |
+
A simple structure for associating a name with a railroad diagram
|
| 97 |
+
"""
|
| 98 |
+
|
| 99 |
+
name: str
|
| 100 |
+
index: int
|
| 101 |
+
diagram: railroad.DiagramItem = None
|
| 102 |
+
|
| 103 |
+
@property
|
| 104 |
+
def bookmark(self):
|
| 105 |
+
bookmark = _make_bookmark(self.name)
|
| 106 |
+
return bookmark
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
T = TypeVar("T")
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class EachItem(railroad.Group):
|
| 113 |
+
"""
|
| 114 |
+
Custom railroad item to compose a:
|
| 115 |
+
- Group containing a
|
| 116 |
+
- OneOrMore containing a
|
| 117 |
+
- Choice of the elements in the Each
|
| 118 |
+
with the group label indicating that all must be matched
|
| 119 |
+
"""
|
| 120 |
+
|
| 121 |
+
all_label = "[ALL]"
|
| 122 |
+
|
| 123 |
+
def __init__(self, *items):
|
| 124 |
+
choice_item = railroad.Choice(len(items) - 1, *items)
|
| 125 |
+
one_or_more_item = railroad.OneOrMore(item=choice_item)
|
| 126 |
+
super().__init__(one_or_more_item, label=self.all_label)
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class AnnotatedItem(railroad.Group):
|
| 130 |
+
"""
|
| 131 |
+
Simple subclass of Group that creates an annotation label
|
| 132 |
+
"""
|
| 133 |
+
|
| 134 |
+
def __init__(self, label: str, item):
|
| 135 |
+
super().__init__(item=item, label=f"[{label}]" if label else "")
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
class EditablePartial(Generic[T]):
|
| 139 |
+
"""
|
| 140 |
+
Acts like a functools.partial, but can be edited. In other words, it represents a type that hasn't yet been
|
| 141 |
+
constructed.
|
| 142 |
+
"""
|
| 143 |
+
|
| 144 |
+
# We need this here because the railroad constructors actually transform the data, so can't be called until the
|
| 145 |
+
# entire tree is assembled
|
| 146 |
+
|
| 147 |
+
def __init__(self, func: Callable[..., T], args: list, kwargs: dict):
|
| 148 |
+
self.func = func
|
| 149 |
+
self.args = args
|
| 150 |
+
self.kwargs = kwargs
|
| 151 |
+
|
| 152 |
+
@classmethod
|
| 153 |
+
def from_call(cls, func: Callable[..., T], *args, **kwargs) -> EditablePartial[T]:
|
| 154 |
+
"""
|
| 155 |
+
If you call this function in the same way that you would call the constructor, it will store the arguments
|
| 156 |
+
as you expect. For example EditablePartial.from_call(Fraction, 1, 3)() == Fraction(1, 3)
|
| 157 |
+
"""
|
| 158 |
+
return EditablePartial(func=func, args=list(args), kwargs=kwargs)
|
| 159 |
+
|
| 160 |
+
@property
|
| 161 |
+
def name(self):
|
| 162 |
+
return self.kwargs["name"]
|
| 163 |
+
|
| 164 |
+
def __call__(self) -> T:
|
| 165 |
+
"""
|
| 166 |
+
Evaluate the partial and return the result
|
| 167 |
+
"""
|
| 168 |
+
args = self.args.copy()
|
| 169 |
+
kwargs = self.kwargs.copy()
|
| 170 |
+
|
| 171 |
+
# This is a helpful hack to allow you to specify varargs parameters (e.g. *args) as keyword args (e.g.
|
| 172 |
+
# args=['list', 'of', 'things'])
|
| 173 |
+
arg_spec = inspect.getfullargspec(self.func)
|
| 174 |
+
if arg_spec.varargs in self.kwargs:
|
| 175 |
+
args += kwargs.pop(arg_spec.varargs)
|
| 176 |
+
|
| 177 |
+
return self.func(*args, **kwargs)
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def railroad_to_html(diagrams: list[NamedDiagram], embed=False, **kwargs) -> str:
|
| 181 |
+
"""
|
| 182 |
+
Given a list of NamedDiagram, produce a single HTML string that visualises those diagrams
|
| 183 |
+
:params kwargs: kwargs to be passed in to the template
|
| 184 |
+
"""
|
| 185 |
+
data = []
|
| 186 |
+
for diagram in diagrams:
|
| 187 |
+
if diagram.diagram is None:
|
| 188 |
+
continue
|
| 189 |
+
io = StringIO()
|
| 190 |
+
try:
|
| 191 |
+
css = kwargs.get("css")
|
| 192 |
+
diagram.diagram.writeStandalone(io.write, css=css)
|
| 193 |
+
except AttributeError:
|
| 194 |
+
diagram.diagram.writeSvg(io.write)
|
| 195 |
+
title = diagram.name
|
| 196 |
+
if diagram.index == 0:
|
| 197 |
+
title += " (root)"
|
| 198 |
+
data.append(
|
| 199 |
+
{
|
| 200 |
+
"title": title, "text": "", "svg": io.getvalue(), "bookmark": diagram.bookmark
|
| 201 |
+
}
|
| 202 |
+
)
|
| 203 |
+
|
| 204 |
+
return template.render(diagrams=data, embed=embed, **kwargs)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
def resolve_partial(partial: EditablePartial[T]) -> T:
|
| 208 |
+
"""
|
| 209 |
+
Recursively resolves a collection of Partials into whatever type they are
|
| 210 |
+
"""
|
| 211 |
+
if isinstance(partial, EditablePartial):
|
| 212 |
+
partial.args = resolve_partial(partial.args)
|
| 213 |
+
partial.kwargs = resolve_partial(partial.kwargs)
|
| 214 |
+
return partial()
|
| 215 |
+
elif isinstance(partial, list):
|
| 216 |
+
return [resolve_partial(x) for x in partial]
|
| 217 |
+
elif isinstance(partial, dict):
|
| 218 |
+
return {key: resolve_partial(x) for key, x in partial.items()}
|
| 219 |
+
else:
|
| 220 |
+
return partial
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def to_railroad(
|
| 224 |
+
element: pyparsing.ParserElement,
|
| 225 |
+
diagram_kwargs: typing.Optional[dict] = None,
|
| 226 |
+
vertical: int = 3,
|
| 227 |
+
show_results_names: bool = False,
|
| 228 |
+
show_groups: bool = False,
|
| 229 |
+
) -> list[NamedDiagram]:
|
| 230 |
+
"""
|
| 231 |
+
Convert a pyparsing element tree into a list of diagrams. This is the recommended entrypoint to diagram
|
| 232 |
+
creation if you want to access the Railroad tree before it is converted to HTML
|
| 233 |
+
:param element: base element of the parser being diagrammed
|
| 234 |
+
:param diagram_kwargs: kwargs to pass to the Diagram() constructor
|
| 235 |
+
:param vertical: (optional) - int - limit at which number of alternatives should be
|
| 236 |
+
shown vertically instead of horizontally
|
| 237 |
+
:param show_results_names - bool to indicate whether results name annotations should be
|
| 238 |
+
included in the diagram
|
| 239 |
+
:param show_groups - bool to indicate whether groups should be highlighted with an unlabeled
|
| 240 |
+
surrounding box
|
| 241 |
+
"""
|
| 242 |
+
# Convert the whole tree underneath the root
|
| 243 |
+
lookup = ConverterState(diagram_kwargs=diagram_kwargs or {})
|
| 244 |
+
_to_diagram_element(
|
| 245 |
+
element,
|
| 246 |
+
lookup=lookup,
|
| 247 |
+
parent=None,
|
| 248 |
+
vertical=vertical,
|
| 249 |
+
show_results_names=show_results_names,
|
| 250 |
+
show_groups=show_groups,
|
| 251 |
+
)
|
| 252 |
+
|
| 253 |
+
root_id = id(element)
|
| 254 |
+
# Convert the root if it hasn't been already
|
| 255 |
+
if root_id in lookup:
|
| 256 |
+
if not element.customName:
|
| 257 |
+
lookup[root_id].name = ""
|
| 258 |
+
lookup[root_id].mark_for_extraction(root_id, lookup, force=True)
|
| 259 |
+
|
| 260 |
+
# Now that we're finished, we can convert from intermediate structures into Railroad elements
|
| 261 |
+
diags = list(lookup.diagrams.values())
|
| 262 |
+
if len(diags) > 1:
|
| 263 |
+
# collapse out duplicate diags with the same name
|
| 264 |
+
seen = set()
|
| 265 |
+
deduped_diags = []
|
| 266 |
+
for d in diags:
|
| 267 |
+
# don't extract SkipTo elements, they are uninformative as subdiagrams
|
| 268 |
+
if d.name == "...":
|
| 269 |
+
continue
|
| 270 |
+
if d.name is not None and d.name not in seen:
|
| 271 |
+
seen.add(d.name)
|
| 272 |
+
deduped_diags.append(d)
|
| 273 |
+
resolved = [resolve_partial(partial) for partial in deduped_diags]
|
| 274 |
+
else:
|
| 275 |
+
# special case - if just one diagram, always display it, even if
|
| 276 |
+
# it has no name
|
| 277 |
+
resolved = [resolve_partial(partial) for partial in diags]
|
| 278 |
+
return sorted(resolved, key=lambda diag: diag.index)
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def _should_vertical(
|
| 282 |
+
specification: int, exprs: Iterable[pyparsing.ParserElement]
|
| 283 |
+
) -> bool:
|
| 284 |
+
"""
|
| 285 |
+
Returns true if we should return a vertical list of elements
|
| 286 |
+
"""
|
| 287 |
+
if specification is None:
|
| 288 |
+
return False
|
| 289 |
+
else:
|
| 290 |
+
return len(_visible_exprs(exprs)) >= specification
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
@dataclasses.dataclass
|
| 294 |
+
class ElementState:
|
| 295 |
+
"""
|
| 296 |
+
State recorded for an individual pyparsing Element
|
| 297 |
+
"""
|
| 298 |
+
|
| 299 |
+
#: The pyparsing element that this represents
|
| 300 |
+
element: pyparsing.ParserElement
|
| 301 |
+
#: The output Railroad element in an unconverted state
|
| 302 |
+
converted: EditablePartial
|
| 303 |
+
#: The parent Railroad element, which we store so that we can extract this if it's duplicated
|
| 304 |
+
parent: EditablePartial
|
| 305 |
+
#: The order in which we found this element, used for sorting diagrams if this is extracted into a diagram
|
| 306 |
+
number: int
|
| 307 |
+
#: The name of the element
|
| 308 |
+
name: str = None
|
| 309 |
+
#: The index of this inside its parent
|
| 310 |
+
parent_index: typing.Optional[int] = None
|
| 311 |
+
#: If true, we should extract this out into a subdiagram
|
| 312 |
+
extract: bool = False
|
| 313 |
+
#: If true, all of this element's children have been filled out
|
| 314 |
+
complete: bool = False
|
| 315 |
+
|
| 316 |
+
def mark_for_extraction(
|
| 317 |
+
self, el_id: int, state: ConverterState, name: str = None, force: bool = False
|
| 318 |
+
):
|
| 319 |
+
"""
|
| 320 |
+
Called when this instance has been seen twice, and thus should eventually be extracted into a sub-diagram
|
| 321 |
+
:param el_id: id of the element
|
| 322 |
+
:param state: element/diagram state tracker
|
| 323 |
+
:param name: name to use for this element's text
|
| 324 |
+
:param force: If true, force extraction now, regardless of the state of this. Only useful for extracting the
|
| 325 |
+
root element when we know we're finished
|
| 326 |
+
"""
|
| 327 |
+
self.extract = True
|
| 328 |
+
|
| 329 |
+
# Set the name
|
| 330 |
+
if not self.name:
|
| 331 |
+
if name:
|
| 332 |
+
# Allow forcing a custom name
|
| 333 |
+
self.name = name
|
| 334 |
+
elif self.element.customName:
|
| 335 |
+
self.name = self.element.customName
|
| 336 |
+
else:
|
| 337 |
+
self.name = ""
|
| 338 |
+
|
| 339 |
+
# Just because this is marked for extraction doesn't mean we can do it yet. We may have to wait for children
|
| 340 |
+
# to be added
|
| 341 |
+
# Also, if this is just a string literal etc, don't bother extracting it
|
| 342 |
+
if force or (self.complete and _worth_extracting(self.element)):
|
| 343 |
+
state.extract_into_diagram(el_id)
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
class ConverterState:
|
| 347 |
+
"""
|
| 348 |
+
Stores some state that persists between recursions into the element tree
|
| 349 |
+
"""
|
| 350 |
+
|
| 351 |
+
def __init__(self, diagram_kwargs: typing.Optional[dict] = None):
|
| 352 |
+
#: A dictionary mapping ParserElements to state relating to them
|
| 353 |
+
self._element_diagram_states: dict[int, ElementState] = {}
|
| 354 |
+
#: A dictionary mapping ParserElement IDs to subdiagrams generated from them
|
| 355 |
+
self.diagrams: dict[int, EditablePartial[NamedDiagram]] = {}
|
| 356 |
+
#: The index of the next unnamed element
|
| 357 |
+
self.unnamed_index: int = 1
|
| 358 |
+
#: The index of the next element. This is used for sorting
|
| 359 |
+
self.index: int = 0
|
| 360 |
+
#: Shared kwargs that are used to customize the construction of diagrams
|
| 361 |
+
self.diagram_kwargs: dict = diagram_kwargs or {}
|
| 362 |
+
self.extracted_diagram_names: set[str] = set()
|
| 363 |
+
|
| 364 |
+
def __setitem__(self, key: int, value: ElementState):
|
| 365 |
+
self._element_diagram_states[key] = value
|
| 366 |
+
|
| 367 |
+
def __getitem__(self, key: int) -> ElementState:
|
| 368 |
+
return self._element_diagram_states[key]
|
| 369 |
+
|
| 370 |
+
def __delitem__(self, key: int):
|
| 371 |
+
del self._element_diagram_states[key]
|
| 372 |
+
|
| 373 |
+
def __contains__(self, key: int):
|
| 374 |
+
return key in self._element_diagram_states
|
| 375 |
+
|
| 376 |
+
def get(self, key, default=None):
|
| 377 |
+
try:
|
| 378 |
+
return self[key]
|
| 379 |
+
except KeyError:
|
| 380 |
+
return default
|
| 381 |
+
|
| 382 |
+
def generate_unnamed(self) -> int:
|
| 383 |
+
"""
|
| 384 |
+
Generate a number used in the name of an otherwise unnamed diagram
|
| 385 |
+
"""
|
| 386 |
+
self.unnamed_index += 1
|
| 387 |
+
return self.unnamed_index
|
| 388 |
+
|
| 389 |
+
def generate_index(self) -> int:
|
| 390 |
+
"""
|
| 391 |
+
Generate a number used to index a diagram
|
| 392 |
+
"""
|
| 393 |
+
self.index += 1
|
| 394 |
+
return self.index
|
| 395 |
+
|
| 396 |
+
def extract_into_diagram(self, el_id: int):
|
| 397 |
+
"""
|
| 398 |
+
Used when we encounter the same token twice in the same tree. When this
|
| 399 |
+
happens, we replace all instances of that token with a terminal, and
|
| 400 |
+
create a new subdiagram for the token
|
| 401 |
+
"""
|
| 402 |
+
position = self[el_id]
|
| 403 |
+
|
| 404 |
+
# Replace the original definition of this element with a regular block
|
| 405 |
+
if position.parent:
|
| 406 |
+
href = f"#{_make_bookmark(position.name)}"
|
| 407 |
+
ret = EditablePartial.from_call(railroad.NonTerminal, text=position.name, href=href)
|
| 408 |
+
if "item" in position.parent.kwargs:
|
| 409 |
+
position.parent.kwargs["item"] = ret
|
| 410 |
+
elif "items" in position.parent.kwargs:
|
| 411 |
+
position.parent.kwargs["items"][position.parent_index] = ret
|
| 412 |
+
|
| 413 |
+
# If the element we're extracting is a group, skip to its content but keep the title
|
| 414 |
+
if position.converted.func == railroad.Group:
|
| 415 |
+
content = position.converted.kwargs["item"]
|
| 416 |
+
else:
|
| 417 |
+
content = position.converted
|
| 418 |
+
|
| 419 |
+
self.diagrams[el_id] = EditablePartial.from_call(
|
| 420 |
+
NamedDiagram,
|
| 421 |
+
name=position.name,
|
| 422 |
+
diagram=EditablePartial.from_call(
|
| 423 |
+
railroad.Diagram, content, **self.diagram_kwargs
|
| 424 |
+
),
|
| 425 |
+
index=position.number,
|
| 426 |
+
)
|
| 427 |
+
|
| 428 |
+
del self[el_id]
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def _worth_extracting(element: pyparsing.ParserElement) -> bool:
|
| 432 |
+
"""
|
| 433 |
+
Returns true if this element is worth having its own sub-diagram. Simply, if any of its children
|
| 434 |
+
themselves have children, then its complex enough to extract
|
| 435 |
+
"""
|
| 436 |
+
children = element.recurse()
|
| 437 |
+
return any(child.recurse() for child in children)
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def _apply_diagram_item_enhancements(fn):
|
| 441 |
+
"""
|
| 442 |
+
decorator to ensure enhancements to a diagram item (such as results name annotations)
|
| 443 |
+
get applied on return from _to_diagram_element (we do this since there are several
|
| 444 |
+
returns in _to_diagram_element)
|
| 445 |
+
"""
|
| 446 |
+
|
| 447 |
+
def _inner(
|
| 448 |
+
element: pyparsing.ParserElement,
|
| 449 |
+
parent: typing.Optional[EditablePartial],
|
| 450 |
+
lookup: ConverterState = None,
|
| 451 |
+
vertical: int = None,
|
| 452 |
+
index: int = 0,
|
| 453 |
+
name_hint: str = None,
|
| 454 |
+
show_results_names: bool = False,
|
| 455 |
+
show_groups: bool = False,
|
| 456 |
+
) -> typing.Optional[EditablePartial]:
|
| 457 |
+
ret = fn(
|
| 458 |
+
element,
|
| 459 |
+
parent,
|
| 460 |
+
lookup,
|
| 461 |
+
vertical,
|
| 462 |
+
index,
|
| 463 |
+
name_hint,
|
| 464 |
+
show_results_names,
|
| 465 |
+
show_groups,
|
| 466 |
+
)
|
| 467 |
+
|
| 468 |
+
# apply annotation for results name, if present
|
| 469 |
+
if show_results_names and ret is not None:
|
| 470 |
+
element_results_name = element.resultsName
|
| 471 |
+
if element_results_name:
|
| 472 |
+
# add "*" to indicate if this is a "list all results" name
|
| 473 |
+
modal_tag = "" if element.modalResults else "*"
|
| 474 |
+
ret = EditablePartial.from_call(
|
| 475 |
+
railroad.Group,
|
| 476 |
+
item=ret,
|
| 477 |
+
label=f"{repr(element_results_name)}{modal_tag}",
|
| 478 |
+
)
|
| 479 |
+
|
| 480 |
+
return ret
|
| 481 |
+
|
| 482 |
+
return _inner
|
| 483 |
+
|
| 484 |
+
|
| 485 |
+
def _visible_exprs(exprs: Iterable[pyparsing.ParserElement]):
|
| 486 |
+
non_diagramming_exprs = (
|
| 487 |
+
pyparsing.ParseElementEnhance,
|
| 488 |
+
pyparsing.PositionToken,
|
| 489 |
+
pyparsing.And._ErrorStop,
|
| 490 |
+
)
|
| 491 |
+
return [
|
| 492 |
+
e
|
| 493 |
+
for e in exprs
|
| 494 |
+
if not isinstance(e, non_diagramming_exprs)
|
| 495 |
+
]
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
@_apply_diagram_item_enhancements
def _to_diagram_element(
    element: pyparsing.ParserElement,
    parent: typing.Optional[EditablePartial],
    lookup: ConverterState = None,
    vertical: int = None,
    index: int = 0,
    name_hint: str = None,
    show_results_names: bool = False,
    show_groups: bool = False,
    show_hidden: bool = False,
) -> typing.Optional[EditablePartial]:
    """
    Recursively converts a PyParsing Element to a railroad Element
    :param lookup: The shared converter state that keeps track of useful things
    :param index: The index of this element within the parent
    :param parent: The parent of this element in the output tree
    :param vertical: Controls at what point we make a list of elements vertical. If this is an integer (the default),
    it sets the threshold of the number of items before we go vertical. If True, always go vertical, if False, never
    do so
    :param name_hint: If provided, this will override the generated name
    :param show_results_names: bool flag indicating whether to add annotations for results names
    :param show_groups: bool flag indicating whether to show groups using bounding box
    :param show_hidden: bool flag indicating whether to show elements that are typically hidden
    :returns: The converted version of the input element, but as a Partial that hasn't yet been constructed
    """
    exprs = element.recurse()
    name = name_hint or element.customName or type(element).__name__

    # Python's id() is used to provide a unique identifier for elements
    el_id = id(element)

    element_results_name = element.resultsName

    # Here we basically bypass processing certain wrapper elements if they contribute nothing to the diagram
    if not element.customName:
        if isinstance(
            element,
            (
                # pyparsing.TokenConverter,
                pyparsing.Forward,
                pyparsing.Located,
            ),
        ):
            # However, if this element has a useful custom name, and its child does not, we can pass it on to the child
            if exprs:
                if not exprs[0].customName:
                    propagated_name = name
                else:
                    propagated_name = None

                return _to_diagram_element(
                    element.expr,
                    parent=parent,
                    lookup=lookup,
                    vertical=vertical,
                    index=index,
                    name_hint=propagated_name,
                    show_results_names=show_results_names,
                    show_groups=show_groups,
                )

    # If the element isn't worth extracting, we always treat it as the first time we say it
    if _worth_extracting(element):
        looked_up = lookup.get(el_id)
        if looked_up and looked_up.name is not None:
            # If we've seen this element exactly once before, we are only just now finding out that it's a duplicate,
            # so we have to extract it into a new diagram.
            looked_up.mark_for_extraction(el_id, lookup, name=name_hint)
            href = f"#{_make_bookmark(looked_up.name)}"
            ret = EditablePartial.from_call(railroad.NonTerminal, text=looked_up.name, href=href)
            return ret

        elif el_id in lookup.diagrams:
            # If we have seen the element at least twice before, and have already extracted it into a subdiagram, we
            # just put in a marker element that refers to the sub-diagram
            text = lookup.diagrams[el_id].kwargs["name"]
            ret = EditablePartial.from_call(
                railroad.NonTerminal, text=text, href=f"#{_make_bookmark(text)}"
            )
            return ret

    # Recursively convert child elements
    # Here we find the most relevant Railroad element for matching pyparsing Element
    # We use ``items=[]`` here to hold the place for where the child elements will go once created

    # see if this element is normally hidden, and whether hidden elements are desired
    # if not, just return None
    if not element.show_in_diagram and not show_hidden:
        return None

    if isinstance(element, pyparsing.And):
        # detect And's created with ``expr*N`` notation - for these use a OneOrMore with a repeat
        # (all will have the same name, and resultsName)
        if not exprs:
            return None
        if len(set((e.name, e.resultsName) for e in exprs)) == 1 and len(exprs) > 2:
            ret = EditablePartial.from_call(
                railroad.OneOrMore, item="", repeat=str(len(exprs))
            )
        elif _should_vertical(vertical, exprs):
            ret = EditablePartial.from_call(railroad.Stack, items=[])
        else:
            ret = EditablePartial.from_call(railroad.Sequence, items=[])
    elif isinstance(element, (pyparsing.Or, pyparsing.MatchFirst)):
        if not exprs:
            return None
        if _should_vertical(vertical, exprs):
            ret = EditablePartial.from_call(railroad.Choice, 0, items=[])
        else:
            ret = EditablePartial.from_call(railroad.HorizontalChoice, items=[])
    elif isinstance(element, pyparsing.Each):
        if not exprs:
            return None
        ret = EditablePartial.from_call(EachItem, items=[])
    elif isinstance(element, pyparsing.NotAny):
        ret = EditablePartial.from_call(AnnotatedItem, label="NOT", item="")
    elif isinstance(element, pyparsing.FollowedBy):
        ret = EditablePartial.from_call(AnnotatedItem, label="LOOKAHEAD", item="")
    elif isinstance(element, pyparsing.PrecededBy):
        ret = EditablePartial.from_call(AnnotatedItem, label="LOOKBEHIND", item="")
    elif isinstance(element, pyparsing.Group):
        if show_groups:
            ret = EditablePartial.from_call(AnnotatedItem, label="", item="")
        else:
            ret = EditablePartial.from_call(
                railroad.Group, item=None, label=element_results_name
            )
    elif isinstance(element, pyparsing.TokenConverter):
        # label the wrapper with the converter's class name, unless it is the
        # bare TokenConverter base class, which adds nothing visible
        label = type(element).__name__.lower()
        if label == "tokenconverter":
            ret = EditablePartial.from_call(railroad.Sequence, items=[])
        else:
            ret = EditablePartial.from_call(AnnotatedItem, label=label, item="")
    elif isinstance(element, pyparsing.Opt):
        ret = EditablePartial.from_call(railroad.Optional, item="")
    elif isinstance(element, pyparsing.OneOrMore):
        if element.not_ender is not None:
            # rewrite "(expr, stop_on=ender)" as "(~ender + expr)[1, ...]" and
            # convert that equivalent expression instead
            args = [
                parent,
                lookup,
                vertical,
                index,
                name_hint,
                show_results_names,
                show_groups,
            ]
            return _to_diagram_element(
                (~element.not_ender.expr + element.expr)[1, ...].set_name(element.name),
                *args,
            )
        ret = EditablePartial.from_call(railroad.OneOrMore, item=None)
    elif isinstance(element, pyparsing.ZeroOrMore):
        if element.not_ender is not None:
            # same rewrite as for OneOrMore above, but with "[...]" (zero or more)
            args = [
                parent,
                lookup,
                vertical,
                index,
                name_hint,
                show_results_names,
                show_groups,
            ]
            return _to_diagram_element(
                (~element.not_ender.expr + element.expr)[...].set_name(element.name),
                *args,
            )
        ret = EditablePartial.from_call(railroad.ZeroOrMore, item="")
    elif isinstance(element, pyparsing.Empty) and not element.customName:
        # Skip unnamed "Empty" elements
        ret = None
    elif isinstance(element, pyparsing.ParseElementEnhance):
        ret = EditablePartial.from_call(railroad.Sequence, items=[])
    elif len(exprs) > 0 and not element_results_name:
        ret = EditablePartial.from_call(railroad.Group, item="", label=name)
    elif isinstance(element, pyparsing.Regex):
        collapsed_patt = _collapse_verbose_regex(element.pattern)
        ret = EditablePartial.from_call(railroad.Terminal, collapsed_patt)
    elif len(exprs) > 0:
        ret = EditablePartial.from_call(railroad.Sequence, items=[])
    else:
        # leaf element: render its default name as a terminal
        terminal = EditablePartial.from_call(railroad.Terminal, element.defaultName)
        ret = terminal

    if ret is None:
        return

    # Indicate this element's position in the tree so we can extract it if necessary
    lookup[el_id] = ElementState(
        element=element,
        converted=ret,
        parent=parent,
        parent_index=index,
        number=lookup.generate_index(),
    )
    if element.customName:
        lookup[el_id].mark_for_extraction(el_id, lookup, element.customName)

    i = 0
    for expr in exprs:
        # Add a placeholder index in case we have to extract the child before we even add it to the parent
        if "items" in ret.kwargs:
            ret.kwargs["items"].insert(i, None)

        item = _to_diagram_element(
            expr,
            parent=ret,
            lookup=lookup,
            vertical=vertical,
            index=i,
            show_results_names=show_results_names,
            show_groups=show_groups,
        )

        # Some elements don't need to be shown in the diagram
        if item is not None:
            if "item" in ret.kwargs:
                ret.kwargs["item"] = item
            elif "items" in ret.kwargs:
                # If we've already extracted the child, don't touch this index, since it's occupied by a nonterminal
                ret.kwargs["items"][i] = item
                i += 1
        elif "items" in ret.kwargs:
            # If we're supposed to skip this element, remove it from the parent
            del ret.kwargs["items"][i]

    # If all this items children are none, skip this item
    if ret and (
        ("items" in ret.kwargs and len(ret.kwargs["items"]) == 0)
        or ("item" in ret.kwargs and ret.kwargs["item"] is None)
    ):
        ret = EditablePartial.from_call(railroad.Terminal, name)

    # Mark this element as "complete", ie it has all of its children
    if el_id in lookup:
        lookup[el_id].complete = True

    if el_id in lookup and lookup[el_id].extract and lookup[el_id].complete:
        # element was flagged as a duplicate: pull it out into its own
        # sub-diagram and replace it here with a NonTerminal reference
        lookup.extract_into_diagram(el_id)
        if ret is not None:
            text = lookup.diagrams[el_id].kwargs["name"]
            href = f"#{_make_bookmark(text)}"
            ret = EditablePartial.from_call(
                railroad.NonTerminal, text=text, href=href
            )

    return ret
|
.venv/lib/python3.11/site-packages/pyparsing/diagram/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (31.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/pyparsing/exceptions.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# exceptions.py
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import copy
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import typing
|
| 8 |
+
from functools import cached_property
|
| 9 |
+
|
| 10 |
+
from .unicode import pyparsing_unicode as ppu
|
| 11 |
+
from .util import (
|
| 12 |
+
_collapse_string_to_ranges,
|
| 13 |
+
col,
|
| 14 |
+
line,
|
| 15 |
+
lineno,
|
| 16 |
+
replaced_by_pep8,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class _ExceptionWordUnicodeSet(
    ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cyrillic
):
    # Union of unicode ranges whose alphanumeric characters are treated as
    # "word" characters when extracting the text found at an error location.
    pass
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# regex character-class body covering the "word" characters above, collapsed
# into compact ranges; the extractor matches either a run of up to 16 such
# characters or (as a fallback) any single character at the error location
_extract_alphanums = _collapse_string_to_ranges(_ExceptionWordUnicodeSet.alphanums)
_exception_word_extractor = re.compile("([" + _extract_alphanums + "]{1,16})|.")
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ParseBaseException(Exception):
    """base exception class for all parsing runtime exceptions"""

    loc: int  # character offset into pstr where the exception occurred
    msg: str  # explanatory message
    pstr: str  # the string being parsed (may be "" for message-only exceptions)
    parser_element: typing.Any  # "ParserElement"
    args: tuple[str, int, typing.Optional[str]]

    __slots__ = (
        "loc",
        "msg",
        "pstr",
        "parser_element",
        "args",
    )

    # Performance tuning: we construct a *lot* of these, so keep this
    # constructor as small and fast as possible
    def __init__(
        self,
        pstr: str,
        loc: int = 0,
        msg: typing.Optional[str] = None,
        elem=None,
    ):
        # single-argument form ParseBaseException(msg): the sole positional
        # argument is the message, and there is no input string
        if msg is None:
            msg, pstr = pstr, ""

        self.loc = loc
        self.msg = msg
        self.pstr = pstr
        self.parser_element = elem
        self.args = (pstr, loc, msg)

    @staticmethod
    def explain_exception(exc: Exception, depth: int = 16) -> str:
        """
        Method to take an exception and translate the Python internal traceback into a list
        of the pyparsing expressions that caused the exception to be raised.

        Parameters:

        - exc - exception raised during parsing (need not be a ParseException, in support
          of Python exceptions that might be raised in a parse action)
        - depth (default=16) - number of levels back in the stack trace to list expression
          and function names; if None, the full stack trace names will be listed; if 0, only
          the failing input line, marker, and exception string will be shown

        Returns a multi-line string listing the ParserElements and/or function names in the
        exception's stack trace.
        """
        # deferred imports to avoid a circular dependency with .core
        import inspect
        from .core import ParserElement

        if depth is None:
            depth = sys.getrecursionlimit()
        ret: list[str] = []
        if isinstance(exc, ParseBaseException):
            # show the failing line with a caret under the error column
            ret.append(exc.line)
            ret.append(f"{' ' * (exc.column - 1)}^")
        ret.append(f"{type(exc).__name__}: {exc}")

        if depth <= 0 or exc.__traceback__ is None:
            return "\n".join(ret)

        callers = inspect.getinnerframes(exc.__traceback__, context=depth)
        seen: set[int] = set()  # ids of ParserElements already reported
        for ff in callers[-depth:]:
            frm = ff[0]

            f_self = frm.f_locals.get("self", None)
            if isinstance(f_self, ParserElement):
                # only report frames that are actually doing the parsing,
                # and report each ParserElement at most once
                if not frm.f_code.co_name.startswith(("parseImpl", "_parseNoCache")):
                    continue
                if id(f_self) in seen:
                    continue
                seen.add(id(f_self))

                self_type = type(f_self)
                ret.append(f"{self_type.__module__}.{self_type.__name__} - {f_self}")

            elif f_self is not None:
                self_type = type(f_self)
                ret.append(f"{self_type.__module__}.{self_type.__name__}")

            else:
                code = frm.f_code
                # skip decorator wrappers and module-level frames
                if code.co_name in ("wrapper", "<module>"):
                    continue

                ret.append(code.co_name)

            depth -= 1
            if not depth:
                break

        return "\n".join(ret)

    @classmethod
    def _from_exception(cls, pe) -> ParseBaseException:
        """
        internal factory method to simplify creating one type of ParseException
        from another - avoids having __init__ signature conflicts among subclasses
        """
        return cls(pe.pstr, pe.loc, pe.msg, pe.parser_element)

    @cached_property
    def line(self) -> str:
        """
        Return the line of text where the exception occurred.
        """
        return line(self.loc, self.pstr)

    @cached_property
    def lineno(self) -> int:
        """
        Return the 1-based line number of text where the exception occurred.
        """
        return lineno(self.loc, self.pstr)

    @cached_property
    def col(self) -> int:
        """
        Return the 1-based column on the line of text where the exception occurred.
        """
        return col(self.loc, self.pstr)

    @cached_property
    def column(self) -> int:
        """
        Return the 1-based column on the line of text where the exception occurred.
        """
        return col(self.loc, self.pstr)

    @cached_property
    def found(self) -> str:
        # human-readable description of the text found at the error location
        if not self.pstr:
            return ""

        if self.loc >= len(self.pstr):
            return "end of text"

        # pull out next word at error location
        found_match = _exception_word_extractor.match(self.pstr, self.loc)
        if found_match is not None:
            found_text = found_match.group(0)
        else:
            found_text = self.pstr[self.loc : self.loc + 1]

        return repr(found_text).replace(r"\\", "\\")

    # pre-PEP8 compatibility
    @property
    def parserElement(self):
        return self.parser_element

    @parserElement.setter
    def parserElement(self, elem):
        self.parser_element = elem

    def copy(self):
        # shallow copy is sufficient: all attributes are immutable or shared
        return copy.copy(self)

    def formatted_message(self) -> str:
        # one-line summary: message, found text, and char/line/column location
        found_phrase = f", found {self.found}" if self.found else ""
        return f"{self.msg}{found_phrase}  (at char {self.loc}), (line:{self.lineno}, col:{self.column})"

    def __str__(self) -> str:
        return self.formatted_message()

    def __repr__(self):
        return str(self)

    def mark_input_line(
        self, marker_string: typing.Optional[str] = None, *, markerString: str = ">!<"
    ) -> str:
        """
        Extracts the exception line from the input string, and marks
        the location of the exception with a special symbol.
        """
        # marker_string (PEP8 name) takes precedence over the legacy
        # markerString keyword when both are supplied
        markerString = marker_string if marker_string is not None else markerString
        line_str = self.line
        line_column = self.column - 1
        if markerString:
            line_str = f"{line_str[:line_column]}{markerString}{line_str[line_column:]}"
        return line_str.strip()

    def explain(self, depth: int = 16) -> str:
        """
        Method to translate the Python internal traceback into a list
        of the pyparsing expressions that caused the exception to be raised.

        Parameters:

        - depth (default=16) - number of levels back in the stack trace to list expression
          and function names; if None, the full stack trace names will be listed; if 0, only
          the failing input line, marker, and exception string will be shown

        Returns a multi-line string listing the ParserElements and/or function names in the
        exception's stack trace.

        Example::

            # an expression to parse 3 integers
            expr = pp.Word(pp.nums) * 3
            try:
                # a failing parse - the third integer is prefixed with "A"
                expr.parse_string("123 456 A789")
            except pp.ParseException as pe:
                print(pe.explain(depth=0))

        prints::

            123 456 A789
                    ^
            ParseException: Expected W:(0-9), found 'A'  (at char 8), (line:1, col:9)

        Note: the diagnostic output will include string representations of the expressions
        that failed to parse. These representations will be more helpful if you use `set_name` to
        give identifiable names to your expressions. Otherwise they will use the default string
        forms, which may be cryptic to read.

        Note: pyparsing's default truncation of exception tracebacks may also truncate the
        stack of expressions that are displayed in the ``explain`` output. To get the full listing
        of parser expressions, you may have to set ``ParserElement.verbose_stacktrace = True``
        """
        return self.explain_exception(self, depth)

    # Compatibility synonyms
    # fmt: off
    markInputline = replaced_by_pep8("markInputline", mark_input_line)
    # fmt: on
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
class ParseException(ParseBaseException):
    """
    Exception thrown when a parse expression doesn't match the input string

    Example::

        integer = Word(nums).set_name("integer")
        try:
            integer.parse_string("ABC")
        except ParseException as pe:
            print(pe)
            print(f"column: {pe.column}")

    prints::

        Expected integer (at char 0), (line:1, col:1)
        column: 1

    """
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class ParseFatalException(ParseBaseException):
    """
    User-throwable exception thrown when inconsistent parse content
    is found; stops all parsing immediately
    """
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
class ParseSyntaxException(ParseFatalException):
    """
    Just like :class:`ParseFatalException`, but thrown internally
    when an :class:`ErrorStop<And._ErrorStop>` ('-' operator) indicates
    that parsing is to stop immediately because an unbacktrackable
    syntax error has been found.
    """
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
class RecursiveGrammarException(Exception):
    """
    Exception thrown by :class:`ParserElement.validate` if the
    grammar could be left-recursive; parser may need to enable
    left recursion using :class:`ParserElement.enable_left_recursion<ParserElement.enable_left_recursion>`

    Deprecated: only used by deprecated method ParserElement.validate.
    """

    def __init__(self, parseElementList):
        # keep the chain of elements that form the (possibly) recursive cycle
        self.parseElementTrace = parseElementList

    def __str__(self) -> str:
        return "RecursiveGrammarException: {}".format(self.parseElementTrace)
|
.venv/lib/python3.11/site-packages/pyparsing/helpers.py
ADDED
|
@@ -0,0 +1,1102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# helpers.py
|
| 2 |
+
import html.entities
|
| 3 |
+
import operator
|
| 4 |
+
import re
|
| 5 |
+
import sys
|
| 6 |
+
import typing
|
| 7 |
+
|
| 8 |
+
from . import __diag__
|
| 9 |
+
from .core import *
|
| 10 |
+
from .util import (
|
| 11 |
+
_bslash,
|
| 12 |
+
_flatten,
|
| 13 |
+
_escape_regex_range_chars,
|
| 14 |
+
make_compressed_re,
|
| 15 |
+
replaced_by_pep8,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
#
|
| 20 |
+
# global helpers
|
| 21 |
+
#
|
| 22 |
+
def counted_array(
    expr: ParserElement,
    int_expr: typing.Optional[ParserElement] = None,
    *,
    intExpr: typing.Optional[ParserElement] = None,
) -> ParserElement:
    """Define a counted list: a leading integer followed by that many
    repetitions of ``expr``::

        integer expr expr expr...

    The returned tokens are the list of ``expr`` matches; the leading
    count token is suppressed (though any results names it set are kept).

    If ``int_expr`` is given, it must be a pyparsing expression producing
    an integer value; otherwise a decimal integer is assumed.

    Example::

        counted_array(Word(alphas)).parse_string('2 ab cd ef')  # -> ['ab', 'cd']

        # leading count given in binary: '10' means 2 items follow
        binary_constant = Word('01').set_parse_action(lambda t: int(t[0], 2))
        counted_array(Word(alphas), int_expr=binary_constant).parse_string('10 ab cd ef')  # -> ['ab', 'cd']

    ``intExpr`` is the pre-PEP8 spelling of ``int_expr``.
    """
    count_expr = intExpr or int_expr
    contents = Forward()

    def bind_repeat_count(s, l, t):
        # Rewire the Forward so it matches exactly t[0] copies of expr.
        nonlocal contents
        n = t[0]
        contents <<= (expr * n) if n else Empty()
        # drop the count token itself, but keep any named results
        del t[:]

    if count_expr is None:
        count_expr = Word(nums).set_parse_action(lambda t: int(t[0]))
    else:
        count_expr = count_expr.copy()
    count_expr.set_name("arrayLen")
    count_expr.add_parse_action(bind_repeat_count, call_during_try=True)
    return (count_expr + contents).set_name(f"(len) {expr}...")
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def match_previous_literal(expr: ParserElement) -> ParserElement:
    """Define an expression that matches a literal repeat of whatever a
    previous expression matched. For example::

        first = Word(nums)
        second = match_previous_literal(first)
        match_expr = first + ":" + second

    matches ``"1:1"`` but not ``"1:2"``. Because matching is by literal
    text, it will also match the leading ``"1:1"`` in ``"1:10"``; use
    :class:`match_previous_expr` if that is not wanted. Do *not* use
    with packrat parsing enabled.
    """
    repeater = Forward()

    def remember_matched_text(s, l, t):
        # empty match -> the repeat matches nothing
        if not t:
            repeater << Empty()
        elif len(t) == 1:
            repeater << t[0]
        else:
            # multiple tokens: require each one in sequence
            flat = _flatten(t.as_list())
            repeater << And(Literal(tok) for tok in flat)

    expr.add_parse_action(remember_matched_text, callDuringTry=True)
    repeater.set_name("(prev) " + str(expr))
    return repeater
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def match_previous_expr(expr: ParserElement) -> ParserElement:
    """Define an expression that matches a repeat of whatever a previous
    expression matched. For example::

        first = Word(nums)
        second = match_previous_expr(first)
        match_expr = first + ":" + second

    matches ``"1:1"`` but not ``"1:2"``. Unlike
    :class:`match_previous_literal`, matching is by expression: the repeat
    is re-parsed with a copy of ``expr`` and the token lists are compared,
    so ``"1"`` is compared with ``"10"`` and the leading ``"1:1"`` of
    ``"1:10"`` does *not* match. Do *not* use with packrat parsing enabled.
    """
    rep = Forward()
    e2 = expr.copy()
    rep <<= e2

    def copy_token_to_repeater(s, l, t):
        matchTokens = _flatten(t.as_list())

        def must_match_these_tokens(s, l, t):
            theseTokens = _flatten(t.as_list())
            if theseTokens != matchTokens:
                # BUGFIX: added missing space after "found" in the message
                raise ParseException(
                    s, l, f"Expected {matchTokens}, found {theseTokens}"
                )

        rep.set_parse_action(must_match_these_tokens, callDuringTry=True)

    expr.add_parse_action(copy_token_to_repeater, callDuringTry=True)
    rep.set_name("(prev) " + str(expr))
    return rep
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def one_of(
    strs: Union[typing.Iterable[str], str],
    caseless: bool = False,
    use_regex: bool = True,
    as_keyword: bool = False,
    *,
    useRegex: bool = True,
    asKeyword: bool = False,
) -> ParserElement:
    """Quickly define a set of alternative :class:`Literal` s, with
    longest-first ordering guaranteed regardless of input order, returned
    as a single expression (a :class:`Regex` when possible, else a
    :class:`MatchFirst`).

    Parameters:

    - ``strs`` - a space-delimited string of literals, or a collection of
      string literals
    - ``caseless`` - treat all literals as caseless (default ``False``)
    - ``use_regex`` - try to build a :class:`Regex`; falls back to a
      :class:`MatchFirst` if the regex cannot be built (default ``True``)
    - ``as_keyword`` - enforce :class:`Keyword`-style whole-word matching
      (default ``False``)
    - ``asKeyword`` / ``useRegex`` - pre-PEP8 spellings, retained for
      compatibility but slated for removal

    Example::

        comp_oper = one_of("< = > <= >= !=")
        var = Word(alphas)
        number = Word(nums)
        term = var | number
        comparison_expr = term + comp_oper + term
        print(comparison_expr.search_string("B = 12 AA=23 B<=AA AA>12"))

    prints::

        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
    """
    asKeyword = asKeyword or as_keyword
    useRegex = useRegex and use_regex

    if (
        isinstance(caseless, str_type)
        and __diag__.warn_on_multiple_string_args_to_oneof
    ):
        warnings.warn(
            "warn_on_multiple_string_args_to_oneof:"
            " More than one string argument passed to one_of, pass"
            " choices as a list or space-delimited string",
            stacklevel=2,
        )

    if caseless:
        same = lambda a, b: a.upper() == b.upper()
        is_prefix_of = lambda a, b: b.upper().startswith(a.upper())
        elem_class = CaselessKeyword if asKeyword else CaselessLiteral
    else:
        same = operator.eq
        is_prefix_of = lambda a, b: b.startswith(a)
        elem_class = Keyword if asKeyword else Literal

    symbols: list[str]
    if isinstance(strs, str_type):
        symbols = typing.cast(str, strs).split()
    elif isinstance(strs, Iterable):
        symbols = list(strs)
    else:
        raise TypeError("Invalid argument to one_of, expected string or iterable")
    if not symbols:
        return NoMatch()

    # Reorder symbols so that no symbol is masked by a shorter prefix
    # appearing before it, and drop duplicates. The for-else advances the
    # cursor only when nothing was removed or moved at this position.
    i = 0
    while i < len(symbols) - 1:
        cur = symbols[i]
        for j, other in enumerate(symbols[i + 1 :]):
            if same(other, cur):
                del symbols[i + j + 1]
                break
            if len(other) > len(cur) and is_prefix_of(cur, other):
                del symbols[i + j + 1]
                symbols.insert(i, other)
                break
        else:
            i += 1

    if useRegex:
        re_flags: int = re.IGNORECASE if caseless else 0

        try:
            if all(len(sym) == 1 for sym in symbols):
                # all single characters: a character class is enough
                patt = f"[{''.join(_escape_regex_range_chars(sym) for sym in symbols)}]"
            else:
                patt = "|".join(re.escape(sym) for sym in symbols)

            # word-boundary markers when keyword-style matching is requested
            if asKeyword:
                patt = rf"\b(?:{patt})\b"

            ret = Regex(patt, flags=re_flags).set_name(" | ".join(symbols))

            if caseless:
                # report symbols in the casing given to us, not as found
                # in the input string
                symbol_map = {sym.lower(): sym for sym in symbols}
                ret.add_parse_action(lambda s, l, t: symbol_map[t[0].lower()])

            return ret

        except re.error:
            warnings.warn(
                "Exception creating Regex for one_of, building MatchFirst", stacklevel=2
            )

    # last resort, just use MatchFirst
    return MatchFirst(elem_class(sym) for sym in symbols).set_name(
        " | ".join(symbols)
    )
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def dict_of(key: ParserElement, value: ParserElement) -> ParserElement:
    """Define a dictionary from separate key and value patterns, wrapping
    them in the :class:`Dict`/:class:`OneOrMore`/:class:`Group` structure
    for you. The key pattern may include suppressed punctuation; the value
    pattern may carry results names, which propagate into the
    :class:`Dict` results.

    Example::

        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_label = label
        attr_value = Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)

        result = dict_of(attr_label, attr_value).parse_string(text)
        print(result.dump())
        print(result['shape'])
        print(result.shape)  # object attribute access works too
        print(result.as_dict())

    prints::

        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: 'light blue'
        - posn: 'upper left'
        - shape: 'SQUARE'
        - texture: 'burlap'
        SQUARE
        SQUARE
        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
    """
    entry = Group(key + value)
    return Dict(OneOrMore(entry))
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def original_text_for(
    expr: ParserElement, as_string: bool = True, *, asString: bool = True
) -> ParserElement:
    """Return the original, untokenized input text matched by ``expr``.
    Useful to recover raw tag text from parsed HTML fields, or to restore
    intervening whitespace lost during tokenizing.

    By default the result is a plain string. Pass ``as_string=False`` to
    get a :class:`ParseResults` instead, preserving any results names that
    ``expr`` defined plus a single token holding the original matched
    text. ``asString`` is the pre-PEP8 spelling, retained for
    compatibility.

    Example::

        src = "this is test <b> bold <i>text</i> </b> normal text "
        for tag in ("b", "i"):
            opener, closer = make_html_tags(tag)
            patt = original_text_for(opener + ... + closer)
            print(patt.search_string(src)[0])

    prints::

        ['<b> bold <i>text</i> </b>']
        ['<i>text</i>']
    """
    asString = asString and as_string

    # bracket expr with markers recording the start and end locations
    start_marker = Empty().set_parse_action(lambda s, loc, t: loc)
    end_marker = start_marker.copy()
    end_marker.callPreparse = False
    matchExpr = start_marker("_original_start") + expr + end_marker("_original_end")

    if asString:
        def extractText(s, l, t):
            return s[t._original_start : t._original_end]
    else:
        def extractText(s, l, t):
            # replace list contents with the raw slice; pop the marker names
            t[:] = [s[t.pop("_original_start") : t.pop("_original_end")]]

    matchExpr.set_parse_action(extractText)
    matchExpr.ignoreExprs = expr.ignoreExprs
    matchExpr.suppress_warning(Diagnostics.warn_ungrouped_named_tokens_in_collection)
    return matchExpr
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
def ungroup(expr: ParserElement) -> ParserElement:
    """Undo pyparsing's default grouping of And expressions, even if all
    but one are non-empty: the single grouped result is hoisted out.
    """
    def take_first(t):
        return t[0]

    return TokenConverter(expr).add_parse_action(take_first)
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
def locatedExpr(expr: ParserElement) -> ParserElement:
    """
    (DEPRECATED - future code should use the :class:`Located` class)
    Decorate a returned token with its starting and ending locations in
    the input string, via these results names:

    - ``locn_start`` - location where matched expression begins
    - ``locn_end`` - location where matched expression ends
    - ``value`` - the actual parsed results

    If the input contains ``<TAB>`` characters you may want
    :class:`ParserElement.parse_with_tabs`.

    Example::

        wd = Word(alphas)
        for match in locatedExpr(wd).search_string("ljsdf123lksdjjf123lkkjj1222"):
            print(match)

    prints::

        [[0, 'ljsdf', 5]]
        [[8, 'lksdjjf', 15]]
        [[18, 'lkkjj', 23]]
    """
    loc_marker = Empty().set_parse_action(lambda ss, ll, tt: ll)
    # trailing marker must not skip whitespace, or locn_end drifts forward
    return Group(
        loc_marker("locn_start")
        + expr("value")
        + loc_marker.copy().leaveWhitespace()("locn_end")
    )
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
def nested_expr(
    opener: Union[str, ParserElement] = "(",
    closer: Union[str, ParserElement] = ")",
    content: typing.Optional[ParserElement] = None,
    ignore_expr: ParserElement = quoted_string(),
    *,
    ignoreExpr: ParserElement = quoted_string(),
) -> ParserElement:
    """Define nested lists enclosed in opening and closing delimiters
    (``"("`` and ``")"`` by default).

    Parameters:

    - ``opener`` - opening delimiter (default ``"("``); may be a string or
      a pyparsing expression
    - ``closer`` - closing delimiter (default ``")"``); may be a string or
      a pyparsing expression
    - ``content`` - expression for items within the nested lists
      (default ``None``: capture whitespace-delimited words between
      delimiters)
    - ``ignore_expr`` - expression whose contents must not be treated as
      nesting delimiters, e.g. quoted strings or comments
      (default :class:`quoted_string`; pass ``None`` to ignore nothing)
    - ``ignoreExpr`` - pre-PEP8 spelling, retained for compatibility

    Example::

        data_type = one_of("void int short long char float double")
        decl_data_type = Combine(data_type + Opt(Word('*')))
        ident = Word(alphas+'_', alphanums+'_')
        number = pyparsing_common.number
        arg = Group(decl_data_type + ident)
        LPAR, RPAR = map(Suppress, "()")

        code_body = nested_expr('{', '}', ignore_expr=(quoted_string | c_style_comment))

        c_function = (decl_data_type("type")
                      + ident("name")
                      + LPAR + Opt(DelimitedList(arg), [])("args") + RPAR
                      + code_body("body"))
        c_function.ignore(c_style_comment)

        source_code = '''
            int is_odd(int x) {
                return (x%2);
            }

            int dec_to_hex(char hchar) {
                if (hchar >= '0' && hchar <= '9') {
                    return (ord(hchar)-ord('0'));
                } else {
                    return (10+ord(hchar)-ord('A'));
                }
            }
        '''
        for func in c_function.search_string(source_code):
            print("%(name)s (%(type)s) args: %(args)s" % func)


    prints::

        is_odd (int) args: [['int', 'x']]
        dec_to_hex (int) args: [['char', 'hchar']]
    """
    # reconcile pre-PEP8 and PEP8 argument spellings: the explicitly
    # overridden one wins
    if ignoreExpr != ignore_expr:
        ignoreExpr = ignore_expr if ignoreExpr == quoted_string() else ignoreExpr

    if opener == closer:
        raise ValueError("opening and closing strings cannot be the same")

    if content is None:
        if not (isinstance(opener, str_type) and isinstance(closer, str_type)):
            raise ValueError(
                "opening and closing arguments must be strings if no content expression is given"
            )
        opener = typing.cast(str, opener)
        closer = typing.cast(str, closer)
        if len(opener) == 1 and len(closer) == 1:
            # single-char delimiters: exclude them (and whitespace) per char
            if ignoreExpr is not None:
                content = Combine(
                    OneOrMore(
                        ~ignoreExpr
                        + CharsNotIn(
                            opener + closer + ParserElement.DEFAULT_WHITE_CHARS,
                            exact=1,
                        )
                    )
                ).set_parse_action(lambda t: t[0].strip())
            else:
                content = empty.copy() + CharsNotIn(
                    opener + closer + ParserElement.DEFAULT_WHITE_CHARS
                ).set_parse_action(lambda t: t[0].strip())
        else:
            # multi-char delimiters: use negative lookahead on each
            if ignoreExpr is not None:
                content = Combine(
                    OneOrMore(
                        ~ignoreExpr
                        + ~Literal(opener)
                        + ~Literal(closer)
                        + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)
                    )
                ).set_parse_action(lambda t: t[0].strip())
            else:
                content = Combine(
                    OneOrMore(
                        ~Literal(opener)
                        + ~Literal(closer)
                        + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)
                    )
                ).set_parse_action(lambda t: t[0].strip())

    ret = Forward()
    if ignoreExpr is not None:
        ret <<= Group(
            Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)
        )
    else:
        ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer))
    ret.set_name(f"nested {opener}{closer} expression")
    # don't override error message from content expressions
    ret.errmsg = None
    return ret
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")):
    """Internal helper building (opening, closing) tag expressions for a tag name."""
    if isinstance(tagStr, str_type):
        resname = tagStr
        tagStr = Keyword(tagStr, caseless=not xml)
    else:
        resname = tagStr.name

    attr_name = Word(alphas, alphanums + "_-:")
    if xml:
        # XML: attribute values must be double-quoted, names kept as-is
        attr_value = dbl_quoted_string.copy().set_parse_action(remove_quotes)
        open_tag = (
            suppress_LT
            + tagStr("tag")
            + Dict(ZeroOrMore(Group(attr_name + Suppress("=") + attr_value)))
            + Opt("/", default=[False])("empty").set_parse_action(
                lambda s, l, t: t[0] == "/"
            )
            + suppress_GT
        )
    else:
        # HTML: quoted or unquoted attribute values, names lowercased,
        # value optional
        attr_value = quoted_string.copy().set_parse_action(remove_quotes) | Word(
            printables, exclude_chars=">"
        )
        open_tag = (
            suppress_LT
            + tagStr("tag")
            + Dict(
                ZeroOrMore(
                    Group(
                        attr_name.set_parse_action(lambda t: t[0].lower())
                        + Opt(Suppress("=") + attr_value)
                    )
                )
            )
            + Opt("/", default=[False])("empty").set_parse_action(
                lambda s, l, t: t[0] == "/"
            )
            + suppress_GT
        )
    close_tag = Combine(Literal("</") + tagStr + ">", adjacent=False)

    open_tag.set_name(f"<{resname}>")
    # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels
    open_tag.add_parse_action(
        lambda t: t.__setitem__(
            "start" + "".join(resname.replace(":", " ").title().split()), t.copy()
        )
    )
    close_tag = close_tag(
        "end" + "".join(resname.replace(":", " ").title().split())
    ).set_name(f"</{resname}>")
    open_tag.tag = resname
    close_tag.tag = resname
    open_tag.tag_body = SkipTo(close_tag())
    return open_tag, close_tag
|
| 603 |
+
|
| 604 |
+
|
| 605 |
+
def make_html_tags(
    tag_str: Union[str, ParserElement]
) -> tuple[ParserElement, ParserElement]:
    """Build opening and closing tag expressions for HTML, given a tag
    name. Matches tags in either case, with namespaced attributes and
    quoted or unquoted attribute values.

    Example::

        text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
        # make_html_tags returns pyparsing expressions for the opening and
        # closing tags as a 2-tuple
        a, a_end = make_html_tags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end

        for link in link_expr.search_string(text):
            # attributes in the <A> tag (like "href" shown here) are
            # also accessible as named results
            print(link.link_text, '->', link.href)

    prints::

        pyparsing -> https://github.com/pyparsing/pyparsing/wiki
    """
    return _makeTags(tag_str, False)
|
| 630 |
+
|
| 631 |
+
|
| 632 |
+
def make_xml_tags(
    tag_str: Union[str, ParserElement]
) -> tuple[ParserElement, ParserElement]:
    """Build opening and closing tag expressions for XML, given a tag
    name. Tags match only in the exact given case.

    Example: similar to :class:`make_html_tags`
    """
    return _makeTags(tag_str, True)
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
# expressions matching any well-formed HTML open/close tag
any_open_tag: ParserElement
any_close_tag: ParserElement
any_open_tag, any_close_tag = make_html_tags(
    Word(alphas, alphanums + "_:").set_name("any tag")
)

# HTML5 entity names (sans trailing ';') mapped to replacement characters
_htmlEntityMap = {k.rstrip(";"): v for k, v in html.entities.html5.items()}
_most_common_entities = "nbsp lt gt amp quot apos cent pound euro copy".replace(
    " ", "|"
)
# lazy pattern: the compressed alternation is only built when first used
common_html_entity = Regex(
    lambda: f"&(?P<entity>{_most_common_entities}|{make_compressed_re(_htmlEntityMap)});"
).set_name("common HTML entity")
|
| 656 |
+
|
| 657 |
+
|
| 658 |
+
def replace_html_entity(s, l, t):
    """Parse action replacing a matched HTML entity with its special character."""
    return _htmlEntityMap.get(t.entity)
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
class OpAssoc(Enum):
    """Enumeration of operator associativity
    - used in constructing InfixNotationOperatorSpec for :class:`infix_notation`"""

    # left-associative: a op b op c == (a op b) op c
    LEFT = 1
    # right-associative: a op b op c == a op (b op c)
    RIGHT = 2
|
| 669 |
+
|
| 670 |
+
|
| 671 |
+
# an operator: an expression or string, or - for ternary operators - a
# pair of expressions/strings
InfixNotationOperatorArgType = Union[
    ParserElement, str, tuple[Union[ParserElement, str], Union[ParserElement, str]]
]
# one precedence level: (operator, arity, associativity[, parse action])
InfixNotationOperatorSpec = Union[
    tuple[
        InfixNotationOperatorArgType,
        int,
        OpAssoc,
        typing.Optional[ParseAction],
    ],
    tuple[
        InfixNotationOperatorArgType,
        int,
        OpAssoc,
    ],
]
|
| 687 |
+
|
| 688 |
+
|
| 689 |
+
def infix_notation(
|
| 690 |
+
base_expr: ParserElement,
|
| 691 |
+
op_list: list[InfixNotationOperatorSpec],
|
| 692 |
+
lpar: Union[str, ParserElement] = Suppress("("),
|
| 693 |
+
rpar: Union[str, ParserElement] = Suppress(")"),
|
| 694 |
+
) -> ParserElement:
|
| 695 |
+
"""Helper method for constructing grammars of expressions made up of
|
| 696 |
+
operators working in a precedence hierarchy. Operators may be unary
|
| 697 |
+
or binary, left- or right-associative. Parse actions can also be
|
| 698 |
+
attached to operator expressions. The generated parser will also
|
| 699 |
+
recognize the use of parentheses to override operator precedences
|
| 700 |
+
(see example below).
|
| 701 |
+
|
| 702 |
+
Note: if you define a deep operator list, you may see performance
|
| 703 |
+
issues when using infix_notation. See
|
| 704 |
+
:class:`ParserElement.enable_packrat` for a mechanism to potentially
|
| 705 |
+
improve your parser performance.
|
| 706 |
+
|
| 707 |
+
Parameters:
|
| 708 |
+
|
| 709 |
+
- ``base_expr`` - expression representing the most basic operand to
|
| 710 |
+
be used in the expression
|
| 711 |
+
- ``op_list`` - list of tuples, one for each operator precedence level
|
| 712 |
+
in the expression grammar; each tuple is of the form ``(op_expr,
|
| 713 |
+
num_operands, right_left_assoc, (optional)parse_action)``, where:
|
| 714 |
+
|
| 715 |
+
- ``op_expr`` is the pyparsing expression for the operator; may also
|
| 716 |
+
be a string, which will be converted to a Literal; if ``num_operands``
|
| 717 |
+
is 3, ``op_expr`` is a tuple of two expressions, for the two
|
| 718 |
+
operators separating the 3 terms
|
| 719 |
+
- ``num_operands`` is the number of terms for this operator (must be 1,
|
| 720 |
+
2, or 3)
|
| 721 |
+
- ``right_left_assoc`` is the indicator whether the operator is right
|
| 722 |
+
or left associative, using the pyparsing-defined constants
|
| 723 |
+
``OpAssoc.RIGHT`` and ``OpAssoc.LEFT``.
|
| 724 |
+
- ``parse_action`` is the parse action to be associated with
|
| 725 |
+
expressions matching this operator expression (the parse action
|
| 726 |
+
tuple member may be omitted); if the parse action is passed
|
| 727 |
+
a tuple or list of functions, this is equivalent to calling
|
| 728 |
+
``set_parse_action(*fn)``
|
| 729 |
+
(:class:`ParserElement.set_parse_action`)
|
| 730 |
+
- ``lpar`` - expression for matching left-parentheses; if passed as a
|
| 731 |
+
str, then will be parsed as ``Suppress(lpar)``. If lpar is passed as
|
| 732 |
+
an expression (such as ``Literal('(')``), then it will be kept in
|
| 733 |
+
the parsed results, and grouped with them. (default= ``Suppress('(')``)
|
| 734 |
+
- ``rpar`` - expression for matching right-parentheses; if passed as a
|
| 735 |
+
str, then will be parsed as ``Suppress(rpar)``. If rpar is passed as
|
| 736 |
+
an expression (such as ``Literal(')')``), then it will be kept in
|
| 737 |
+
the parsed results, and grouped with them. (default= ``Suppress(')')``)
|
| 738 |
+
|
| 739 |
+
Example::
|
| 740 |
+
|
| 741 |
+
# simple example of four-function arithmetic with ints and
|
| 742 |
+
# variable names
|
| 743 |
+
integer = pyparsing_common.signed_integer
|
| 744 |
+
varname = pyparsing_common.identifier
|
| 745 |
+
|
| 746 |
+
arith_expr = infix_notation(integer | varname,
|
| 747 |
+
[
|
| 748 |
+
('-', 1, OpAssoc.RIGHT),
|
| 749 |
+
(one_of('* /'), 2, OpAssoc.LEFT),
|
| 750 |
+
(one_of('+ -'), 2, OpAssoc.LEFT),
|
| 751 |
+
])
|
| 752 |
+
|
| 753 |
+
arith_expr.run_tests('''
|
| 754 |
+
5+3*6
|
| 755 |
+
(5+3)*6
|
| 756 |
+
-2--11
|
| 757 |
+
''', full_dump=False)
|
| 758 |
+
|
| 759 |
+
prints::
|
| 760 |
+
|
| 761 |
+
5+3*6
|
| 762 |
+
[[5, '+', [3, '*', 6]]]
|
| 763 |
+
|
| 764 |
+
(5+3)*6
|
| 765 |
+
[[[5, '+', 3], '*', 6]]
|
| 766 |
+
|
| 767 |
+
(5+x)*y
|
| 768 |
+
[[[5, '+', 'x'], '*', 'y']]
|
| 769 |
+
|
| 770 |
+
-2--11
|
| 771 |
+
[[['-', 2], '-', ['-', 11]]]
|
| 772 |
+
"""
|
| 773 |
+
|
| 774 |
+
# captive version of FollowedBy that does not do parse actions or capture results names
|
| 775 |
+
class _FB(FollowedBy):
|
| 776 |
+
def parseImpl(self, instring, loc, doActions=True):
|
| 777 |
+
self.expr.try_parse(instring, loc)
|
| 778 |
+
return loc, []
|
| 779 |
+
|
| 780 |
+
_FB.__name__ = "FollowedBy>"
|
| 781 |
+
|
| 782 |
+
ret = Forward()
|
| 783 |
+
ret.set_name(f"{base_expr.name}_expression")
|
| 784 |
+
if isinstance(lpar, str):
|
| 785 |
+
lpar = Suppress(lpar)
|
| 786 |
+
if isinstance(rpar, str):
|
| 787 |
+
rpar = Suppress(rpar)
|
| 788 |
+
|
| 789 |
+
nested_expr = (lpar + ret + rpar).set_name(f"nested_{base_expr.name}")
|
| 790 |
+
|
| 791 |
+
# if lpar and rpar are not suppressed, wrap in group
|
| 792 |
+
if not (isinstance(lpar, Suppress) and isinstance(rpar, Suppress)):
|
| 793 |
+
lastExpr = base_expr | Group(nested_expr)
|
| 794 |
+
else:
|
| 795 |
+
lastExpr = base_expr | nested_expr
|
| 796 |
+
|
| 797 |
+
arity: int
|
| 798 |
+
rightLeftAssoc: opAssoc
|
| 799 |
+
pa: typing.Optional[ParseAction]
|
| 800 |
+
opExpr1: ParserElement
|
| 801 |
+
opExpr2: ParserElement
|
| 802 |
+
matchExpr: ParserElement
|
| 803 |
+
match_lookahead: ParserElement
|
| 804 |
+
for operDef in op_list:
|
| 805 |
+
opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] # type: ignore[assignment]
|
| 806 |
+
if isinstance(opExpr, str_type):
|
| 807 |
+
opExpr = ParserElement._literalStringClass(opExpr)
|
| 808 |
+
opExpr = typing.cast(ParserElement, opExpr)
|
| 809 |
+
if arity == 3:
|
| 810 |
+
if not isinstance(opExpr, (tuple, list)) or len(opExpr) != 2:
|
| 811 |
+
raise ValueError(
|
| 812 |
+
"if numterms=3, opExpr must be a tuple or list of two expressions"
|
| 813 |
+
)
|
| 814 |
+
opExpr1, opExpr2 = opExpr
|
| 815 |
+
term_name = f"{opExpr1}{opExpr2} operations"
|
| 816 |
+
else:
|
| 817 |
+
term_name = f"{opExpr} operations"
|
| 818 |
+
|
| 819 |
+
if not 1 <= arity <= 3:
|
| 820 |
+
raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
|
| 821 |
+
|
| 822 |
+
if rightLeftAssoc not in (OpAssoc.LEFT, OpAssoc.RIGHT):
|
| 823 |
+
raise ValueError("operator must indicate right or left associativity")
|
| 824 |
+
|
| 825 |
+
thisExpr: ParserElement = Forward().set_name(term_name)
|
| 826 |
+
thisExpr = typing.cast(Forward, thisExpr)
|
| 827 |
+
match_lookahead = And([])
|
| 828 |
+
if rightLeftAssoc is OpAssoc.LEFT:
|
| 829 |
+
if arity == 1:
|
| 830 |
+
match_lookahead = _FB(lastExpr + opExpr)
|
| 831 |
+
matchExpr = Group(lastExpr + opExpr[1, ...])
|
| 832 |
+
elif arity == 2:
|
| 833 |
+
if opExpr is not None:
|
| 834 |
+
match_lookahead = _FB(lastExpr + opExpr + lastExpr)
|
| 835 |
+
matchExpr = Group(lastExpr + (opExpr + lastExpr)[1, ...])
|
| 836 |
+
else:
|
| 837 |
+
match_lookahead = _FB(lastExpr + lastExpr)
|
| 838 |
+
matchExpr = Group(lastExpr[2, ...])
|
| 839 |
+
elif arity == 3:
|
| 840 |
+
match_lookahead = _FB(
|
| 841 |
+
lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr
|
| 842 |
+
)
|
| 843 |
+
matchExpr = Group(
|
| 844 |
+
lastExpr + (opExpr1 + lastExpr + opExpr2 + lastExpr)[1, ...]
|
| 845 |
+
)
|
| 846 |
+
elif rightLeftAssoc is OpAssoc.RIGHT:
|
| 847 |
+
if arity == 1:
|
| 848 |
+
# try to avoid LR with this extra test
|
| 849 |
+
if not isinstance(opExpr, Opt):
|
| 850 |
+
opExpr = Opt(opExpr)
|
| 851 |
+
match_lookahead = _FB(opExpr.expr + thisExpr)
|
| 852 |
+
matchExpr = Group(opExpr + thisExpr)
|
| 853 |
+
elif arity == 2:
|
| 854 |
+
if opExpr is not None:
|
| 855 |
+
match_lookahead = _FB(lastExpr + opExpr + thisExpr)
|
| 856 |
+
matchExpr = Group(lastExpr + (opExpr + thisExpr)[1, ...])
|
| 857 |
+
else:
|
| 858 |
+
match_lookahead = _FB(lastExpr + thisExpr)
|
| 859 |
+
matchExpr = Group(lastExpr + thisExpr[1, ...])
|
| 860 |
+
elif arity == 3:
|
| 861 |
+
match_lookahead = _FB(
|
| 862 |
+
lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr
|
| 863 |
+
)
|
| 864 |
+
matchExpr = Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)
|
| 865 |
+
|
| 866 |
+
# suppress lookahead expr from railroad diagrams
|
| 867 |
+
match_lookahead.show_in_diagram = False
|
| 868 |
+
|
| 869 |
+
# TODO - determine why this statement can't be included in the following
|
| 870 |
+
# if pa block
|
| 871 |
+
matchExpr = match_lookahead + matchExpr
|
| 872 |
+
|
| 873 |
+
if pa:
|
| 874 |
+
if isinstance(pa, (tuple, list)):
|
| 875 |
+
matchExpr.set_parse_action(*pa)
|
| 876 |
+
else:
|
| 877 |
+
matchExpr.set_parse_action(pa)
|
| 878 |
+
|
| 879 |
+
thisExpr <<= (matchExpr | lastExpr).setName(term_name)
|
| 880 |
+
lastExpr = thisExpr
|
| 881 |
+
|
| 882 |
+
ret <<= lastExpr
|
| 883 |
+
return ret
|
| 884 |
+
|
| 885 |
+
|
| 886 |
+
def indentedBlock(blockStatementExpr, indentStack, indent=True, backup_stacks=[]):
|
| 887 |
+
"""
|
| 888 |
+
(DEPRECATED - use :class:`IndentedBlock` class instead)
|
| 889 |
+
Helper method for defining space-delimited indentation blocks,
|
| 890 |
+
such as those used to define block statements in Python source code.
|
| 891 |
+
|
| 892 |
+
Parameters:
|
| 893 |
+
|
| 894 |
+
- ``blockStatementExpr`` - expression defining syntax of statement that
|
| 895 |
+
is repeated within the indented block
|
| 896 |
+
- ``indentStack`` - list created by caller to manage indentation stack
|
| 897 |
+
(multiple ``statementWithIndentedBlock`` expressions within a single
|
| 898 |
+
grammar should share a common ``indentStack``)
|
| 899 |
+
- ``indent`` - boolean indicating whether block must be indented beyond
|
| 900 |
+
the current level; set to ``False`` for block of left-most statements
|
| 901 |
+
(default= ``True``)
|
| 902 |
+
|
| 903 |
+
A valid block must contain at least one ``blockStatement``.
|
| 904 |
+
|
| 905 |
+
(Note that indentedBlock uses internal parse actions which make it
|
| 906 |
+
incompatible with packrat parsing.)
|
| 907 |
+
|
| 908 |
+
Example::
|
| 909 |
+
|
| 910 |
+
data = '''
|
| 911 |
+
def A(z):
|
| 912 |
+
A1
|
| 913 |
+
B = 100
|
| 914 |
+
G = A2
|
| 915 |
+
A2
|
| 916 |
+
A3
|
| 917 |
+
B
|
| 918 |
+
def BB(a,b,c):
|
| 919 |
+
BB1
|
| 920 |
+
def BBA():
|
| 921 |
+
bba1
|
| 922 |
+
bba2
|
| 923 |
+
bba3
|
| 924 |
+
C
|
| 925 |
+
D
|
| 926 |
+
def spam(x,y):
|
| 927 |
+
def eggs(z):
|
| 928 |
+
pass
|
| 929 |
+
'''
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
indentStack = [1]
|
| 933 |
+
stmt = Forward()
|
| 934 |
+
|
| 935 |
+
identifier = Word(alphas, alphanums)
|
| 936 |
+
funcDecl = ("def" + identifier + Group("(" + Opt(delimitedList(identifier)) + ")") + ":")
|
| 937 |
+
func_body = indentedBlock(stmt, indentStack)
|
| 938 |
+
funcDef = Group(funcDecl + func_body)
|
| 939 |
+
|
| 940 |
+
rvalue = Forward()
|
| 941 |
+
funcCall = Group(identifier + "(" + Opt(delimitedList(rvalue)) + ")")
|
| 942 |
+
rvalue << (funcCall | identifier | Word(nums))
|
| 943 |
+
assignment = Group(identifier + "=" + rvalue)
|
| 944 |
+
stmt << (funcDef | assignment | identifier)
|
| 945 |
+
|
| 946 |
+
module_body = stmt[1, ...]
|
| 947 |
+
|
| 948 |
+
parseTree = module_body.parseString(data)
|
| 949 |
+
parseTree.pprint()
|
| 950 |
+
|
| 951 |
+
prints::
|
| 952 |
+
|
| 953 |
+
[['def',
|
| 954 |
+
'A',
|
| 955 |
+
['(', 'z', ')'],
|
| 956 |
+
':',
|
| 957 |
+
[['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
|
| 958 |
+
'B',
|
| 959 |
+
['def',
|
| 960 |
+
'BB',
|
| 961 |
+
['(', 'a', 'b', 'c', ')'],
|
| 962 |
+
':',
|
| 963 |
+
[['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
|
| 964 |
+
'C',
|
| 965 |
+
'D',
|
| 966 |
+
['def',
|
| 967 |
+
'spam',
|
| 968 |
+
['(', 'x', 'y', ')'],
|
| 969 |
+
':',
|
| 970 |
+
[[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
|
| 971 |
+
"""
|
| 972 |
+
backup_stacks.append(indentStack[:])
|
| 973 |
+
|
| 974 |
+
def reset_stack():
|
| 975 |
+
indentStack[:] = backup_stacks[-1]
|
| 976 |
+
|
| 977 |
+
def checkPeerIndent(s, l, t):
|
| 978 |
+
if l >= len(s):
|
| 979 |
+
return
|
| 980 |
+
curCol = col(l, s)
|
| 981 |
+
if curCol != indentStack[-1]:
|
| 982 |
+
if curCol > indentStack[-1]:
|
| 983 |
+
raise ParseException(s, l, "illegal nesting")
|
| 984 |
+
raise ParseException(s, l, "not a peer entry")
|
| 985 |
+
|
| 986 |
+
def checkSubIndent(s, l, t):
|
| 987 |
+
curCol = col(l, s)
|
| 988 |
+
if curCol > indentStack[-1]:
|
| 989 |
+
indentStack.append(curCol)
|
| 990 |
+
else:
|
| 991 |
+
raise ParseException(s, l, "not a subentry")
|
| 992 |
+
|
| 993 |
+
def checkUnindent(s, l, t):
|
| 994 |
+
if l >= len(s):
|
| 995 |
+
return
|
| 996 |
+
curCol = col(l, s)
|
| 997 |
+
if not (indentStack and curCol in indentStack):
|
| 998 |
+
raise ParseException(s, l, "not an unindent")
|
| 999 |
+
if curCol < indentStack[-1]:
|
| 1000 |
+
indentStack.pop()
|
| 1001 |
+
|
| 1002 |
+
NL = OneOrMore(LineEnd().set_whitespace_chars("\t ").suppress())
|
| 1003 |
+
INDENT = (Empty() + Empty().set_parse_action(checkSubIndent)).set_name("INDENT")
|
| 1004 |
+
PEER = Empty().set_parse_action(checkPeerIndent).set_name("")
|
| 1005 |
+
UNDENT = Empty().set_parse_action(checkUnindent).set_name("UNINDENT")
|
| 1006 |
+
if indent:
|
| 1007 |
+
smExpr = Group(
|
| 1008 |
+
Opt(NL)
|
| 1009 |
+
+ INDENT
|
| 1010 |
+
+ OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL))
|
| 1011 |
+
+ UNDENT
|
| 1012 |
+
)
|
| 1013 |
+
else:
|
| 1014 |
+
smExpr = Group(
|
| 1015 |
+
Opt(NL)
|
| 1016 |
+
+ OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL))
|
| 1017 |
+
+ Opt(UNDENT)
|
| 1018 |
+
)
|
| 1019 |
+
|
| 1020 |
+
# add a parse action to remove backup_stack from list of backups
|
| 1021 |
+
smExpr.add_parse_action(
|
| 1022 |
+
lambda: backup_stacks.pop(-1) and None if backup_stacks else None
|
| 1023 |
+
)
|
| 1024 |
+
smExpr.set_fail_action(lambda a, b, c, d: reset_stack())
|
| 1025 |
+
blockStatementExpr.ignore(_bslash + LineEnd())
|
| 1026 |
+
return smExpr.set_name("indented block")
|
| 1027 |
+
|
| 1028 |
+
|
| 1029 |
+
# it's easy to get these comment structures wrong - they're very common,
|
| 1030 |
+
# so may as well make them available
|
| 1031 |
+
c_style_comment = Regex(r"/\*(?:[^*]|\*(?!/))*\*\/").set_name("C style comment")
|
| 1032 |
+
"Comment of the form ``/* ... */``"
|
| 1033 |
+
|
| 1034 |
+
html_comment = Regex(r"<!--[\s\S]*?-->").set_name("HTML comment")
|
| 1035 |
+
"Comment of the form ``<!-- ... -->``"
|
| 1036 |
+
|
| 1037 |
+
rest_of_line = Regex(r".*").leave_whitespace().set_name("rest of line")
|
| 1038 |
+
dbl_slash_comment = Regex(r"//(?:\\\n|[^\n])*").set_name("// comment")
|
| 1039 |
+
"Comment of the form ``// ... (to end of line)``"
|
| 1040 |
+
|
| 1041 |
+
cpp_style_comment = Regex(
|
| 1042 |
+
r"(?:/\*(?:[^*]|\*(?!/))*\*\/)|(?://(?:\\\n|[^\n])*)"
|
| 1043 |
+
).set_name("C++ style comment")
|
| 1044 |
+
"Comment of either form :class:`c_style_comment` or :class:`dbl_slash_comment`"
|
| 1045 |
+
|
| 1046 |
+
java_style_comment = cpp_style_comment
|
| 1047 |
+
"Same as :class:`cpp_style_comment`"
|
| 1048 |
+
|
| 1049 |
+
python_style_comment = Regex(r"#.*").set_name("Python style comment")
|
| 1050 |
+
"Comment of the form ``# ... (to end of line)``"
|
| 1051 |
+
|
| 1052 |
+
|
| 1053 |
+
# build list of built-in expressions, for future reference if a global default value
|
| 1054 |
+
# gets updated
|
| 1055 |
+
_builtin_exprs: list[ParserElement] = [
|
| 1056 |
+
v for v in vars().values() if isinstance(v, ParserElement)
|
| 1057 |
+
]
|
| 1058 |
+
|
| 1059 |
+
|
| 1060 |
+
# compatibility function, superseded by DelimitedList class
|
| 1061 |
+
def delimited_list(
|
| 1062 |
+
expr: Union[str, ParserElement],
|
| 1063 |
+
delim: Union[str, ParserElement] = ",",
|
| 1064 |
+
combine: bool = False,
|
| 1065 |
+
min: typing.Optional[int] = None,
|
| 1066 |
+
max: typing.Optional[int] = None,
|
| 1067 |
+
*,
|
| 1068 |
+
allow_trailing_delim: bool = False,
|
| 1069 |
+
) -> ParserElement:
|
| 1070 |
+
"""(DEPRECATED - use :class:`DelimitedList` class)"""
|
| 1071 |
+
return DelimitedList(
|
| 1072 |
+
expr, delim, combine, min, max, allow_trailing_delim=allow_trailing_delim
|
| 1073 |
+
)
|
| 1074 |
+
|
| 1075 |
+
|
| 1076 |
+
# Compatibility synonyms
|
| 1077 |
+
# fmt: off
|
| 1078 |
+
opAssoc = OpAssoc
|
| 1079 |
+
anyOpenTag = any_open_tag
|
| 1080 |
+
anyCloseTag = any_close_tag
|
| 1081 |
+
commonHTMLEntity = common_html_entity
|
| 1082 |
+
cStyleComment = c_style_comment
|
| 1083 |
+
htmlComment = html_comment
|
| 1084 |
+
restOfLine = rest_of_line
|
| 1085 |
+
dblSlashComment = dbl_slash_comment
|
| 1086 |
+
cppStyleComment = cpp_style_comment
|
| 1087 |
+
javaStyleComment = java_style_comment
|
| 1088 |
+
pythonStyleComment = python_style_comment
|
| 1089 |
+
delimitedList = replaced_by_pep8("delimitedList", DelimitedList)
|
| 1090 |
+
delimited_list = replaced_by_pep8("delimited_list", DelimitedList)
|
| 1091 |
+
countedArray = replaced_by_pep8("countedArray", counted_array)
|
| 1092 |
+
matchPreviousLiteral = replaced_by_pep8("matchPreviousLiteral", match_previous_literal)
|
| 1093 |
+
matchPreviousExpr = replaced_by_pep8("matchPreviousExpr", match_previous_expr)
|
| 1094 |
+
oneOf = replaced_by_pep8("oneOf", one_of)
|
| 1095 |
+
dictOf = replaced_by_pep8("dictOf", dict_of)
|
| 1096 |
+
originalTextFor = replaced_by_pep8("originalTextFor", original_text_for)
|
| 1097 |
+
nestedExpr = replaced_by_pep8("nestedExpr", nested_expr)
|
| 1098 |
+
makeHTMLTags = replaced_by_pep8("makeHTMLTags", make_html_tags)
|
| 1099 |
+
makeXMLTags = replaced_by_pep8("makeXMLTags", make_xml_tags)
|
| 1100 |
+
replaceHTMLEntity = replaced_by_pep8("replaceHTMLEntity", replace_html_entity)
|
| 1101 |
+
infixNotation = replaced_by_pep8("infixNotation", infix_notation)
|
| 1102 |
+
# fmt: on
|
.venv/lib/python3.11/site-packages/pyparsing/py.typed
ADDED
|
File without changes
|
.venv/lib/python3.11/site-packages/pyparsing/results.py
ADDED
|
@@ -0,0 +1,816 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# results.py
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import collections
|
| 5 |
+
from collections.abc import (
|
| 6 |
+
MutableMapping,
|
| 7 |
+
Mapping,
|
| 8 |
+
MutableSequence,
|
| 9 |
+
Iterator,
|
| 10 |
+
Iterable,
|
| 11 |
+
)
|
| 12 |
+
import pprint
|
| 13 |
+
from typing import Any
|
| 14 |
+
|
| 15 |
+
from .util import replaced_by_pep8
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
str_type: tuple[type, ...] = (str, bytes)
|
| 19 |
+
_generator_type = type((_ for _ in ()))
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class _ParseResultsWithOffset:
|
| 23 |
+
tup: tuple[ParseResults, int]
|
| 24 |
+
__slots__ = ["tup"]
|
| 25 |
+
|
| 26 |
+
def __init__(self, p1: ParseResults, p2: int):
|
| 27 |
+
self.tup: tuple[ParseResults, int] = (p1, p2)
|
| 28 |
+
|
| 29 |
+
def __getitem__(self, i):
|
| 30 |
+
return self.tup[i]
|
| 31 |
+
|
| 32 |
+
def __getstate__(self):
|
| 33 |
+
return self.tup
|
| 34 |
+
|
| 35 |
+
def __setstate__(self, *args):
|
| 36 |
+
self.tup = args[0]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class ParseResults:
|
| 40 |
+
"""Structured parse results, to provide multiple means of access to
|
| 41 |
+
the parsed data:
|
| 42 |
+
|
| 43 |
+
- as a list (``len(results)``)
|
| 44 |
+
- by list index (``results[0], results[1]``, etc.)
|
| 45 |
+
- by attribute (``results.<results_name>`` - see :class:`ParserElement.set_results_name`)
|
| 46 |
+
|
| 47 |
+
Example::
|
| 48 |
+
|
| 49 |
+
integer = Word(nums)
|
| 50 |
+
date_str = (integer.set_results_name("year") + '/'
|
| 51 |
+
+ integer.set_results_name("month") + '/'
|
| 52 |
+
+ integer.set_results_name("day"))
|
| 53 |
+
# equivalent form:
|
| 54 |
+
# date_str = (integer("year") + '/'
|
| 55 |
+
# + integer("month") + '/'
|
| 56 |
+
# + integer("day"))
|
| 57 |
+
|
| 58 |
+
# parse_string returns a ParseResults object
|
| 59 |
+
result = date_str.parse_string("1999/12/31")
|
| 60 |
+
|
| 61 |
+
def test(s, fn=repr):
|
| 62 |
+
print(f"{s} -> {fn(eval(s))}")
|
| 63 |
+
test("list(result)")
|
| 64 |
+
test("result[0]")
|
| 65 |
+
test("result['month']")
|
| 66 |
+
test("result.day")
|
| 67 |
+
test("'month' in result")
|
| 68 |
+
test("'minutes' in result")
|
| 69 |
+
test("result.dump()", str)
|
| 70 |
+
|
| 71 |
+
prints::
|
| 72 |
+
|
| 73 |
+
list(result) -> ['1999', '/', '12', '/', '31']
|
| 74 |
+
result[0] -> '1999'
|
| 75 |
+
result['month'] -> '12'
|
| 76 |
+
result.day -> '31'
|
| 77 |
+
'month' in result -> True
|
| 78 |
+
'minutes' in result -> False
|
| 79 |
+
result.dump() -> ['1999', '/', '12', '/', '31']
|
| 80 |
+
- day: '31'
|
| 81 |
+
- month: '12'
|
| 82 |
+
- year: '1999'
|
| 83 |
+
"""
|
| 84 |
+
|
| 85 |
+
_null_values: tuple[Any, ...] = (None, [], ())
|
| 86 |
+
|
| 87 |
+
_name: str
|
| 88 |
+
_parent: ParseResults
|
| 89 |
+
_all_names: set[str]
|
| 90 |
+
_modal: bool
|
| 91 |
+
_toklist: list[Any]
|
| 92 |
+
_tokdict: dict[str, Any]
|
| 93 |
+
|
| 94 |
+
__slots__ = (
|
| 95 |
+
"_name",
|
| 96 |
+
"_parent",
|
| 97 |
+
"_all_names",
|
| 98 |
+
"_modal",
|
| 99 |
+
"_toklist",
|
| 100 |
+
"_tokdict",
|
| 101 |
+
)
|
| 102 |
+
|
| 103 |
+
class List(list):
|
| 104 |
+
"""
|
| 105 |
+
Simple wrapper class to distinguish parsed list results that should be preserved
|
| 106 |
+
as actual Python lists, instead of being converted to :class:`ParseResults`::
|
| 107 |
+
|
| 108 |
+
LBRACK, RBRACK = map(pp.Suppress, "[]")
|
| 109 |
+
element = pp.Forward()
|
| 110 |
+
item = ppc.integer
|
| 111 |
+
element_list = LBRACK + pp.DelimitedList(element) + RBRACK
|
| 112 |
+
|
| 113 |
+
# add parse actions to convert from ParseResults to actual Python collection types
|
| 114 |
+
def as_python_list(t):
|
| 115 |
+
return pp.ParseResults.List(t.as_list())
|
| 116 |
+
element_list.add_parse_action(as_python_list)
|
| 117 |
+
|
| 118 |
+
element <<= item | element_list
|
| 119 |
+
|
| 120 |
+
element.run_tests('''
|
| 121 |
+
100
|
| 122 |
+
[2,3,4]
|
| 123 |
+
[[2, 1],3,4]
|
| 124 |
+
[(2, 1),3,4]
|
| 125 |
+
(2,3,4)
|
| 126 |
+
''', post_parse=lambda s, r: (r[0], type(r[0])))
|
| 127 |
+
|
| 128 |
+
prints::
|
| 129 |
+
|
| 130 |
+
100
|
| 131 |
+
(100, <class 'int'>)
|
| 132 |
+
|
| 133 |
+
[2,3,4]
|
| 134 |
+
([2, 3, 4], <class 'list'>)
|
| 135 |
+
|
| 136 |
+
[[2, 1],3,4]
|
| 137 |
+
([[2, 1], 3, 4], <class 'list'>)
|
| 138 |
+
|
| 139 |
+
(Used internally by :class:`Group` when `aslist=True`.)
|
| 140 |
+
"""
|
| 141 |
+
|
| 142 |
+
def __new__(cls, contained=None):
|
| 143 |
+
if contained is None:
|
| 144 |
+
contained = []
|
| 145 |
+
|
| 146 |
+
if not isinstance(contained, list):
|
| 147 |
+
raise TypeError(
|
| 148 |
+
f"{cls.__name__} may only be constructed with a list, not {type(contained).__name__}"
|
| 149 |
+
)
|
| 150 |
+
|
| 151 |
+
return list.__new__(cls)
|
| 152 |
+
|
| 153 |
+
def __new__(cls, toklist=None, name=None, **kwargs):
|
| 154 |
+
if isinstance(toklist, ParseResults):
|
| 155 |
+
return toklist
|
| 156 |
+
self = object.__new__(cls)
|
| 157 |
+
self._name = None
|
| 158 |
+
self._parent = None
|
| 159 |
+
self._all_names = set()
|
| 160 |
+
|
| 161 |
+
if toklist is None:
|
| 162 |
+
self._toklist = []
|
| 163 |
+
elif isinstance(toklist, (list, _generator_type)):
|
| 164 |
+
self._toklist = (
|
| 165 |
+
[toklist[:]]
|
| 166 |
+
if isinstance(toklist, ParseResults.List)
|
| 167 |
+
else list(toklist)
|
| 168 |
+
)
|
| 169 |
+
else:
|
| 170 |
+
self._toklist = [toklist]
|
| 171 |
+
self._tokdict = dict()
|
| 172 |
+
return self
|
| 173 |
+
|
| 174 |
+
# Performance tuning: we construct a *lot* of these, so keep this
|
| 175 |
+
# constructor as small and fast as possible
|
| 176 |
+
def __init__(
|
| 177 |
+
self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance
|
| 178 |
+
) -> None:
|
| 179 |
+
self._tokdict: dict[str, _ParseResultsWithOffset]
|
| 180 |
+
self._modal = modal
|
| 181 |
+
|
| 182 |
+
if name is None or name == "":
|
| 183 |
+
return
|
| 184 |
+
|
| 185 |
+
if isinstance(name, int):
|
| 186 |
+
name = str(name)
|
| 187 |
+
|
| 188 |
+
if not modal:
|
| 189 |
+
self._all_names = {name}
|
| 190 |
+
|
| 191 |
+
self._name = name
|
| 192 |
+
|
| 193 |
+
if toklist in self._null_values:
|
| 194 |
+
return
|
| 195 |
+
|
| 196 |
+
if isinstance(toklist, (str_type, type)):
|
| 197 |
+
toklist = [toklist]
|
| 198 |
+
|
| 199 |
+
if asList:
|
| 200 |
+
if isinstance(toklist, ParseResults):
|
| 201 |
+
self[name] = _ParseResultsWithOffset(ParseResults(toklist._toklist), 0)
|
| 202 |
+
else:
|
| 203 |
+
self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0)
|
| 204 |
+
self[name]._name = name
|
| 205 |
+
return
|
| 206 |
+
|
| 207 |
+
try:
|
| 208 |
+
self[name] = toklist[0]
|
| 209 |
+
except (KeyError, TypeError, IndexError):
|
| 210 |
+
if toklist is not self:
|
| 211 |
+
self[name] = toklist
|
| 212 |
+
else:
|
| 213 |
+
self._name = name
|
| 214 |
+
|
| 215 |
+
def __getitem__(self, i):
|
| 216 |
+
if isinstance(i, (int, slice)):
|
| 217 |
+
return self._toklist[i]
|
| 218 |
+
|
| 219 |
+
if i not in self._all_names:
|
| 220 |
+
return self._tokdict[i][-1][0]
|
| 221 |
+
|
| 222 |
+
return ParseResults([v[0] for v in self._tokdict[i]])
|
| 223 |
+
|
| 224 |
+
def __setitem__(self, k, v, isinstance=isinstance):
|
| 225 |
+
if isinstance(v, _ParseResultsWithOffset):
|
| 226 |
+
self._tokdict[k] = self._tokdict.get(k, list()) + [v]
|
| 227 |
+
sub = v[0]
|
| 228 |
+
elif isinstance(k, (int, slice)):
|
| 229 |
+
self._toklist[k] = v
|
| 230 |
+
sub = v
|
| 231 |
+
else:
|
| 232 |
+
self._tokdict[k] = self._tokdict.get(k, []) + [
|
| 233 |
+
_ParseResultsWithOffset(v, 0)
|
| 234 |
+
]
|
| 235 |
+
sub = v
|
| 236 |
+
if isinstance(sub, ParseResults):
|
| 237 |
+
sub._parent = self
|
| 238 |
+
|
| 239 |
+
def __delitem__(self, i):
|
| 240 |
+
if not isinstance(i, (int, slice)):
|
| 241 |
+
del self._tokdict[i]
|
| 242 |
+
return
|
| 243 |
+
|
| 244 |
+
mylen = len(self._toklist)
|
| 245 |
+
del self._toklist[i]
|
| 246 |
+
|
| 247 |
+
# convert int to slice
|
| 248 |
+
if isinstance(i, int):
|
| 249 |
+
if i < 0:
|
| 250 |
+
i += mylen
|
| 251 |
+
i = slice(i, i + 1)
|
| 252 |
+
# get removed indices
|
| 253 |
+
removed = list(range(*i.indices(mylen)))
|
| 254 |
+
removed.reverse()
|
| 255 |
+
# fixup indices in token dictionary
|
| 256 |
+
for occurrences in self._tokdict.values():
|
| 257 |
+
for j in removed:
|
| 258 |
+
for k, (value, position) in enumerate(occurrences):
|
| 259 |
+
occurrences[k] = _ParseResultsWithOffset(
|
| 260 |
+
value, position - (position > j)
|
| 261 |
+
)
|
| 262 |
+
|
| 263 |
+
def __contains__(self, k) -> bool:
|
| 264 |
+
return k in self._tokdict
|
| 265 |
+
|
| 266 |
+
def __len__(self) -> int:
|
| 267 |
+
return len(self._toklist)
|
| 268 |
+
|
| 269 |
+
def __bool__(self) -> bool:
|
| 270 |
+
return not not (self._toklist or self._tokdict)
|
| 271 |
+
|
| 272 |
+
def __iter__(self) -> Iterator:
|
| 273 |
+
return iter(self._toklist)
|
| 274 |
+
|
| 275 |
+
def __reversed__(self) -> Iterator:
|
| 276 |
+
return iter(self._toklist[::-1])
|
| 277 |
+
|
| 278 |
+
def keys(self):
|
| 279 |
+
return iter(self._tokdict)
|
| 280 |
+
|
| 281 |
+
def values(self):
|
| 282 |
+
return (self[k] for k in self.keys())
|
| 283 |
+
|
| 284 |
+
def items(self):
|
| 285 |
+
return ((k, self[k]) for k in self.keys())
|
| 286 |
+
|
| 287 |
+
def haskeys(self) -> bool:
|
| 288 |
+
"""
|
| 289 |
+
Since ``keys()`` returns an iterator, this method is helpful in bypassing
|
| 290 |
+
code that looks for the existence of any defined results names."""
|
| 291 |
+
return not not self._tokdict
|
| 292 |
+
|
| 293 |
+
def pop(self, *args, **kwargs):
|
| 294 |
+
"""
|
| 295 |
+
Removes and returns item at specified index (default= ``last``).
|
| 296 |
+
Supports both ``list`` and ``dict`` semantics for ``pop()``. If
|
| 297 |
+
passed no argument or an integer argument, it will use ``list``
|
| 298 |
+
semantics and pop tokens from the list of parsed tokens. If passed
|
| 299 |
+
a non-integer argument (most likely a string), it will use ``dict``
|
| 300 |
+
semantics and pop the corresponding value from any defined results
|
| 301 |
+
names. A second default return value argument is supported, just as in
|
| 302 |
+
``dict.pop()``.
|
| 303 |
+
|
| 304 |
+
Example::
|
| 305 |
+
|
| 306 |
+
numlist = Word(nums)[...]
|
| 307 |
+
print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']
|
| 308 |
+
|
| 309 |
+
def remove_first(tokens):
|
| 310 |
+
tokens.pop(0)
|
| 311 |
+
numlist.add_parse_action(remove_first)
|
| 312 |
+
print(numlist.parse_string("0 123 321")) # -> ['123', '321']
|
| 313 |
+
|
| 314 |
+
label = Word(alphas)
|
| 315 |
+
patt = label("LABEL") + Word(nums)[1, ...]
|
| 316 |
+
print(patt.parse_string("AAB 123 321").dump())
|
| 317 |
+
|
| 318 |
+
# Use pop() in a parse action to remove named result (note that corresponding value is not
|
| 319 |
+
# removed from list form of results)
|
| 320 |
+
def remove_LABEL(tokens):
|
| 321 |
+
tokens.pop("LABEL")
|
| 322 |
+
return tokens
|
| 323 |
+
patt.add_parse_action(remove_LABEL)
|
| 324 |
+
print(patt.parse_string("AAB 123 321").dump())
|
| 325 |
+
|
| 326 |
+
prints::
|
| 327 |
+
|
| 328 |
+
['AAB', '123', '321']
|
| 329 |
+
- LABEL: 'AAB'
|
| 330 |
+
|
| 331 |
+
['AAB', '123', '321']
|
| 332 |
+
"""
|
| 333 |
+
if not args:
|
| 334 |
+
args = [-1]
|
| 335 |
+
for k, v in kwargs.items():
|
| 336 |
+
if k == "default":
|
| 337 |
+
args = (args[0], v)
|
| 338 |
+
else:
|
| 339 |
+
raise TypeError(f"pop() got an unexpected keyword argument {k!r}")
|
| 340 |
+
if isinstance(args[0], int) or len(args) == 1 or args[0] in self:
|
| 341 |
+
index = args[0]
|
| 342 |
+
ret = self[index]
|
| 343 |
+
del self[index]
|
| 344 |
+
return ret
|
| 345 |
+
else:
|
| 346 |
+
defaultvalue = args[1]
|
| 347 |
+
return defaultvalue
|
| 348 |
+
|
| 349 |
+
def get(self, key, default_value=None):
|
| 350 |
+
"""
|
| 351 |
+
Returns named result matching the given key, or if there is no
|
| 352 |
+
such name, then returns the given ``default_value`` or ``None`` if no
|
| 353 |
+
``default_value`` is specified.
|
| 354 |
+
|
| 355 |
+
Similar to ``dict.get()``.
|
| 356 |
+
|
| 357 |
+
Example::
|
| 358 |
+
|
| 359 |
+
integer = Word(nums)
|
| 360 |
+
date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
|
| 361 |
+
|
| 362 |
+
result = date_str.parse_string("1999/12/31")
|
| 363 |
+
print(result.get("year")) # -> '1999'
|
| 364 |
+
print(result.get("hour", "not specified")) # -> 'not specified'
|
| 365 |
+
print(result.get("hour")) # -> None
|
| 366 |
+
"""
|
| 367 |
+
if key in self:
|
| 368 |
+
return self[key]
|
| 369 |
+
else:
|
| 370 |
+
return default_value
|
| 371 |
+
|
| 372 |
+
def insert(self, index, ins_string):
    """
    Inserts new element at location index in the list of parsed tokens.

    Similar to ``list.insert()``.

    Example::

        numlist = Word(nums)[...]
        print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']

        # use a parse action to insert the parse location in the front of the parsed results
        def insert_locn(locn, tokens):
            tokens.insert(0, locn)
        numlist.add_parse_action(insert_locn)
        print(numlist.parse_string("0 123 321")) # -> [0, '0', '123', '321']
    """
    self._toklist.insert(index, ins_string)
    # fixup indices in token dictionary: every stored offset past the
    # insertion point must shift right by one to stay aligned with _toklist
    for occurrences in self._tokdict.values():
        for k, (value, position) in enumerate(occurrences):
            # (position > index) is a bool coerced to 0 or 1, so offsets at
            # or before `index` are left unchanged
            occurrences[k] = _ParseResultsWithOffset(
                value, position + (position > index)
            )
|
| 396 |
+
|
| 397 |
+
def append(self, item):
    """
    Add a single element to the end of this ``ParseResults``' token list.

    Example::

        numlist = Word(nums)[...]
        print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']

        # use a parse action to compute the sum of the parsed integers, and add it to the end
        def append_sum(tokens):
            tokens.append(sum(map(int, tokens)))
        numlist.add_parse_action(append_sum)
        print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321', 444]
    """
    # delegate straight to the underlying list
    self._toklist += [item]
|
| 413 |
+
|
| 414 |
+
def extend(self, itemseq):
    """
    Add a sequence of elements to the end of this ``ParseResults``' token list.

    Example::

        patt = Word(alphas)[1, ...]

        # use a parse action to append the reverse of the matched strings, to make a palindrome
        def make_palindrome(tokens):
            tokens.extend(reversed([t[::-1] for t in tokens]))
            return ''.join(tokens)
        patt.add_parse_action(make_palindrome)
        print(patt.parse_string("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
    """
    # plain sequences go straight into the token list; guard clause keeps
    # the ParseResults merge path (which also merges names) separate
    if not isinstance(itemseq, ParseResults):
        self._toklist.extend(itemseq)
        return
    # another ParseResults: merge tokens AND named results via __iadd__
    self.__iadd__(itemseq)
|
| 433 |
+
|
| 434 |
+
def clear(self):
    """
    Remove all elements and all results names from this ``ParseResults``.
    """
    # empty both backing containers in place (list.clear() == del lst[:])
    self._toklist.clear()
    self._tokdict.clear()
|
| 440 |
+
|
| 441 |
+
def __getattr__(self, name):
|
| 442 |
+
try:
|
| 443 |
+
return self[name]
|
| 444 |
+
except KeyError:
|
| 445 |
+
if name.startswith("__"):
|
| 446 |
+
raise AttributeError(name)
|
| 447 |
+
return ""
|
| 448 |
+
|
| 449 |
+
def __add__(self, other: ParseResults) -> ParseResults:
|
| 450 |
+
ret = self.copy()
|
| 451 |
+
ret += other
|
| 452 |
+
return ret
|
| 453 |
+
|
| 454 |
+
def __iadd__(self, other: ParseResults) -> ParseResults:
    """In-place merge of another ParseResults: tokens, named results, and name set."""
    # merging an empty/falsy result is a no-op
    if not other:
        return self

    if other._tokdict:
        # re-base other's named-result offsets so they point past self's
        # current tokens; a negative offset means "whole result" and is
        # pinned to the join point
        offset = len(self._toklist)
        addoffset = lambda a: offset if a < 0 else a + offset
        otheritems = other._tokdict.items()
        otherdictitems = [
            (k, _ParseResultsWithOffset(v[0], addoffset(v[1])))
            for k, vlist in otheritems
            for v in vlist
        ]
        for k, v in otherdictitems:
            self[k] = v
            # re-parent nested results so get_name() resolves against self
            if isinstance(v[0], ParseResults):
                v[0]._parent = self

    self._toklist += other._toklist
    self._all_names |= other._all_names
    return self
|
| 475 |
+
|
| 476 |
+
def __radd__(self, other) -> ParseResults:
|
| 477 |
+
if isinstance(other, int) and other == 0:
|
| 478 |
+
# useful for merging many ParseResults using sum() builtin
|
| 479 |
+
return self.copy()
|
| 480 |
+
else:
|
| 481 |
+
# this may raise a TypeError - so be it
|
| 482 |
+
return other + self
|
| 483 |
+
|
| 484 |
+
def __repr__(self) -> str:
|
| 485 |
+
return f"{type(self).__name__}({self._toklist!r}, {self.as_dict()})"
|
| 486 |
+
|
| 487 |
+
def __str__(self) -> str:
    """Render like a Python list; nested ParseResults use str(), leaves use repr()."""
    rendered = ", ".join(
        str(tok) if isinstance(tok, ParseResults) else repr(tok)
        for tok in self._toklist
    )
    return f"[{rendered}]"
|
| 498 |
+
|
| 499 |
+
def _asStringList(self, sep=""):
    # Flatten every token (recursing into nested ParseResults) into a flat
    # list of strings, inserting `sep` between consecutive items when given.
    pieces = []
    for tok in self._toklist:
        if pieces and sep:
            pieces.append(sep)
        if isinstance(tok, ParseResults):
            pieces.extend(tok._asStringList())
        else:
            pieces.append(str(tok))
    return pieces
|
| 509 |
+
|
| 510 |
+
def as_list(self, *, flatten: bool = False) -> list:
    """
    Returns the parse results as a nested list of matching tokens, all converted to strings.
    If flatten is True, all the nesting levels in the returned list are collapsed.

    Example::

        patt = Word(alphas)[1, ...]
        result = patt.parse_string("sldkj lsdkj sldkj")
        # even though the result prints in string-like form, it is actually a pyparsing ParseResults
        print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']

        # Use as_list() to create an actual list
        result_list = result.as_list()
        print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
    """

    def flattened(pr):
        # NOTE(review): the `pr` parameter is unused — traversal always
        # starts from `self`; callers below only ever pass `self`, so
        # behavior is unaffected.
        # Iterative depth-first walk: extendleft(reversed slice) pushes a
        # nested result's tokens so they pop back off in original order.
        to_visit = collections.deque([*self])
        while to_visit:
            to_do = to_visit.popleft()
            if isinstance(to_do, ParseResults):
                to_visit.extendleft(to_do[::-1])
            else:
                yield to_do

    if flatten:
        return [*flattened(self)]
    else:
        # one level of conversion; nested ParseResults become nested lists
        return [
            res.as_list() if isinstance(res, ParseResults) else res
            for res in self._toklist
        ]
|
| 543 |
+
|
| 544 |
+
def as_dict(self) -> dict:
    """
    Return the named parse results as a nested, JSON-serializable dictionary.

    Example::

        integer = Word(nums)
        date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

        result = date_str.parse_string('12/31/1999')
        print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})

        result_dict = result.as_dict()
        print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}

        # even though a ParseResults supports dict-like access, sometime you just need to have a dict
        import json
        print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
        print(json.dumps(result.as_dict())) # -> {"month": "31", "day": "1999", "year": "12"}
    """

    def convert(value):
        # plain tokens pass through untouched
        if not isinstance(value, ParseResults):
            return value
        # named sub-results become dicts; unnamed nested results become lists
        if value.haskeys():
            return value.as_dict()
        return [convert(v) for v in value]

    return {k: convert(v) for k, v in self.items()}
|
| 572 |
+
|
| 573 |
+
def copy(self) -> ParseResults:
    """
    Return a new shallow copy of this :class:`ParseResults`. ``ParseResults``
    items contained within the source are shared with the copy. Use
    :class:`ParseResults.deepcopy()` to create a copy with its own separate
    content values.
    """
    duplicate = ParseResults(self._toklist)
    # token dict is copied one level deep; name set is merged, name and
    # parent link are carried over as-is
    duplicate._tokdict = self._tokdict.copy()
    duplicate._parent = self._parent
    duplicate._all_names |= self._all_names
    duplicate._name = self._name
    return duplicate
|
| 586 |
+
|
| 587 |
+
def deepcopy(self) -> ParseResults:
    """
    Returns a new deep copy of a :class:`ParseResults` object.
    """
    ret = self.copy()
    # replace values with copies if they are of known mutable types
    for i, obj in enumerate(self._toklist):
        if isinstance(obj, ParseResults):
            ret._toklist[i] = obj.deepcopy()
        elif isinstance(obj, (str, bytes)):
            # immutable — safe to share between original and copy
            pass
        elif isinstance(obj, MutableMapping):
            # rebuild the mapping with the same concrete type, deep-copying
            # any nested ParseResults values
            ret._toklist[i] = dest = type(obj)()
            for k, v in obj.items():
                dest[k] = v.deepcopy() if isinstance(v, ParseResults) else v
        elif isinstance(obj, Iterable):
            # rebuild other iterable containers element by element
            # (assumes type(obj) accepts a single iterable argument)
            ret._toklist[i] = type(obj)(
                v.deepcopy() if isinstance(v, ParseResults) else v for v in obj  # type: ignore[call-arg]
            )
    return ret
|
| 607 |
+
|
| 608 |
+
def get_name(self) -> str | None:
    r"""
    Returns the results name for this token expression. Useful when several
    different expressions might match at a particular location.

    Example::

        integer = Word(nums)
        ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
        house_number_expr = Suppress('#') + Word(nums, alphanums)
        user_data = (Group(house_number_expr)("house_number")
                    | Group(ssn_expr)("ssn")
                    | Group(integer)("age"))
        user_info = user_data[1, ...]

        result = user_info.parse_string("22 111-22-3333 #221B")
        for item in result:
            print(item.get_name(), ':', item[0])

    prints::

        age : 22
        ssn : 111-22-3333
        house_number : 221B
    """
    # 1) an explicit name was set on this result
    if self._name:
        return self._name
    elif self._parent:
        # 2) search the parent's token dict for the entry whose stored
        #    value is this very object (identity, not equality)
        par: ParseResults = self._parent
        parent_tokdict_items = par._tokdict.items()
        return next(
            (
                k
                for k, vlist in parent_tokdict_items
                for v, loc in vlist
                if v is self
            ),
            None,
        )
    elif (
        # 3) exactly one token with exactly one results name whose offset
        #    covers the whole result (0 or -1) -> that name
        len(self) == 1
        and len(self._tokdict) == 1
        and next(iter(self._tokdict.values()))[0][1] in (0, -1)
    ):
        return next(iter(self._tokdict.keys()))
    else:
        return None
|
| 655 |
+
|
| 656 |
+
def dump(self, indent="", full=True, include_list=True, _depth=0) -> str:
    """
    Diagnostic method for listing out the contents of
    a :class:`ParseResults`. Accepts an optional ``indent`` argument so
    that this string can be embedded in a nested display of other data.

    Example::

        integer = Word(nums)
        date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

        result = date_str.parse_string('1999/12/31')
        print(result.dump())

    prints::

        ['1999', '/', '12', '/', '31']
        - day: '31'
        - month: '12'
        - year: '1999'
    """
    out = []
    NL = "\n"
    # first line: the list form of the tokens (or an empty placeholder)
    out.append(indent + str(self.as_list()) if include_list else "")

    if not full:
        return "".join(out)

    if self.haskeys():
        # named results, sorted by name for stable output; one "- name: value"
        # line each, nested results dumped recursively one depth deeper
        items = sorted((str(k), v) for k, v in self.items())
        for k, v in items:
            if out:
                out.append(NL)
            out.append(f"{indent}{(' ' * _depth)}- {k}: ")
            if not isinstance(v, ParseResults):
                out.append(repr(v))
                continue

            if not v:
                # empty nested result: show its (empty) list form
                out.append(str(v))
                continue

            out.append(
                v.dump(
                    indent=indent,
                    full=full,
                    include_list=include_list,
                    _depth=_depth + 1,
                )
            )
    # no nested ParseResults among the tokens -> nothing more to show
    if not any(isinstance(vv, ParseResults) for vv in self):
        return "".join(out)

    # list out nested results positionally as "[i]:" entries
    v = self
    incr = "  "
    nl = "\n"
    for i, vv in enumerate(v):
        if isinstance(vv, ParseResults):
            vv_dump = vv.dump(
                indent=indent,
                full=full,
                include_list=include_list,
                _depth=_depth + 1,
            )
            out.append(
                f"{nl}{indent}{incr * _depth}[{i}]:{nl}{indent}{incr * (_depth + 1)}{vv_dump}"
            )
        else:
            out.append(
                f"{nl}{indent}{incr * _depth}[{i}]:{nl}{indent}{incr * (_depth + 1)}{vv}"
            )

    return "".join(out)
|
| 729 |
+
|
| 730 |
+
def pprint(self, *args, **kwargs):
    """
    Pretty-print the parsed results as a list via the stdlib
    `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
    Any extra positional or keyword arguments are forwarded to
    `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .

    Example::

        ident = Word(alphas, alphanums)
        num = Word(nums)
        func = Forward()
        term = ident | num | Group('(' + func + ')')
        func <<= ident + Group(Optional(DelimitedList(term)))
        result = func.parse_string("fna a,b,(fnb c,d,200),100")
        result.pprint(width=40)

    prints::

        ['fna',
         ['a',
          'b',
          ['(', 'fnb', ['c', 'd', '200'], ')'],
          '100']]
    """
    # convert to a plain nested list first so pprint can format it
    as_plain_list = self.as_list()
    pprint.pprint(as_plain_list, *args, **kwargs)
|
| 756 |
+
|
| 757 |
+
# add support for pickle protocol
|
| 758 |
+
def __getstate__(self):
|
| 759 |
+
return (
|
| 760 |
+
self._toklist,
|
| 761 |
+
(
|
| 762 |
+
self._tokdict.copy(),
|
| 763 |
+
None,
|
| 764 |
+
self._all_names,
|
| 765 |
+
self._name,
|
| 766 |
+
),
|
| 767 |
+
)
|
| 768 |
+
|
| 769 |
+
def __setstate__(self, state):
|
| 770 |
+
self._toklist, (self._tokdict, par, inAccumNames, self._name) = state
|
| 771 |
+
self._all_names = set(inAccumNames)
|
| 772 |
+
self._parent = None
|
| 773 |
+
|
| 774 |
+
def __getnewargs__(self):
|
| 775 |
+
return self._toklist, self._name
|
| 776 |
+
|
| 777 |
+
def __dir__(self):
|
| 778 |
+
return dir(type(self)) + list(self.keys())
|
| 779 |
+
|
| 780 |
+
@classmethod
def from_dict(cls, other, name=None) -> ParseResults:
    """
    Helper classmethod to construct a ``ParseResults`` from a ``dict``, preserving the
    name-value relations as results names. If an optional ``name`` argument is
    given, a nested ``ParseResults`` will be returned.
    """

    def is_iterable(obj):
        # EAFP probe: anything iter() accepts counts as iterable...
        try:
            iter(obj)
        except Exception:
            return False
        # str's are iterable, but in pyparsing, we don't want to iterate over them
        else:
            return not isinstance(obj, str_type)

    ret = cls([])
    for k, v in other.items():
        if isinstance(v, Mapping):
            # nested mapping -> recurse, storing the sub-result under key k
            ret += cls.from_dict(v, name=k)
        else:
            # scalar or sequence value; asList controls list-style storage
            ret += cls([v], name=k, asList=is_iterable(v))
    if name is not None:
        # wrap the whole result one level deeper under the given name
        ret = cls([ret], name=name)
    return ret
|
| 806 |
+
|
| 807 |
+
asList = as_list
|
| 808 |
+
"""Deprecated - use :class:`as_list`"""
|
| 809 |
+
asDict = as_dict
|
| 810 |
+
"""Deprecated - use :class:`as_dict`"""
|
| 811 |
+
getName = get_name
|
| 812 |
+
"""Deprecated - use :class:`get_name`"""
|
| 813 |
+
|
| 814 |
+
|
| 815 |
+
MutableMapping.register(ParseResults)
|
| 816 |
+
MutableSequence.register(ParseResults)
|
.venv/lib/python3.11/site-packages/pyparsing/testing.py
ADDED
|
@@ -0,0 +1,362 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# testing.py
|
| 2 |
+
|
| 3 |
+
from contextlib import contextmanager
|
| 4 |
+
import re
|
| 5 |
+
import typing
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
from .core import (
|
| 9 |
+
ParserElement,
|
| 10 |
+
ParseException,
|
| 11 |
+
Keyword,
|
| 12 |
+
__diag__,
|
| 13 |
+
__compat__,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class pyparsing_test:
|
| 18 |
+
"""
|
| 19 |
+
namespace class for classes useful in writing unit tests
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
class reset_pyparsing_context:
|
| 23 |
+
"""
|
| 24 |
+
Context manager to be used when writing unit tests that modify pyparsing config values:
|
| 25 |
+
- packrat parsing
|
| 26 |
+
- bounded recursion parsing
|
| 27 |
+
- default whitespace characters.
|
| 28 |
+
- default keyword characters
|
| 29 |
+
- literal string auto-conversion class
|
| 30 |
+
- __diag__ settings
|
| 31 |
+
|
| 32 |
+
Example::
|
| 33 |
+
|
| 34 |
+
with reset_pyparsing_context():
|
| 35 |
+
# test that literals used to construct a grammar are automatically suppressed
|
| 36 |
+
ParserElement.inlineLiteralsUsing(Suppress)
|
| 37 |
+
|
| 38 |
+
term = Word(alphas) | Word(nums)
|
| 39 |
+
group = Group('(' + term[...] + ')')
|
| 40 |
+
|
| 41 |
+
# assert that the '()' characters are not included in the parsed tokens
|
| 42 |
+
self.assertParseAndCheckList(group, "(abc 123 def)", ['abc', '123', 'def'])
|
| 43 |
+
|
| 44 |
+
# after exiting context manager, literals are converted to Literal expressions again
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
def __init__(self):
|
| 48 |
+
self._save_context = {}
|
| 49 |
+
|
| 50 |
+
def save(self):
|
| 51 |
+
self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS
|
| 52 |
+
self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS
|
| 53 |
+
|
| 54 |
+
self._save_context["literal_string_class"] = (
|
| 55 |
+
ParserElement._literalStringClass
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
self._save_context["verbose_stacktrace"] = ParserElement.verbose_stacktrace
|
| 59 |
+
|
| 60 |
+
self._save_context["packrat_enabled"] = ParserElement._packratEnabled
|
| 61 |
+
if ParserElement._packratEnabled:
|
| 62 |
+
self._save_context["packrat_cache_size"] = (
|
| 63 |
+
ParserElement.packrat_cache.size
|
| 64 |
+
)
|
| 65 |
+
else:
|
| 66 |
+
self._save_context["packrat_cache_size"] = None
|
| 67 |
+
self._save_context["packrat_parse"] = ParserElement._parse
|
| 68 |
+
self._save_context["recursion_enabled"] = (
|
| 69 |
+
ParserElement._left_recursion_enabled
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
self._save_context["__diag__"] = {
|
| 73 |
+
name: getattr(__diag__, name) for name in __diag__._all_names
|
| 74 |
+
}
|
| 75 |
+
|
| 76 |
+
self._save_context["__compat__"] = {
|
| 77 |
+
"collect_all_And_tokens": __compat__.collect_all_And_tokens
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
return self
|
| 81 |
+
|
| 82 |
+
def restore(self):
|
| 83 |
+
# reset pyparsing global state
|
| 84 |
+
if (
|
| 85 |
+
ParserElement.DEFAULT_WHITE_CHARS
|
| 86 |
+
!= self._save_context["default_whitespace"]
|
| 87 |
+
):
|
| 88 |
+
ParserElement.set_default_whitespace_chars(
|
| 89 |
+
self._save_context["default_whitespace"]
|
| 90 |
+
)
|
| 91 |
+
|
| 92 |
+
ParserElement.verbose_stacktrace = self._save_context["verbose_stacktrace"]
|
| 93 |
+
|
| 94 |
+
Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"]
|
| 95 |
+
ParserElement.inlineLiteralsUsing(
|
| 96 |
+
self._save_context["literal_string_class"]
|
| 97 |
+
)
|
| 98 |
+
|
| 99 |
+
for name, value in self._save_context["__diag__"].items():
|
| 100 |
+
(__diag__.enable if value else __diag__.disable)(name)
|
| 101 |
+
|
| 102 |
+
ParserElement._packratEnabled = False
|
| 103 |
+
if self._save_context["packrat_enabled"]:
|
| 104 |
+
ParserElement.enable_packrat(self._save_context["packrat_cache_size"])
|
| 105 |
+
else:
|
| 106 |
+
ParserElement._parse = self._save_context["packrat_parse"]
|
| 107 |
+
ParserElement._left_recursion_enabled = self._save_context[
|
| 108 |
+
"recursion_enabled"
|
| 109 |
+
]
|
| 110 |
+
|
| 111 |
+
__compat__.collect_all_And_tokens = self._save_context["__compat__"]
|
| 112 |
+
|
| 113 |
+
return self
|
| 114 |
+
|
| 115 |
+
def copy(self):
|
| 116 |
+
ret = type(self)()
|
| 117 |
+
ret._save_context.update(self._save_context)
|
| 118 |
+
return ret
|
| 119 |
+
|
| 120 |
+
def __enter__(self):
|
| 121 |
+
return self.save()
|
| 122 |
+
|
| 123 |
+
def __exit__(self, *args):
|
| 124 |
+
self.restore()
|
| 125 |
+
|
| 126 |
+
class TestParseResultsAsserts:
|
| 127 |
+
"""
|
| 128 |
+
A mixin class to add parse results assertion methods to normal unittest.TestCase classes.
|
| 129 |
+
"""
|
| 130 |
+
|
| 131 |
+
def assertParseResultsEquals(
    self, result, expected_list=None, expected_dict=None, msg=None
):
    """
    Unit test assertion to compare a :class:`ParseResults` object with an optional ``expected_list``,
    and compare any defined results names with an optional ``expected_dict``.
    """
    # the two checks are independent; either or both may be requested
    if expected_list is not None:
        self.assertEqual(expected_list, result.as_list(), msg=msg)
    if expected_dict is not None:
        self.assertEqual(expected_dict, result.as_dict(), msg=msg)
|
| 142 |
+
|
| 143 |
+
def assertParseAndCheckList(
    self, expr, test_string, expected_list, msg=None, verbose=True
):
    """
    Convenience wrapper assert to test a parser element and input string, and assert that
    the resulting ``ParseResults.asList()`` is equal to the ``expected_list``.
    """
    result = expr.parse_string(test_string, parse_all=True)
    # verbose shows the full dump; otherwise just the list form
    print(result.dump() if verbose else result.as_list())
    self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg)
|
| 156 |
+
|
| 157 |
+
def assertParseAndCheckDict(
    self, expr, test_string, expected_dict, msg=None, verbose=True
):
    """
    Convenience wrapper assert to test a parser element and input string, and assert that
    the resulting ``ParseResults.asDict()`` is equal to the ``expected_dict``.
    """
    # use the snake_case parse_all kwarg, consistent with
    # assertParseAndCheckList above (parseAll is the legacy synonym)
    result = expr.parse_string(test_string, parse_all=True)
    if verbose:
        print(result.dump())
    else:
        print(result.as_list())
    self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg)
|
| 170 |
+
|
| 171 |
+
def assertRunTestResults(
|
| 172 |
+
self, run_tests_report, expected_parse_results=None, msg=None
|
| 173 |
+
):
|
| 174 |
+
"""
|
| 175 |
+
Unit test assertion to evaluate output of ``ParserElement.runTests()``. If a list of
|
| 176 |
+
list-dict tuples is given as the ``expected_parse_results`` argument, then these are zipped
|
| 177 |
+
with the report tuples returned by ``runTests`` and evaluated using ``assertParseResultsEquals``.
|
| 178 |
+
Finally, asserts that the overall ``runTests()`` success value is ``True``.
|
| 179 |
+
|
| 180 |
+
:param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests
|
| 181 |
+
:param expected_parse_results (optional): [tuple(str, list, dict, Exception)]
|
| 182 |
+
"""
|
| 183 |
+
run_test_success, run_test_results = run_tests_report
|
| 184 |
+
|
| 185 |
+
if expected_parse_results is None:
|
| 186 |
+
self.assertTrue(
|
| 187 |
+
run_test_success, msg=msg if msg is not None else "failed runTests"
|
| 188 |
+
)
|
| 189 |
+
return
|
| 190 |
+
|
| 191 |
+
merged = [
|
| 192 |
+
(*rpt, expected)
|
| 193 |
+
for rpt, expected in zip(run_test_results, expected_parse_results)
|
| 194 |
+
]
|
| 195 |
+
for test_string, result, expected in merged:
|
| 196 |
+
# expected should be a tuple containing a list and/or a dict or an exception,
|
| 197 |
+
# and optional failure message string
|
| 198 |
+
# an empty tuple will skip any result validation
|
| 199 |
+
fail_msg = next((exp for exp in expected if isinstance(exp, str)), None)
|
| 200 |
+
expected_exception = next(
|
| 201 |
+
(
|
| 202 |
+
exp
|
| 203 |
+
for exp in expected
|
| 204 |
+
if isinstance(exp, type) and issubclass(exp, Exception)
|
| 205 |
+
),
|
| 206 |
+
None,
|
| 207 |
+
)
|
| 208 |
+
if expected_exception is not None:
|
| 209 |
+
with self.assertRaises(
|
| 210 |
+
expected_exception=expected_exception, msg=fail_msg or msg
|
| 211 |
+
):
|
| 212 |
+
if isinstance(result, Exception):
|
| 213 |
+
raise result
|
| 214 |
+
else:
|
| 215 |
+
expected_list = next(
|
| 216 |
+
(exp for exp in expected if isinstance(exp, list)), None
|
| 217 |
+
)
|
| 218 |
+
expected_dict = next(
|
| 219 |
+
(exp for exp in expected if isinstance(exp, dict)), None
|
| 220 |
+
)
|
| 221 |
+
if (expected_list, expected_dict) != (None, None):
|
| 222 |
+
self.assertParseResultsEquals(
|
| 223 |
+
result,
|
| 224 |
+
expected_list=expected_list,
|
| 225 |
+
expected_dict=expected_dict,
|
| 226 |
+
msg=fail_msg or msg,
|
| 227 |
+
)
|
| 228 |
+
else:
|
| 229 |
+
# warning here maybe?
|
| 230 |
+
print(f"no validation for {test_string!r}")
|
| 231 |
+
|
| 232 |
+
# do this last, in case some specific test results can be reported instead
|
| 233 |
+
self.assertTrue(
|
| 234 |
+
run_test_success, msg=msg if msg is not None else "failed runTests"
|
| 235 |
+
)
|
| 236 |
+
|
| 237 |
+
@contextmanager
def assertRaisesParseException(
    self, exc_type=ParseException, expected_msg=None, msg=None
):
    """
    Context manager asserting that the body raises ``exc_type`` (default
    ``ParseException``), optionally matching ``expected_msg`` against the
    exception text.
    """
    if expected_msg is not None:
        # a plain string is matched literally, not as a regex pattern
        if isinstance(expected_msg, str):
            expected_msg = re.escape(expected_msg)
        with self.assertRaisesRegex(exc_type, expected_msg, msg=msg) as ctx:
            yield ctx

    else:
        with self.assertRaises(exc_type, msg=msg) as ctx:
            yield ctx
|
| 250 |
+
|
| 251 |
+
@staticmethod
|
| 252 |
+
def with_line_numbers(
|
| 253 |
+
s: str,
|
| 254 |
+
start_line: typing.Optional[int] = None,
|
| 255 |
+
end_line: typing.Optional[int] = None,
|
| 256 |
+
expand_tabs: bool = True,
|
| 257 |
+
eol_mark: str = "|",
|
| 258 |
+
mark_spaces: typing.Optional[str] = None,
|
| 259 |
+
mark_control: typing.Optional[str] = None,
|
| 260 |
+
*,
|
| 261 |
+
indent: typing.Union[str, int] = "",
|
| 262 |
+
base_1: bool = True,
|
| 263 |
+
) -> str:
|
| 264 |
+
"""
|
| 265 |
+
Helpful method for debugging a parser - prints a string with line and column numbers.
|
| 266 |
+
(Line and column numbers are 1-based by default - if debugging a parse action,
|
| 267 |
+
pass base_1=False, to correspond to the loc value passed to the parse action.)
|
| 268 |
+
|
| 269 |
+
:param s: tuple(bool, str - string to be printed with line and column numbers
|
| 270 |
+
:param start_line: int - (optional) starting line number in s to print (default=1)
|
| 271 |
+
:param end_line: int - (optional) ending line number in s to print (default=len(s))
|
| 272 |
+
:param expand_tabs: bool - (optional) expand tabs to spaces, to match the pyparsing default
|
| 273 |
+
:param eol_mark: str - (optional) string to mark the end of lines, helps visualize trailing spaces (default="|")
|
| 274 |
+
:param mark_spaces: str - (optional) special character to display in place of spaces
|
| 275 |
+
:param mark_control: str - (optional) convert non-printing control characters to a placeholding
|
| 276 |
+
character; valid values:
|
| 277 |
+
- "unicode" - replaces control chars with Unicode symbols, such as "␍" and "␊"
|
| 278 |
+
- any single character string - replace control characters with given string
|
| 279 |
+
- None (default) - string is displayed as-is
|
| 280 |
+
:param indent: str | int - (optional) string to indent with line and column numbers; if an int
|
| 281 |
+
is passed, converted to " " * indent
|
| 282 |
+
:param base_1: bool - (optional) whether to label string using base 1; if False, string will be
|
| 283 |
+
labeled based at 0 (default=True)
|
| 284 |
+
|
| 285 |
+
:return: str - input string with leading line numbers and column number headers
|
| 286 |
+
"""
|
| 287 |
+
if expand_tabs:
|
| 288 |
+
s = s.expandtabs()
|
| 289 |
+
if isinstance(indent, int):
|
| 290 |
+
indent = " " * indent
|
| 291 |
+
indent = indent.expandtabs()
|
| 292 |
+
if mark_control is not None:
|
| 293 |
+
mark_control = typing.cast(str, mark_control)
|
| 294 |
+
if mark_control == "unicode":
|
| 295 |
+
transtable_map = {
|
| 296 |
+
c: u for c, u in zip(range(0, 33), range(0x2400, 0x2433))
|
| 297 |
+
}
|
| 298 |
+
transtable_map[127] = 0x2421
|
| 299 |
+
tbl = str.maketrans(transtable_map)
|
| 300 |
+
eol_mark = ""
|
| 301 |
+
else:
|
| 302 |
+
ord_mark_control = ord(mark_control)
|
| 303 |
+
tbl = str.maketrans(
|
| 304 |
+
{c: ord_mark_control for c in list(range(0, 32)) + [127]}
|
| 305 |
+
)
|
| 306 |
+
s = s.translate(tbl)
|
| 307 |
+
if mark_spaces is not None and mark_spaces != " ":
|
| 308 |
+
if mark_spaces == "unicode":
|
| 309 |
+
tbl = str.maketrans({9: 0x2409, 32: 0x2423})
|
| 310 |
+
s = s.translate(tbl)
|
| 311 |
+
else:
|
| 312 |
+
s = s.replace(" ", mark_spaces)
|
| 313 |
+
if start_line is None:
|
| 314 |
+
start_line = 0
|
| 315 |
+
if end_line is None:
|
| 316 |
+
end_line = len(s)
|
| 317 |
+
end_line = min(end_line, len(s))
|
| 318 |
+
start_line = min(max(0, start_line), end_line)
|
| 319 |
+
|
| 320 |
+
if mark_control != "unicode":
|
| 321 |
+
s_lines = s.splitlines()[start_line - base_1 : end_line]
|
| 322 |
+
else:
|
| 323 |
+
s_lines = [
|
| 324 |
+
line + "␊" for line in s.split("␊")[start_line - base_1 : end_line]
|
| 325 |
+
]
|
| 326 |
+
if not s_lines:
|
| 327 |
+
return ""
|
| 328 |
+
|
| 329 |
+
lineno_width = len(str(end_line))
|
| 330 |
+
max_line_len = max(len(line) for line in s_lines)
|
| 331 |
+
lead = indent + " " * (lineno_width + 1)
|
| 332 |
+
if max_line_len >= 99:
|
| 333 |
+
header0 = (
|
| 334 |
+
lead
|
| 335 |
+
+ ("" if base_1 else " ")
|
| 336 |
+
+ "".join(
|
| 337 |
+
f"{' ' * 99}{(i + 1) % 100}"
|
| 338 |
+
for i in range(1 if base_1 else 0, max(max_line_len // 100, 1))
|
| 339 |
+
)
|
| 340 |
+
+ "\n"
|
| 341 |
+
)
|
| 342 |
+
else:
|
| 343 |
+
header0 = ""
|
| 344 |
+
header1 = (
|
| 345 |
+
("" if base_1 else " ")
|
| 346 |
+
+ lead
|
| 347 |
+
+ "".join(f" {(i + 1) % 10}" for i in range(-(-max_line_len // 10)))
|
| 348 |
+
+ "\n"
|
| 349 |
+
)
|
| 350 |
+
digits = "1234567890"
|
| 351 |
+
header2 = (
|
| 352 |
+
lead + ("" if base_1 else "0") + digits * (-(-max_line_len // 10)) + "\n"
|
| 353 |
+
)
|
| 354 |
+
return (
|
| 355 |
+
header1
|
| 356 |
+
+ header2
|
| 357 |
+
+ "\n".join(
|
| 358 |
+
f"{indent}{i:{lineno_width}d}:{line}{eol_mark}"
|
| 359 |
+
for i, line in enumerate(s_lines, start=start_line + base_1)
|
| 360 |
+
)
|
| 361 |
+
+ "\n"
|
| 362 |
+
)
|
.venv/lib/python3.11/site-packages/pyparsing/unicode.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# unicode.py
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
from itertools import filterfalse
|
| 5 |
+
from typing import Union
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class _lazyclassproperty:
    """Descriptor implementing a lazily-computed, per-class cached property.

    The wrapped function is called at most once per *class* (not per
    instance); the result is cached in that class's own ``_intern`` dict.
    """

    def __init__(self, fn):
        self.fn = fn
        # propagate metadata so help()/introspection show the wrapped function
        self.__doc__ = fn.__doc__
        self.__name__ = fn.__name__

    def __get__(self, obj, cls):
        if cls is None:
            cls = type(obj)
        # Give this class its own _intern cache if it has none, or if the one
        # found via attribute lookup is actually a superclass's dict (checked
        # by identity against every class in the MRO) — otherwise subclasses
        # would share, and pollute, their parent's cache.
        if not hasattr(cls, "_intern") or any(
            cls._intern is getattr(superclass, "_intern", [])
            for superclass in cls.__mro__[1:]
        ):
            cls._intern = {}
        attrname = self.fn.__name__
        if attrname not in cls._intern:
            # first access for this class: compute and cache
            cls._intern[attrname] = self.fn(cls)
        return cls._intern[attrname]
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# Type alias for a list of Unicode code point ranges; each entry is either an
# inclusive (first, last) 2-tuple or a (single,) 1-tuple for one code point.
UnicodeRangeList = list[Union[tuple[int, int], tuple[int]]]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class unicode_set:
    """
    A set of Unicode characters, for language-specific strings for
    ``alphas``, ``nums``, ``alphanums``, and ``printables``.
    A unicode_set is defined by a list of ranges in the Unicode character
    set, in a class attribute ``_ranges``. Ranges can be specified using
    2-tuples or a 1-tuple, such as::

        _ranges = [
            (0x0020, 0x007e),
            (0x00a0, 0x00ff),
            (0x0100,),
        ]

    Ranges are left- and right-inclusive. A 1-tuple of (x,) is treated as (x, x).

    A unicode set can also be defined using multiple inheritance of other unicode sets::

        class CJK(Chinese, Japanese, Korean):
            pass
    """

    # Subclasses override this with their own code point ranges.
    _ranges: UnicodeRangeList = []

    @_lazyclassproperty
    def _chars_for_ranges(cls) -> list[str]:
        # Accumulate code points from this class's _ranges and those of every
        # unicode_set ancestor in the MRO (stopping at unicode_set itself, so
        # multiple inheritance merges the parents' ranges), then de-duplicate
        # and return as a sorted list of characters.
        ret: list[int] = []
        for cc in cls.__mro__:  # type: ignore[attr-defined]
            if cc is unicode_set:
                break
            for rr in getattr(cc, "_ranges", ()):
                # rr[-1] works for both (first, last) 2-tuples and (x,) 1-tuples
                ret.extend(range(rr[0], rr[-1] + 1))
        return sorted(chr(c) for c in set(ret))

    @_lazyclassproperty
    def printables(cls) -> str:
        """all non-whitespace characters in this range"""
        return "".join(filterfalse(str.isspace, cls._chars_for_ranges))

    @_lazyclassproperty
    def alphas(cls) -> str:
        """all alphabetic characters in this range"""
        return "".join(filter(str.isalpha, cls._chars_for_ranges))

    @_lazyclassproperty
    def nums(cls) -> str:
        """all numeric digit characters in this range"""
        return "".join(filter(str.isdigit, cls._chars_for_ranges))

    @_lazyclassproperty
    def alphanums(cls) -> str:
        """all alphanumeric characters in this range"""
        return cls.alphas + cls.nums

    @_lazyclassproperty
    def identchars(cls) -> str:
        """all characters in this range that are valid identifier characters, plus underscore '_'"""
        # union with the explicit Latin-1 identifier characters and '_' so
        # basic ASCII/Latin identifiers work even for narrow range sets
        return "".join(
            sorted(
                set(filter(str.isidentifier, cls._chars_for_ranges))
                | set(
                    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº"
                    "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ"
                    "_"
                )
            )
        )

    @_lazyclassproperty
    def identbodychars(cls) -> str:
        """
        all characters in this range that are valid identifier body characters,
        plus the digits 0-9, and · (Unicode MIDDLE DOT)
        """
        # a char is a valid identifier *body* char if "_" + char is a valid
        # identifier (catches combining marks etc. that can't start a name)
        identifier_chars = set(
            c for c in cls._chars_for_ranges if ("_" + c).isidentifier()
        )
        return "".join(
            sorted(identifier_chars | set(cls.identchars) | set("0123456789·"))
        )

    @_lazyclassproperty
    def identifier(cls):
        """
        a pyparsing Word expression for an identifier using this range's definitions for
        identchars and identbodychars
        """
        # imported locally, presumably to avoid a circular import at module
        # load time — TODO confirm against the package's import graph
        from pyparsing import Word

        return Word(cls.identchars, cls.identbodychars)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class pyparsing_unicode(unicode_set):
    """
    A namespace class for defining common language unicode_sets.

    Each nested class is a :class:`unicode_set` whose ``_ranges`` lists
    inclusive code point ranges; 1-tuples denote a single code point.
    """

    # fmt: off

    # define ranges in language character sets
    _ranges: UnicodeRangeList = [
        (0x0020, sys.maxunicode),
    ]

    class BasicMultilingualPlane(unicode_set):
        """Unicode set for the Basic Multilingual Plane"""
        _ranges: UnicodeRangeList = [
            (0x0020, 0xFFFF),
        ]

    class Latin1(unicode_set):
        """Unicode set for Latin-1 Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0020, 0x007E),
            (0x00A0, 0x00FF),
        ]

    class LatinA(unicode_set):
        """Unicode set for Latin-A Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0100, 0x017F),
        ]

    class LatinB(unicode_set):
        """Unicode set for Latin-B Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0180, 0x024F),
        ]

    class Greek(unicode_set):
        """Unicode set for Greek Unicode Character Ranges"""
        _ranges: UnicodeRangeList = [
            (0x0342, 0x0345),
            (0x0370, 0x0377),
            (0x037A, 0x037F),
            (0x0384, 0x038A),
            (0x038C,),
            (0x038E, 0x03A1),
            (0x03A3, 0x03E1),
            (0x03F0, 0x03FF),
            (0x1D26, 0x1D2A),
            (0x1D5E,),
            (0x1D60,),
            (0x1D66, 0x1D6A),
            (0x1F00, 0x1F15),
            (0x1F18, 0x1F1D),
            (0x1F20, 0x1F45),
            (0x1F48, 0x1F4D),
            (0x1F50, 0x1F57),
            (0x1F59,),
            (0x1F5B,),
            (0x1F5D,),
            (0x1F5F, 0x1F7D),
            (0x1F80, 0x1FB4),
            (0x1FB6, 0x1FC4),
            (0x1FC6, 0x1FD3),
            (0x1FD6, 0x1FDB),
            (0x1FDD, 0x1FEF),
            (0x1FF2, 0x1FF4),
            (0x1FF6, 0x1FFE),
            (0x2129,),
            (0x2719, 0x271A),
            (0xAB65,),
            (0x10140, 0x1018D),
            (0x101A0,),
            (0x1D200, 0x1D245),
            (0x1F7A1, 0x1F7A7),
        ]

    class Cyrillic(unicode_set):
        """Unicode set for Cyrillic Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0400, 0x052F),
            (0x1C80, 0x1C88),
            (0x1D2B,),
            (0x1D78,),
            (0x2DE0, 0x2DFF),
            (0xA640, 0xA672),
            (0xA674, 0xA69F),
            (0xFE2E, 0xFE2F),
        ]

    class Chinese(unicode_set):
        """Unicode set for Chinese Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x2E80, 0x2E99),
            (0x2E9B, 0x2EF3),
            (0x31C0, 0x31E3),
            (0x3400, 0x4DB5),
            (0x4E00, 0x9FEF),
            (0xA700, 0xA707),
            (0xF900, 0xFA6D),
            (0xFA70, 0xFAD9),
            (0x16FE2, 0x16FE3),
            (0x1F210, 0x1F212),
            (0x1F214, 0x1F23B),
            (0x1F240, 0x1F248),
            (0x20000, 0x2A6D6),
            (0x2A700, 0x2B734),
            (0x2B740, 0x2B81D),
            (0x2B820, 0x2CEA1),
            (0x2CEB0, 0x2EBE0),
            (0x2F800, 0x2FA1D),
        ]

    class Japanese(unicode_set):
        """Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges"""

        class Kanji(unicode_set):
            "Unicode set for Kanji Unicode Character Range"
            _ranges: UnicodeRangeList = [
                (0x4E00, 0x9FBF),
                (0x3000, 0x303F),
            ]

        class Hiragana(unicode_set):
            """Unicode set for Hiragana Unicode Character Range"""
            _ranges: UnicodeRangeList = [
                (0x3041, 0x3096),
                (0x3099, 0x30A0),
                (0x30FC,),
                (0xFF70,),
                (0x1B001,),
                (0x1B150, 0x1B152),
                (0x1F200,),
            ]

        class Katakana(unicode_set):
            """Unicode set for Katakana Unicode Character Range"""
            _ranges: UnicodeRangeList = [
                (0x3099, 0x309C),
                (0x30A0, 0x30FF),
                (0x31F0, 0x31FF),
                (0x32D0, 0x32FE),
                (0xFF65, 0xFF9F),
                (0x1B000,),
                (0x1B164, 0x1B167),
                (0x1F201, 0x1F202),
                (0x1F213,),
            ]

        # native-language aliases for the script subsets
        漢字 = Kanji
        カタカナ = Katakana
        ひらがな = Hiragana

        # Japanese is the union of the three script subsets
        _ranges = (
            Kanji._ranges
            + Hiragana._ranges
            + Katakana._ranges
        )

    class Hangul(unicode_set):
        """Unicode set for Hangul (Korean) Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x1100, 0x11FF),
            (0x302E, 0x302F),
            (0x3131, 0x318E),
            (0x3200, 0x321C),
            (0x3260, 0x327B),
            (0x327E,),
            (0xA960, 0xA97C),
            (0xAC00, 0xD7A3),
            (0xD7B0, 0xD7C6),
            (0xD7CB, 0xD7FB),
            (0xFFA0, 0xFFBE),
            (0xFFC2, 0xFFC7),
            (0xFFCA, 0xFFCF),
            (0xFFD2, 0xFFD7),
            (0xFFDA, 0xFFDC),
        ]

    Korean = Hangul

    class CJK(Chinese, Japanese, Hangul):
        """Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range"""

    class Thai(unicode_set):
        """Unicode set for Thai Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0E01, 0x0E3A),
            (0x0E3F, 0x0E5B)
        ]

    class Arabic(unicode_set):
        """Unicode set for Arabic Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0600, 0x061B),
            (0x061E, 0x06FF),
            (0x0700, 0x077F),
        ]

    class Hebrew(unicode_set):
        """Unicode set for Hebrew Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0591, 0x05C7),
            (0x05D0, 0x05EA),
            (0x05EF, 0x05F4),
            (0xFB1D, 0xFB36),
            (0xFB38, 0xFB3C),
            (0xFB3E,),
            (0xFB40, 0xFB41),
            (0xFB43, 0xFB44),
            (0xFB46, 0xFB4F),
        ]

    class Devanagari(unicode_set):
        """Unicode set for Devanagari Unicode Character Range"""
        _ranges: UnicodeRangeList = [
            (0x0900, 0x097F),
            (0xA8E0, 0xA8FF)
        ]

    BMP = BasicMultilingualPlane

    # add language identifiers using language Unicode
    العربية = Arabic
    中文 = Chinese
    кириллица = Cyrillic
    Ελληνικά = Greek
    עִברִית = Hebrew
    日本語 = Japanese
    한국어 = Korean
    ไทย = Thai
    देवनागरी = Devanagari

    # fmt: on
|
.venv/lib/python3.11/site-packages/pyparsing/util.py
ADDED
|
@@ -0,0 +1,398 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# util.py
|
| 2 |
+
import contextlib
|
| 3 |
+
from functools import lru_cache, wraps
|
| 4 |
+
import inspect
|
| 5 |
+
import itertools
|
| 6 |
+
import types
|
| 7 |
+
from typing import Callable, Union, Iterable, TypeVar, cast
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
# a single backslash character, built via chr() to sidestep escaping confusion
_bslash = chr(92)
# type variable preserving a decorated callable's type (used by replaced_by_pep8)
C = TypeVar("C", bound=Callable)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class __config_flags:
    """Internal class for defining compatibility and debugging flags"""

    # names of all flags exposed by the concrete subclass
    _all_names: list[str] = []
    # flags whose values are fixed and may not be changed at runtime
    _fixed_names: list[str] = []
    _type_desc = "configuration"

    @classmethod
    def _set(cls, dname, value):
        # Attempting to change a fixed flag only warns (does not raise), so
        # legacy callers keep working; stacklevel=3 points at the caller of
        # enable()/disable(), not at this helper.
        if dname in cls._fixed_names:
            warnings.warn(
                f"{cls.__name__}.{dname} {cls._type_desc} is {str(getattr(cls, dname)).upper()}"
                f" and cannot be overridden",
                stacklevel=3,
            )
            return
        if dname in cls._all_names:
            setattr(cls, dname, value)
        else:
            # unknown flag name is a programming error — fail loudly
            raise ValueError(f"no such {cls._type_desc} {dname!r}")

    # public API: turn a named flag on or off
    enable = classmethod(lambda cls, name: cls._set(name, True))
    disable = classmethod(lambda cls, name: cls._set(name, False))
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@lru_cache(maxsize=128)
def col(loc: int, strg: str) -> int:
    """
    Return the 1-based column number of position *loc* within *strg*,
    counting newlines as line separators.

    Note: the default parsing behavior is to expand tabs in the input string
    before starting the parsing process. See
    :class:`ParserElement.parse_string` for more
    information on parsing strings containing ``<TAB>`` s, and suggested
    methods to maintain a consistent view of the parsed string, the parse
    location, and line and column positions within the parsed string.
    """
    # a location immediately after a newline is column 1 of the next line
    if 0 < loc < len(strg) and strg[loc - 1] == "\n":
        return 1
    # otherwise, count characters since the most recent newline before loc
    # (rfind returns -1 when there is none, which yields loc + 1... i.e. 1-based)
    return loc - strg.rfind("\n", 0, loc)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@lru_cache(maxsize=128)
def lineno(loc: int, strg: str) -> int:
    """Return the 1-based line number of position *loc* within *strg*,
    counting newlines as line separators.

    Note - the default parsing behavior is to expand tabs in the input string
    before starting the parsing process. See :class:`ParserElement.parse_string`
    for more information on parsing strings containing ``<TAB>`` s, and
    suggested methods to maintain a consistent view of the parsed string, the
    parse location, and line and column positions within the parsed string.
    """
    preceding_newlines = strg.count("\n", 0, loc)
    return preceding_newlines + 1
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
@lru_cache(maxsize=128)
def line(loc: int, strg: str) -> str:
    """
    Return the full line of text containing position *loc* within *strg*,
    counting newlines as line separators.
    """
    # start just past the previous newline (rfind -> -1 means start of string)
    start = strg.rfind("\n", 0, loc) + 1
    end = strg.find("\n", loc)
    if end < 0:
        # loc is on the last line; take everything to the end
        return strg[start:]
    return strg[start:end]
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class _UnboundedCache:
    # Internal cache with no size limit.  get/set/clear are closures over a
    # single dict, bound as instance methods via types.MethodType, so each
    # access avoids repeated attribute lookups on the dict.
    def __init__(self):
        cache = {}
        cache_get = cache.get
        # sentinel object returned by get() for missing keys, so that any
        # value (including None) can be stored and distinguished
        self.not_in_cache = not_in_cache = object()

        def get(_, key):
            return cache_get(key, not_in_cache)

        def set_(_, key, value):
            cache[key] = value

        def clear(_):
            cache.clear()

        # size is None to signal "unbounded" to code that inspects it
        self.size = None
        self.get = types.MethodType(get, self)
        self.set = types.MethodType(set_, self)
        self.clear = types.MethodType(clear, self)
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class _FifoCache:
    # Bounded cache that evicts entries in insertion (FIFO) order once more
    # than `size` entries are stored.  get/set/clear are closures over a
    # single dict, bound as instance methods for fast access.
    def __init__(self, size):
        cache = {}
        self.size = size
        # sentinel object returned by get() for missing keys, so that any
        # value (including None) can be stored and distinguished
        self.not_in_cache = not_in_cache = object()
        cache_get = cache.get
        cache_pop = cache.pop

        def get(_, key):
            return cache_get(key, not_in_cache)

        def set_(_, key, value):
            cache[key] = value
            while len(cache) > size:
                # pop oldest element in cache by getting the first key
                # (dicts preserve insertion order)
                cache_pop(next(iter(cache)))

        def clear(_):
            cache.clear()

        self.get = types.MethodType(get, self)
        self.set = types.MethodType(set_, self)
        self.clear = types.MethodType(clear, self)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class LRUMemo:
    """
    A memoizing mapping that retains `capacity` deleted items

    The memo tracks retained items by their access order; once `capacity` items
    are retained, the least recently used item is discarded.
    """

    def __init__(self, capacity):
        self._capacity = capacity
        # entries that are currently live (set and not yet deleted)
        self._active = {}
        # retained (deleted) entries, kept in access order, oldest first
        self._memory = {}

    def __getitem__(self, key):
        try:
            return self._active[key]
        except KeyError:
            # not active: look in the retained entries, and re-insert the hit
            # so it moves to the end of the dict (marks it most recently used);
            # raises KeyError if the key was never stored
            self._memory[key] = self._memory.pop(key)
            return self._memory[key]

    def __setitem__(self, key, value):
        # a fresh assignment supersedes any retained copy
        self._memory.pop(key, None)
        self._active[key] = value

    def __delitem__(self, key):
        try:
            value = self._active.pop(key)
        except KeyError:
            # deleting a key that is not active is a silent no-op
            pass
        else:
            # discard the oldest retained entries so the memory stays bounded
            # by the configured capacity, then remember this value
            oldest_keys = list(self._memory)[: -(self._capacity + 1)]
            for key_to_delete in oldest_keys:
                self._memory.pop(key_to_delete)
            self._memory[key] = value

    def clear(self):
        self._active.clear()
        self._memory.clear()
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
class UnboundedMemo(dict):
    """A memoizing mapping that retains every item ever stored, even after
    deletion is requested."""

    def __delitem__(self, key):
        # Deliberately a no-op: entries are never removed, so previously
        # memoized values stay available to later lookups.
        pass
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def _escape_regex_range_chars(s: str) -> str:
|
| 176 |
+
# escape these chars: ^-[]
|
| 177 |
+
for c in r"\^-[]":
|
| 178 |
+
s = s.replace(c, _bslash + c)
|
| 179 |
+
s = s.replace("\n", r"\n")
|
| 180 |
+
s = s.replace("\t", r"\t")
|
| 181 |
+
return str(s)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class _GroupConsecutive:
|
| 185 |
+
"""
|
| 186 |
+
Used as a callable `key` for itertools.groupby to group
|
| 187 |
+
characters that are consecutive:
|
| 188 |
+
itertools.groupby("abcdejkmpqrs", key=IsConsecutive())
|
| 189 |
+
yields:
|
| 190 |
+
(0, iter(['a', 'b', 'c', 'd', 'e']))
|
| 191 |
+
(1, iter(['j', 'k']))
|
| 192 |
+
(2, iter(['m']))
|
| 193 |
+
(3, iter(['p', 'q', 'r', 's']))
|
| 194 |
+
"""
|
| 195 |
+
|
| 196 |
+
def __init__(self):
|
| 197 |
+
self.prev = 0
|
| 198 |
+
self.counter = itertools.count()
|
| 199 |
+
self.value = -1
|
| 200 |
+
|
| 201 |
+
def __call__(self, char: str) -> int:
|
| 202 |
+
c_int = ord(char)
|
| 203 |
+
self.prev, prev = c_int, self.prev
|
| 204 |
+
if c_int - prev > 1:
|
| 205 |
+
self.value = next(self.counter)
|
| 206 |
+
return self.value
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def _collapse_string_to_ranges(
    s: Union[str, Iterable[str]], re_escape: bool = True
) -> str:
    r"""
    Take a string or list of single-character strings, and return
    a string of the consecutive characters in that string collapsed
    into groups, as might be used in a regular expression '[a-z]'
    character set:
        'a' -> 'a' -> '[a]'
        'bc' -> 'bc' -> '[bc]'
        'defgh' -> 'd-h' -> '[d-h]'
        'fdgeh' -> 'd-h' -> '[d-h]'
        'jklnpqrtu' -> 'j-lnp-rtu' -> '[j-lnp-rtu]'
    Duplicates get collapsed out:
        'aaa' -> 'a' -> '[a]'
        'bcbccb' -> 'bc' -> '[bc]'
        'defghhgf' -> 'd-h' -> '[d-h]'
        'jklnpqrjjjtu' -> 'j-lnp-rtu' -> '[j-lnp-rtu]'
    Spaces are preserved:
        'ab c' -> ' a-c' -> '[ a-c]'
    Characters that are significant when defining regex ranges
    get escaped:
        'acde[]-' -> r'\-\[\]ac-e' -> r'[\-\[\]ac-e]'
    """

    # Developer notes:
    # - Do not optimize this code assuming that the given input string
    #   or internal lists will be short (such as in loading generators into
    #   lists to make it easier to find the last element); this method is also
    #   used to generate regex ranges for character sets in the pyparsing.unicode
    #   classes, and these can be _very_ long lists of strings

    def escape_re_range_char(c: str) -> str:
        # escape chars significant inside a regex character class
        return "\\" + c if c in r"\^-][" else c

    def no_escape_re_range_char(c: str) -> str:
        return c

    # select escaping behavior once, up front, instead of branching per char
    if not re_escape:
        escape_re_range_char = no_escape_re_range_char

    ret = []

    # reduce input string to remove duplicates, and put in sorted order
    s_chars: list[str] = sorted(set(s))

    if len(s_chars) > 2:
        # find groups of characters that are consecutive (can be collapsed
        # down to "<first>-<last>")
        for _, chars in itertools.groupby(s_chars, key=_GroupConsecutive()):
            # _ is unimportant, is just used to identify groups
            # chars is an iterator of one or more consecutive characters
            # that comprise the current group
            first = last = next(chars)
            # star-unpack drains the iterator to capture its final element
            # without materializing a list; suppress(ValueError) covers the
            # single-element case where nothing is left to unpack
            with contextlib.suppress(ValueError):
                *_, last = chars

            if first == last:
                # there was only a single char in this group
                ret.append(escape_re_range_char(first))

            elif last == chr(ord(first) + 1):
                # there were only 2 characters in this group
                # 'a','b' -> 'ab'
                ret.append(f"{escape_re_range_char(first)}{escape_re_range_char(last)}")

            else:
                # there were > 2 characters in this group, make into a range
                # 'c','d','e' -> 'c-e'
                ret.append(
                    f"{escape_re_range_char(first)}-{escape_re_range_char(last)}"
                )
    else:
        # only 1 or 2 chars were given to form into groups
        #   'a' -> ['a']
        #   'bc' -> ['b', 'c']
        #   'dg' -> ['d', 'g']
        # no need to list them with "-", just return as a list
        # (after escaping)
        ret = [escape_re_range_char(c) for c in s_chars]

    return "".join(ret)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def _flatten(ll: Iterable) -> list:
|
| 294 |
+
ret = []
|
| 295 |
+
to_visit = [*ll]
|
| 296 |
+
while to_visit:
|
| 297 |
+
i = to_visit.pop(0)
|
| 298 |
+
if isinstance(i, Iterable) and not isinstance(i, str):
|
| 299 |
+
to_visit[:0] = i
|
| 300 |
+
else:
|
| 301 |
+
ret.append(i)
|
| 302 |
+
return ret
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
def make_compressed_re(
    word_list: Iterable[str], max_level: int = 2, _level: int = 1
) -> str:
    """
    Create a regular expression string from a list of words, collapsing by common
    prefixes and optional suffixes.

    Calls itself recursively to build nested sublists for each group of suffixes
    that have a shared prefix.

    :param word_list: words to compress into a single alternation regex
    :param max_level: maximum recursion depth for nested prefix grouping
    :param _level: internal - current recursion depth
    :raises ValueError: if ``word_list`` is empty (previously surfaced as an
        opaque ``IndexError`` from inside the grouping generator)
    """

    def get_suffixes_from_common_prefixes(namelist: list[str]):
        # group sorted names by first character; each group's members share
        # that character as a common prefix; suffixes are yielded longest-first
        # so regex alternation prefers the longest match
        if len(namelist) > 1:
            for prefix, suffixes in itertools.groupby(namelist, key=lambda s: s[:1]):
                yield prefix, sorted([s[1:] for s in suffixes], key=len, reverse=True)
        else:
            yield namelist[0][0], [namelist[0][1:]]

    # materialize once so generators can be length-checked and re-iterated
    word_list = list(word_list)
    if not word_list:
        raise ValueError("no words given to make_compressed_re()")

    if max_level == 0:
        # recursion cut-off: emit a plain alternation, longest words first
        return "|".join(sorted(word_list, key=len, reverse=True))

    ret = []
    sep = ""
    for initial, suffixes in get_suffixes_from_common_prefixes(sorted(word_list)):
        ret.append(sep)
        sep = "|"

        trailing = ""
        if "" in suffixes:
            # one of the words equals the shared prefix itself, so the
            # whole suffix group is optional
            trailing = "?"
            suffixes.remove("")

        if len(suffixes) > 1:
            if all(len(s) == 1 for s in suffixes):
                # all suffixes are single characters - use a character set
                ret.append(f"{initial}[{''.join(suffixes)}]{trailing}")
            else:
                if _level < max_level:
                    # recurse to compress the suffix group itself
                    suffix_re = make_compressed_re(
                        sorted(suffixes), max_level, _level + 1
                    )
                    ret.append(f"{initial}({suffix_re}){trailing}")
                else:
                    # depth limit reached: plain alternation, longest first
                    suffixes.sort(key=len, reverse=True)
                    ret.append(f"{initial}({'|'.join(suffixes)}){trailing}")
        else:
            if suffixes:
                suffix = suffixes[0]
                if len(suffix) > 1 and trailing:
                    # parenthesize a multi-char suffix so '?' applies to all of it
                    ret.append(f"{initial}({suffix}){trailing}")
                else:
                    ret.append(f"{initial}{suffix}{trailing}")
            else:
                ret.append(initial)
    return "".join(ret)
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def replaced_by_pep8(compat_name: str, fn: C) -> C:
    """Return a wrapper that exposes *fn* under its legacy (pre-PEP 8) name
    *compat_name*, delegating all calls to *fn* and carrying metadata that
    marks it as deprecated."""
    # In a future version, uncomment the code in the internal _inner() functions
    # to begin emitting DeprecationWarnings.

    # Unwrap staticmethod/classmethod
    fn = getattr(fn, "__func__", fn)

    # (Presence of 'self' arg in signature is used by explain_exception() methods, so we take
    # some extra steps to add it if present in decorated function.)
    if ["self"] == list(inspect.signature(fn).parameters)[:1]:

        @wraps(fn)
        def _inner(self, *args, **kwargs):
            # warnings.warn(
            #     f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=2
            # )
            return fn(self, *args, **kwargs)

    else:

        @wraps(fn)
        def _inner(*args, **kwargs):
            # warnings.warn(
            #     f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=2
            # )
            return fn(*args, **kwargs)

    # Overwrite the metadata copied by @wraps so the wrapper advertises the
    # compatibility name while keeping the target's annotations and keyword
    # defaults (so rendered signatures stay accurate).
    _inner.__doc__ = f"""Deprecated - use :class:`{fn.__name__}`"""
    _inner.__name__ = compat_name
    _inner.__annotations__ = fn.__annotations__
    if isinstance(fn, types.FunctionType):
        _inner.__kwdefaults__ = fn.__kwdefaults__  # type: ignore [attr-defined]
    elif isinstance(fn, type) and hasattr(fn, "__init__"):
        # fn is a class: take keyword defaults from its constructor
        _inner.__kwdefaults__ = fn.__init__.__kwdefaults__  # type: ignore [misc,attr-defined]
    else:
        _inner.__kwdefaults__ = None  # type: ignore [attr-defined]
    _inner.__qualname__ = fn.__qualname__
    return cast(C, _inner)
|
.venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
BSD 3-Clause License
|
| 2 |
+
|
| 3 |
+
Copyright (c) Soumith Chintala 2016,
|
| 4 |
+
All rights reserved.
|
| 5 |
+
|
| 6 |
+
Redistribution and use in source and binary forms, with or without
|
| 7 |
+
modification, are permitted provided that the following conditions are met:
|
| 8 |
+
|
| 9 |
+
* Redistributions of source code must retain the above copyright notice, this
|
| 10 |
+
list of conditions and the following disclaimer.
|
| 11 |
+
|
| 12 |
+
* Redistributions in binary form must reproduce the above copyright notice,
|
| 13 |
+
this list of conditions and the following disclaimer in the documentation
|
| 14 |
+
and/or other materials provided with the distribution.
|
| 15 |
+
|
| 16 |
+
* Neither the name of the copyright holder nor the names of its
|
| 17 |
+
contributors may be used to endorse or promote products derived from
|
| 18 |
+
this software without specific prior written permission.
|
| 19 |
+
|
| 20 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 21 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 22 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 23 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
| 24 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
| 25 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 26 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 27 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
| 28 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
.venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: torchvision
|
| 3 |
+
Version: 0.20.1
|
| 4 |
+
Summary: image and video datasets and models for torch deep learning
|
| 5 |
+
Home-page: https://github.com/pytorch/vision
|
| 6 |
+
Author: PyTorch Core Team
|
| 7 |
+
Author-email: soumith@pytorch.org
|
| 8 |
+
License: BSD
|
| 9 |
+
Requires-Python: >=3.8
|
| 10 |
+
Description-Content-Type: text/markdown
|
| 11 |
+
License-File: LICENSE
|
| 12 |
+
Requires-Dist: numpy
|
| 13 |
+
Requires-Dist: torch (==2.5.1)
|
| 14 |
+
Requires-Dist: pillow (!=8.3.*,>=5.3.0)
|
| 15 |
+
Provides-Extra: gdown
|
| 16 |
+
Requires-Dist: gdown (>=4.7.3) ; extra == 'gdown'
|
| 17 |
+
Provides-Extra: scipy
|
| 18 |
+
Requires-Dist: scipy ; extra == 'scipy'
|
| 19 |
+
|
| 20 |
+
# torchvision
|
| 21 |
+
|
| 22 |
+
[](https://pepy.tech/project/torchvision)
|
| 23 |
+
[](https://pytorch.org/vision/stable/index.html)
|
| 24 |
+
|
| 25 |
+
The torchvision package consists of popular datasets, model architectures, and common image transformations for computer
|
| 26 |
+
vision.
|
| 27 |
+
|
| 28 |
+
## Installation
|
| 29 |
+
|
| 30 |
+
Please refer to the [official
|
| 31 |
+
instructions](https://pytorch.org/get-started/locally/) to install the stable
|
| 32 |
+
versions of `torch` and `torchvision` on your system.
|
| 33 |
+
|
| 34 |
+
To build source, refer to our [contributing
|
| 35 |
+
page](https://github.com/pytorch/vision/blob/main/CONTRIBUTING.md#development-installation).
|
| 36 |
+
|
| 37 |
+
The following is the corresponding `torchvision` versions and supported Python
|
| 38 |
+
versions.
|
| 39 |
+
|
| 40 |
+
| `torch` | `torchvision` | Python |
|
| 41 |
+
| ------------------ | ------------------ | ------------------- |
|
| 42 |
+
| `main` / `nightly` | `main` / `nightly` | `>=3.9`, `<=3.12` |
|
| 43 |
+
| `2.4` | `0.19` | `>=3.8`, `<=3.12` |
|
| 44 |
+
| `2.3` | `0.18` | `>=3.8`, `<=3.12` |
|
| 45 |
+
| `2.2` | `0.17` | `>=3.8`, `<=3.11` |
|
| 46 |
+
| `2.1` | `0.16` | `>=3.8`, `<=3.11` |
|
| 47 |
+
| `2.0` | `0.15` | `>=3.8`, `<=3.11` |
|
| 48 |
+
|
| 49 |
+
<details>
|
| 50 |
+
<summary>older versions</summary>
|
| 51 |
+
|
| 52 |
+
| `torch` | `torchvision` | Python |
|
| 53 |
+
|---------|-------------------|---------------------------|
|
| 54 |
+
| `1.13` | `0.14` | `>=3.7.2`, `<=3.10` |
|
| 55 |
+
| `1.12` | `0.13` | `>=3.7`, `<=3.10` |
|
| 56 |
+
| `1.11` | `0.12` | `>=3.7`, `<=3.10` |
|
| 57 |
+
| `1.10` | `0.11` | `>=3.6`, `<=3.9` |
|
| 58 |
+
| `1.9` | `0.10` | `>=3.6`, `<=3.9` |
|
| 59 |
+
| `1.8` | `0.9` | `>=3.6`, `<=3.9` |
|
| 60 |
+
| `1.7` | `0.8` | `>=3.6`, `<=3.9` |
|
| 61 |
+
| `1.6` | `0.7` | `>=3.6`, `<=3.8` |
|
| 62 |
+
| `1.5` | `0.6` | `>=3.5`, `<=3.8` |
|
| 63 |
+
| `1.4` | `0.5` | `==2.7`, `>=3.5`, `<=3.8` |
|
| 64 |
+
| `1.3` | `0.4.2` / `0.4.3` | `==2.7`, `>=3.5`, `<=3.7` |
|
| 65 |
+
| `1.2` | `0.4.1` | `==2.7`, `>=3.5`, `<=3.7` |
|
| 66 |
+
| `1.1` | `0.3` | `==2.7`, `>=3.5`, `<=3.7` |
|
| 67 |
+
| `<=1.0` | `0.2` | `==2.7`, `>=3.5`, `<=3.7` |
|
| 68 |
+
|
| 69 |
+
</details>
|
| 70 |
+
|
| 71 |
+
## Image Backends
|
| 72 |
+
|
| 73 |
+
Torchvision currently supports the following image backends:
|
| 74 |
+
|
| 75 |
+
- torch tensors
|
| 76 |
+
- PIL images:
|
| 77 |
+
- [Pillow](https://python-pillow.org/)
|
| 78 |
+
- [Pillow-SIMD](https://github.com/uploadcare/pillow-simd) - a **much faster** drop-in replacement for Pillow with SIMD.
|
| 79 |
+
|
| 80 |
+
Read more in in our [docs](https://pytorch.org/vision/stable/transforms.html).
|
| 81 |
+
|
| 82 |
+
## [UNSTABLE] Video Backend
|
| 83 |
+
|
| 84 |
+
Torchvision currently supports the following video backends:
|
| 85 |
+
|
| 86 |
+
- [pyav](https://github.com/PyAV-Org/PyAV) (default) - Pythonic binding for ffmpeg libraries.
|
| 87 |
+
- video_reader - This needs ffmpeg to be installed and torchvision to be built from source. There shouldn't be any
|
| 88 |
+
conflicting version of ffmpeg installed. Currently, this is only supported on Linux.
|
| 89 |
+
|
| 90 |
+
```
|
| 91 |
+
conda install -c conda-forge 'ffmpeg<4.3'
|
| 92 |
+
python setup.py install
|
| 93 |
+
```
|
| 94 |
+
|
| 95 |
+
# Using the models on C++
|
| 96 |
+
|
| 97 |
+
Refer to [example/cpp](https://github.com/pytorch/vision/tree/main/examples/cpp).
|
| 98 |
+
|
| 99 |
+
**DISCLAIMER**: the `libtorchvision` library includes the torchvision
|
| 100 |
+
custom ops as well as most of the C++ torchvision APIs. Those APIs do not come
|
| 101 |
+
with any backward-compatibility guarantees and may change from one version to
|
| 102 |
+
the next. Only the Python APIs are stable and with backward-compatibility
|
| 103 |
+
guarantees. So, if you need stability within a C++ environment, your best bet is
|
| 104 |
+
to export the Python APIs via torchscript.
|
| 105 |
+
|
| 106 |
+
## Documentation
|
| 107 |
+
|
| 108 |
+
You can find the API documentation on the pytorch website: <https://pytorch.org/vision/stable/index.html>
|
| 109 |
+
|
| 110 |
+
## Contributing
|
| 111 |
+
|
| 112 |
+
See the [CONTRIBUTING](CONTRIBUTING.md) file for how to help out.
|
| 113 |
+
|
| 114 |
+
## Disclaimer on Datasets
|
| 115 |
+
|
| 116 |
+
This is a utility library that downloads and prepares public datasets. We do not host or distribute these datasets,
|
| 117 |
+
vouch for their quality or fairness, or claim that you have license to use the dataset. It is your responsibility to
|
| 118 |
+
determine whether you have permission to use the dataset under the dataset's license.
|
| 119 |
+
|
| 120 |
+
If you're a dataset owner and wish to update any part of it (description, citation, etc.), or do not want your dataset
|
| 121 |
+
to be included in this library, please get in touch through a GitHub issue. Thanks for your contribution to the ML
|
| 122 |
+
community!
|
| 123 |
+
|
| 124 |
+
## Pre-trained Model License
|
| 125 |
+
|
| 126 |
+
The pre-trained models provided in this library may have their own licenses or terms and conditions derived from the
|
| 127 |
+
dataset used for training. It is your responsibility to determine whether you have permission to use the models for your
|
| 128 |
+
use case.
|
| 129 |
+
|
| 130 |
+
More specifically, SWAG models are released under the CC-BY-NC 4.0 license. See
|
| 131 |
+
[SWAG LICENSE](https://github.com/facebookresearch/SWAG/blob/main/LICENSE) for additional details.
|
| 132 |
+
|
| 133 |
+
## Citing TorchVision
|
| 134 |
+
|
| 135 |
+
If you find TorchVision useful in your work, please consider citing the following BibTeX entry:
|
| 136 |
+
|
| 137 |
+
```bibtex
|
| 138 |
+
@software{torchvision2016,
|
| 139 |
+
title = {TorchVision: PyTorch's Computer Vision library},
|
| 140 |
+
author = {TorchVision maintainers and contributors},
|
| 141 |
+
year = 2016,
|
| 142 |
+
journal = {GitHub repository},
|
| 143 |
+
publisher = {GitHub},
|
| 144 |
+
howpublished = {\url{https://github.com/pytorch/vision}}
|
| 145 |
+
}
|
| 146 |
+
```
|
.venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,384 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
torchvision-0.20.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
torchvision-0.20.1.dist-info/LICENSE,sha256=ZQL2doUc_iX4r3VTHfsyN1tzJbc8N-e0N0H6QiiT5x0,1517
|
| 3 |
+
torchvision-0.20.1.dist-info/METADATA,sha256=KdpnEZQcR2IsY1ARsKthcFVj1_jftHgCQSJ_kP843Wk,6068
|
| 4 |
+
torchvision-0.20.1.dist-info/RECORD,,
|
| 5 |
+
torchvision-0.20.1.dist-info/WHEEL,sha256=BbwEHnplQi27k2WzlwFVqlnQcGtT21PYXfeY1Xqnk8M,104
|
| 6 |
+
torchvision-0.20.1.dist-info/top_level.txt,sha256=ucJZoaluBW9BGYT4TuCE6zoZY_JuSP30wbDh-IRpxUU,12
|
| 7 |
+
torchvision.libs/libcudart.41118559.so.12,sha256=h3QiT1sRpzsV0HSj_M5zJzIsXEz9_ZJNaoJnee7JaP4,707904
|
| 8 |
+
torchvision.libs/libjpeg.ceea7512.so.62,sha256=Q0Nt1U7kvyOPOH37o9EyH96wBEFcgH1NNJDDaL1eXew,285328
|
| 9 |
+
torchvision.libs/libnvjpeg.02b6d700.so.12,sha256=btLqpaLN_zk7yVxUNHS5YRemcYi76kbqdNEvf4OHR5c,6722352
|
| 10 |
+
torchvision.libs/libpng16.7f72a3c5.so.16,sha256=oLjzyAs4Xamd6gz3yNqpVAANI7AWTxEqvnsvr_Cg9j0,1079081
|
| 11 |
+
torchvision.libs/libwebp.4a54d2c8.so.4,sha256=PCamJr4mr2g5WEa4H_DP9GDtB0TInsPuUMYmiaYTdhE,320536
|
| 12 |
+
torchvision.libs/libz.5f199d92.so.1,sha256=Cw5oKp3H_UiVpngyiPhRt5PciWM_KHFAJ5dPpNZvORQ,124744
|
| 13 |
+
torchvision/_C.so,sha256=-34bdXC9j8FPlJd5P4nhiMzxYdfBTKHyNuADaHee5gk,7746688
|
| 14 |
+
torchvision/__init__.py,sha256=7iyfQRDPEgPbSMQmAWBzKawfGXCfqRwVL42V61NDenM,3534
|
| 15 |
+
torchvision/__pycache__/__init__.cpython-311.pyc,,
|
| 16 |
+
torchvision/__pycache__/_internally_replaced_utils.cpython-311.pyc,,
|
| 17 |
+
torchvision/__pycache__/_meta_registrations.cpython-311.pyc,,
|
| 18 |
+
torchvision/__pycache__/_utils.cpython-311.pyc,,
|
| 19 |
+
torchvision/__pycache__/extension.cpython-311.pyc,,
|
| 20 |
+
torchvision/__pycache__/utils.cpython-311.pyc,,
|
| 21 |
+
torchvision/__pycache__/version.cpython-311.pyc,,
|
| 22 |
+
torchvision/_internally_replaced_utils.py,sha256=67zSUHOn6JwdnMUQchHgpNLCtWQQ9dJFpV_OUn8Qb_w,1389
|
| 23 |
+
torchvision/_meta_registrations.py,sha256=lkEGW61fKUrGSh0iOFsZ1ZHskItS1EJ9Oo2UfM-OvQ8,7208
|
| 24 |
+
torchvision/_utils.py,sha256=6TWK0JGaZVQrofgCAp5ox61_NQE2gIwhYouKQMiTaJ8,934
|
| 25 |
+
torchvision/datasets/__init__.py,sha256=AHSoX8LkWIt7RGlJDmk64pDvmWq6GCh-D7XwE2l382A,3587
|
| 26 |
+
torchvision/datasets/__pycache__/__init__.cpython-311.pyc,,
|
| 27 |
+
torchvision/datasets/__pycache__/_optical_flow.cpython-311.pyc,,
|
| 28 |
+
torchvision/datasets/__pycache__/_stereo_matching.cpython-311.pyc,,
|
| 29 |
+
torchvision/datasets/__pycache__/caltech.cpython-311.pyc,,
|
| 30 |
+
torchvision/datasets/__pycache__/celeba.cpython-311.pyc,,
|
| 31 |
+
torchvision/datasets/__pycache__/cifar.cpython-311.pyc,,
|
| 32 |
+
torchvision/datasets/__pycache__/cityscapes.cpython-311.pyc,,
|
| 33 |
+
torchvision/datasets/__pycache__/clevr.cpython-311.pyc,,
|
| 34 |
+
torchvision/datasets/__pycache__/coco.cpython-311.pyc,,
|
| 35 |
+
torchvision/datasets/__pycache__/country211.cpython-311.pyc,,
|
| 36 |
+
torchvision/datasets/__pycache__/dtd.cpython-311.pyc,,
|
| 37 |
+
torchvision/datasets/__pycache__/eurosat.cpython-311.pyc,,
|
| 38 |
+
torchvision/datasets/__pycache__/fakedata.cpython-311.pyc,,
|
| 39 |
+
torchvision/datasets/__pycache__/fer2013.cpython-311.pyc,,
|
| 40 |
+
torchvision/datasets/__pycache__/fgvc_aircraft.cpython-311.pyc,,
|
| 41 |
+
torchvision/datasets/__pycache__/flickr.cpython-311.pyc,,
|
| 42 |
+
torchvision/datasets/__pycache__/flowers102.cpython-311.pyc,,
|
| 43 |
+
torchvision/datasets/__pycache__/folder.cpython-311.pyc,,
|
| 44 |
+
torchvision/datasets/__pycache__/food101.cpython-311.pyc,,
|
| 45 |
+
torchvision/datasets/__pycache__/gtsrb.cpython-311.pyc,,
|
| 46 |
+
torchvision/datasets/__pycache__/hmdb51.cpython-311.pyc,,
|
| 47 |
+
torchvision/datasets/__pycache__/imagenet.cpython-311.pyc,,
|
| 48 |
+
torchvision/datasets/__pycache__/imagenette.cpython-311.pyc,,
|
| 49 |
+
torchvision/datasets/__pycache__/inaturalist.cpython-311.pyc,,
|
| 50 |
+
torchvision/datasets/__pycache__/kinetics.cpython-311.pyc,,
|
| 51 |
+
torchvision/datasets/__pycache__/kitti.cpython-311.pyc,,
|
| 52 |
+
torchvision/datasets/__pycache__/lfw.cpython-311.pyc,,
|
| 53 |
+
torchvision/datasets/__pycache__/lsun.cpython-311.pyc,,
|
| 54 |
+
torchvision/datasets/__pycache__/mnist.cpython-311.pyc,,
|
| 55 |
+
torchvision/datasets/__pycache__/moving_mnist.cpython-311.pyc,,
|
| 56 |
+
torchvision/datasets/__pycache__/omniglot.cpython-311.pyc,,
|
| 57 |
+
torchvision/datasets/__pycache__/oxford_iiit_pet.cpython-311.pyc,,
|
| 58 |
+
torchvision/datasets/__pycache__/pcam.cpython-311.pyc,,
|
| 59 |
+
torchvision/datasets/__pycache__/phototour.cpython-311.pyc,,
|
| 60 |
+
torchvision/datasets/__pycache__/places365.cpython-311.pyc,,
|
| 61 |
+
torchvision/datasets/__pycache__/rendered_sst2.cpython-311.pyc,,
|
| 62 |
+
torchvision/datasets/__pycache__/sbd.cpython-311.pyc,,
|
| 63 |
+
torchvision/datasets/__pycache__/sbu.cpython-311.pyc,,
|
| 64 |
+
torchvision/datasets/__pycache__/semeion.cpython-311.pyc,,
|
| 65 |
+
torchvision/datasets/__pycache__/stanford_cars.cpython-311.pyc,,
|
| 66 |
+
torchvision/datasets/__pycache__/stl10.cpython-311.pyc,,
|
| 67 |
+
torchvision/datasets/__pycache__/sun397.cpython-311.pyc,,
|
| 68 |
+
torchvision/datasets/__pycache__/svhn.cpython-311.pyc,,
|
| 69 |
+
torchvision/datasets/__pycache__/ucf101.cpython-311.pyc,,
|
| 70 |
+
torchvision/datasets/__pycache__/usps.cpython-311.pyc,,
|
| 71 |
+
torchvision/datasets/__pycache__/utils.cpython-311.pyc,,
|
| 72 |
+
torchvision/datasets/__pycache__/video_utils.cpython-311.pyc,,
|
| 73 |
+
torchvision/datasets/__pycache__/vision.cpython-311.pyc,,
|
| 74 |
+
torchvision/datasets/__pycache__/voc.cpython-311.pyc,,
|
| 75 |
+
torchvision/datasets/__pycache__/widerface.cpython-311.pyc,,
|
| 76 |
+
torchvision/datasets/_optical_flow.py,sha256=oRm_6rlBpJyi9d2IeTiebHssDEXQDKEKGw3ZqNVDMrg,19697
|
| 77 |
+
torchvision/datasets/_stereo_matching.py,sha256=f1sAkmyKKmFtyvrw4osElkMR7vupD8gEp8Y2rQ4btFA,49112
|
| 78 |
+
torchvision/datasets/caltech.py,sha256=6W8artbXAhp7lok8LDhx28Q5-MkupkyUmmc1RTXACnQ,8933
|
| 79 |
+
torchvision/datasets/celeba.py,sha256=BfMfogQ5DkzdbZMXF7qC7PMSAEY4o-jeEQTYKdGszeQ,8470
|
| 80 |
+
torchvision/datasets/cifar.py,sha256=mwMBBDUu10FE1SshtQQaQ65jSt3XeH44rkkaUUN_UcE,5850
|
| 81 |
+
torchvision/datasets/cityscapes.py,sha256=h6uX9d886G86_zm1Ok_Nz876wA7oC50qDWfQTn8ErKA,10321
|
| 82 |
+
torchvision/datasets/clevr.py,sha256=Yw2dTlep-ERTzIsKHPGL9cblF88mGlRcoGoBGac1XZ0,3460
|
| 83 |
+
torchvision/datasets/coco.py,sha256=Zmfp6yZgWcDxXLDshcTnxDaKC6xvYsasPcBh_j9E9m4,4180
|
| 84 |
+
torchvision/datasets/country211.py,sha256=T_WIsox6Ve6CxmFwnx6bX3KkLy1xzBCbAFBcGqHVYC8,2436
|
| 85 |
+
torchvision/datasets/dtd.py,sha256=c6GtnNd4xj4BCE52GMaXnn-AnZm7yn9Yha8Iwb5xhCo,4019
|
| 86 |
+
torchvision/datasets/eurosat.py,sha256=nKBDlYaYupwughReDD7Z_EH_WVTqqSyGRBjnIjmvUUk,2307
|
| 87 |
+
torchvision/datasets/fakedata.py,sha256=gKmN6VyQzWjjeEPpLPxb9i4DWwW-MtGVJfZf8uwHgyo,2447
|
| 88 |
+
torchvision/datasets/fer2013.py,sha256=f_Zj3Qf32x8ew5dZu8A03uph3I4AUvmmZabaLhTSMnU,5118
|
| 89 |
+
torchvision/datasets/fgvc_aircraft.py,sha256=Y5P7SsYLeXDuxy7VHVTx9TYDKHloxtxlxT4JBDgbvXg,4626
|
| 90 |
+
torchvision/datasets/flickr.py,sha256=rcbyRlYd-d_vRW9qmOPfX1bKBgFu4NbF-qlldqt2mcU,5431
|
| 91 |
+
torchvision/datasets/flowers102.py,sha256=SdPXQtHAeZ5Iod0xyK2Xq7n0ENA6YIoEUFfRqiBu1Q0,4641
|
| 92 |
+
torchvision/datasets/folder.py,sha256=bh7Jv0BOphBkKYxD-BogUWexE9RIrGR0FLM5MR24aGM,12919
|
| 93 |
+
torchvision/datasets/food101.py,sha256=1vbbbahI-Lp9xySy5bsnS50TeV93ovesSIotY0astw0,3752
|
| 94 |
+
torchvision/datasets/gtsrb.py,sha256=0n6GQIGPuKU7yA0tSpiAA1UktoShE2vzeA2EqhQZK-Q,3785
|
| 95 |
+
torchvision/datasets/hmdb51.py,sha256=lC16QNHvbKkS8QfgVdhBvSwN2eLRFUBUNL021nkvgdc,5971
|
| 96 |
+
torchvision/datasets/imagenet.py,sha256=kllmhLsUPgm88rww0j-OaEa-iuzGgyu49q6gphpXLjA,8691
|
| 97 |
+
torchvision/datasets/imagenette.py,sha256=zzgx2cWRkDCrzX93qbhv4fOdngu8WXpTT6M0ZCg_AsE,4456
|
| 98 |
+
torchvision/datasets/inaturalist.py,sha256=8F43yInRw4Q4yAjWalwhgDIYkvzHtWBiQ_MtB0Jyn4g,10161
|
| 99 |
+
torchvision/datasets/kinetics.py,sha256=JlLErOUo7OQf_lp-vUS2yNtfP5vxMgjl-onPLj2tffw,10416
|
| 100 |
+
torchvision/datasets/kitti.py,sha256=8mCScWNce0OdG3b6vWCJGR370CydbK2Iy8W96Dfsl0I,5637
|
| 101 |
+
torchvision/datasets/lfw.py,sha256=7cwiL0PgnnS2d2CTse8LL2mOoo_eremqosyYmHETiwI,10560
|
| 102 |
+
torchvision/datasets/lsun.py,sha256=SAwzOTu0cQr7Tfo-iT0hIT986lCwOXKsIQYccSPDTBg,5728
|
| 103 |
+
torchvision/datasets/mnist.py,sha256=ymXGCJfp0V3YLsMGw15Ofry-_NwmbvaXnp13eJ67GQA,21718
|
| 104 |
+
torchvision/datasets/moving_mnist.py,sha256=6yCTZVgIlWy2f9bNlrAjpUWryeLohaWuN0bRhMdAERw,3644
|
| 105 |
+
torchvision/datasets/omniglot.py,sha256=b2MTG1TVxq3dk2ASBdHLu5uxLBnT4lpgSer8k9uuQq4,4151
|
| 106 |
+
torchvision/datasets/oxford_iiit_pet.py,sha256=t4me06AwjDjSTIE7f80VFuGxISGHFPz6B4Sn3uOrCBw,5519
|
| 107 |
+
torchvision/datasets/pcam.py,sha256=Ub7UWrAufIzLXN8p6Cunt7osnHCNTL-sxDmEMGypq2Q,5285
|
| 108 |
+
torchvision/datasets/phototour.py,sha256=4Sjdg-1dHk5Me5Ku-G75zSek0vs0CqkpQUgGF0KzI84,8037
|
| 109 |
+
torchvision/datasets/places365.py,sha256=rdktgfZAQWtXwptMeXNsNz3mqftmaN7DqMqWH0eTicY,7259
|
| 110 |
+
torchvision/datasets/rendered_sst2.py,sha256=2NRiL3I1hDrOdNllubdQ-gQ-Unaaqb2mLAXG4_JL5wY,3597
|
| 111 |
+
torchvision/datasets/samplers/__init__.py,sha256=W1ZtQpGLG6aoHylo1t8PEsHIVoWwso5bSFk9JzKfH8g,161
|
| 112 |
+
torchvision/datasets/samplers/__pycache__/__init__.cpython-311.pyc,,
|
| 113 |
+
torchvision/datasets/samplers/__pycache__/clip_sampler.cpython-311.pyc,,
|
| 114 |
+
torchvision/datasets/samplers/clip_sampler.py,sha256=1-k3bxToGpBlqC4-iyVDggtojA701NflW0nBRLK27tQ,6244
|
| 115 |
+
torchvision/datasets/sbd.py,sha256=BpowMEO3_IxJgyjrtEN7XSLAKlrONVhCGr2kJXtTIzs,5414
|
| 116 |
+
torchvision/datasets/sbu.py,sha256=LFMPoEeuf7w0ABpevnIAuoxnTL-n1F1yzBVtB2z7m08,4143
|
| 117 |
+
torchvision/datasets/semeion.py,sha256=6GK9LWRZgwOFQA6yVxe5V7IsbM64-H4smYfPBquYGhY,3148
|
| 118 |
+
torchvision/datasets/stanford_cars.py,sha256=WgmPvMR-ZOpw-IV53Ud2cNvnnC1rHUDl-soCJSzEP1Y,4517
|
| 119 |
+
torchvision/datasets/stl10.py,sha256=0rUR0czJgbilfJ57L8qvwsSdojEhBsxtXLNzdxEJJPc,7293
|
| 120 |
+
torchvision/datasets/sun397.py,sha256=q_qfa_rdx4GUklR9oIHCgQC0JUKXMc7UudTq6yUeJPQ,2783
|
| 121 |
+
torchvision/datasets/svhn.py,sha256=Vk8VO74JUUaZHvejvkWJBRnmk-zpmHwjksMCZoBDWDc,4828
|
| 122 |
+
torchvision/datasets/ucf101.py,sha256=s7rHl7qonY7PnmEZac_O2gmJUIVFzyNxVbvMY7IY_Io,5533
|
| 123 |
+
torchvision/datasets/usps.py,sha256=7IP-xNZUJQNibubSodJgnpUJlCvNe-prd8BHsrbzSR0,3500
|
| 124 |
+
torchvision/datasets/utils.py,sha256=OJP_dKoAM1gx6OUSjLQnwRAN4DRFMx-iAHLDxBResro,16355
|
| 125 |
+
torchvision/datasets/video_utils.py,sha256=14GvzCRi7tbfeCq31MN9XP_6-bfewRSrvwavO4VBFdk,17213
|
| 126 |
+
torchvision/datasets/vision.py,sha256=x8AuTqEBwwBoHmkkWD6Iki8o5LMxac2yhrzIFBDgodE,4249
|
| 127 |
+
torchvision/datasets/voc.py,sha256=LhdQavn7-nq13zf9HIfjNYxPDa5SaTUDgayDe8uLfZc,8835
|
| 128 |
+
torchvision/datasets/widerface.py,sha256=f70xsvDz-PGLUA2eUFP6wSqbaA_ws0EErUPFvjnJ7wE,8323
|
| 129 |
+
torchvision/extension.py,sha256=YWBDURfCFXSmRvXi2iEg2L0hafN2-RnybpImh9JAUtQ,3141
|
| 130 |
+
torchvision/image.so,sha256=yCN3wsK-YM7fgMFxh02NUNiwkQL-QsILOkJrdxWh_E0,667281
|
| 131 |
+
torchvision/io/__init__.py,sha256=GMwjZuig-LWPufamClwl5EpFq0fExa7MXabkaMEuaHs,1625
|
| 132 |
+
torchvision/io/__pycache__/__init__.cpython-311.pyc,,
|
| 133 |
+
torchvision/io/__pycache__/_load_gpu_decoder.cpython-311.pyc,,
|
| 134 |
+
torchvision/io/__pycache__/_video_opt.cpython-311.pyc,,
|
| 135 |
+
torchvision/io/__pycache__/image.cpython-311.pyc,,
|
| 136 |
+
torchvision/io/__pycache__/video.cpython-311.pyc,,
|
| 137 |
+
torchvision/io/__pycache__/video_reader.cpython-311.pyc,,
|
| 138 |
+
torchvision/io/_load_gpu_decoder.py,sha256=Cc8eP620qPDFc0q2qd-VYtjxtsgFPjOgg7Z04RXRziU,178
|
| 139 |
+
torchvision/io/_video_opt.py,sha256=oW2Vvs13fa9nopb4Ot3n_VNiOUCn5ZPLQnH8Xf8-81g,20456
|
| 140 |
+
torchvision/io/image.py,sha256=KooxdS2Ov2_mnbIOnYbSJU3SjPMvY0ck6NKIZ3hWneQ,17714
|
| 141 |
+
torchvision/io/video.py,sha256=AGMKrxzGb2KStloWlElYidVUvu3rRnYZyQF62MFXKgk,16779
|
| 142 |
+
torchvision/io/video_reader.py,sha256=eI09x1vuUsbtL6rnyeiv894y8EA9bfdJakV1zWYzBtQ,11689
|
| 143 |
+
torchvision/models/__init__.py,sha256=A8GQPE1bl3oUHpuD9ND53DV557IPY4459FNLW6sVXGI,865
|
| 144 |
+
torchvision/models/__pycache__/__init__.cpython-311.pyc,,
|
| 145 |
+
torchvision/models/__pycache__/_api.cpython-311.pyc,,
|
| 146 |
+
torchvision/models/__pycache__/_meta.cpython-311.pyc,,
|
| 147 |
+
torchvision/models/__pycache__/_utils.cpython-311.pyc,,
|
| 148 |
+
torchvision/models/__pycache__/alexnet.cpython-311.pyc,,
|
| 149 |
+
torchvision/models/__pycache__/convnext.cpython-311.pyc,,
|
| 150 |
+
torchvision/models/__pycache__/densenet.cpython-311.pyc,,
|
| 151 |
+
torchvision/models/__pycache__/efficientnet.cpython-311.pyc,,
|
| 152 |
+
torchvision/models/__pycache__/feature_extraction.cpython-311.pyc,,
|
| 153 |
+
torchvision/models/__pycache__/googlenet.cpython-311.pyc,,
|
| 154 |
+
torchvision/models/__pycache__/inception.cpython-311.pyc,,
|
| 155 |
+
torchvision/models/__pycache__/maxvit.cpython-311.pyc,,
|
| 156 |
+
torchvision/models/__pycache__/mnasnet.cpython-311.pyc,,
|
| 157 |
+
torchvision/models/__pycache__/mobilenet.cpython-311.pyc,,
|
| 158 |
+
torchvision/models/__pycache__/mobilenetv2.cpython-311.pyc,,
|
| 159 |
+
torchvision/models/__pycache__/mobilenetv3.cpython-311.pyc,,
|
| 160 |
+
torchvision/models/__pycache__/regnet.cpython-311.pyc,,
|
| 161 |
+
torchvision/models/__pycache__/resnet.cpython-311.pyc,,
|
| 162 |
+
torchvision/models/__pycache__/shufflenetv2.cpython-311.pyc,,
|
| 163 |
+
torchvision/models/__pycache__/squeezenet.cpython-311.pyc,,
|
| 164 |
+
torchvision/models/__pycache__/swin_transformer.cpython-311.pyc,,
|
| 165 |
+
torchvision/models/__pycache__/vgg.cpython-311.pyc,,
|
| 166 |
+
torchvision/models/__pycache__/vision_transformer.cpython-311.pyc,,
|
| 167 |
+
torchvision/models/_api.py,sha256=uIIJnxX1zYMNpdvJ0haSq15_XlR1QteFZBYVAdtEheg,10054
|
| 168 |
+
torchvision/models/_meta.py,sha256=fqpeQBsf9EEYbmApQ8Q0LKyM9_UFwjireII5mwDbwJY,28875
|
| 169 |
+
torchvision/models/_utils.py,sha256=S8uDD7maNefy-fEW6mpz8dFU68acK1HxN0kt1qpkkDo,10893
|
| 170 |
+
torchvision/models/alexnet.py,sha256=dvBZLVH60TOTHCNNkWg0TFLtuJ5Ghh_xXN73r3Vyq58,4488
|
| 171 |
+
torchvision/models/convnext.py,sha256=tP73tH-us6h2KSdVcPypEX9Izk5lsr82KsGT15mj4NE,15326
|
| 172 |
+
torchvision/models/densenet.py,sha256=OZEsHJw76kOSRG4TKhLy7lPGsGEixy6llHkpC8snSOo,16825
|
| 173 |
+
torchvision/models/detection/__init__.py,sha256=JwYm_fTGO_FeRg4eTOQLwQPZ9lC9jheZ-QEoJgqKTjg,168
|
| 174 |
+
torchvision/models/detection/__pycache__/__init__.cpython-311.pyc,,
|
| 175 |
+
torchvision/models/detection/__pycache__/_utils.cpython-311.pyc,,
|
| 176 |
+
torchvision/models/detection/__pycache__/anchor_utils.cpython-311.pyc,,
|
| 177 |
+
torchvision/models/detection/__pycache__/backbone_utils.cpython-311.pyc,,
|
| 178 |
+
torchvision/models/detection/__pycache__/faster_rcnn.cpython-311.pyc,,
|
| 179 |
+
torchvision/models/detection/__pycache__/fcos.cpython-311.pyc,,
|
| 180 |
+
torchvision/models/detection/__pycache__/generalized_rcnn.cpython-311.pyc,,
|
| 181 |
+
torchvision/models/detection/__pycache__/image_list.cpython-311.pyc,,
|
| 182 |
+
torchvision/models/detection/__pycache__/keypoint_rcnn.cpython-311.pyc,,
|
| 183 |
+
torchvision/models/detection/__pycache__/mask_rcnn.cpython-311.pyc,,
|
| 184 |
+
torchvision/models/detection/__pycache__/retinanet.cpython-311.pyc,,
|
| 185 |
+
torchvision/models/detection/__pycache__/roi_heads.cpython-311.pyc,,
|
| 186 |
+
torchvision/models/detection/__pycache__/rpn.cpython-311.pyc,,
|
| 187 |
+
torchvision/models/detection/__pycache__/ssd.cpython-311.pyc,,
|
| 188 |
+
torchvision/models/detection/__pycache__/ssdlite.cpython-311.pyc,,
|
| 189 |
+
torchvision/models/detection/__pycache__/transform.cpython-311.pyc,,
|
| 190 |
+
torchvision/models/detection/_utils.py,sha256=2y3FQ4F5yXhFM7VIWmu_70FpKgZjxdT_ucfzYwi3ZUQ,22127
|
| 191 |
+
torchvision/models/detection/anchor_utils.py,sha256=8Ix1Vp3i2kgJGr6esie3rw0_yAjtrUSvLXVKPaoZeQo,11859
|
| 192 |
+
torchvision/models/detection/backbone_utils.py,sha256=4FyzocR6YS7cG5IJTMRwC44tupbXQDA_Ru_8qqaju2I,10548
|
| 193 |
+
torchvision/models/detection/faster_rcnn.py,sha256=8DnegLKZnr8Q-zrzGT7_peIc_k_R1q1ijDH5n1P3gQE,36979
|
| 194 |
+
torchvision/models/detection/fcos.py,sha256=8r8MayvUMeTKfDoza4Hy67ChgRglLzBG6YS5qNe84sM,34235
|
| 195 |
+
torchvision/models/detection/generalized_rcnn.py,sha256=4-Dp8Vx-SjDDSZ7TsZ11rmkvEH336aLuSOlERXiQ7fs,4743
|
| 196 |
+
torchvision/models/detection/image_list.py,sha256=SUJ3xMn-1xc6ivYZUNIdWBh3RH9xD8EtCdpsXnPI_iM,783
|
| 197 |
+
torchvision/models/detection/keypoint_rcnn.py,sha256=4HxwRrp8lJfdyi8K3eBq4vstbRrL8bZc2Hhh-pVHjsI,21947
|
| 198 |
+
torchvision/models/detection/mask_rcnn.py,sha256=X1GQS314qOy4uCCp7MPfH6W12IydRwW-tDCmCnB1FGg,26713
|
| 199 |
+
torchvision/models/detection/retinanet.py,sha256=17Q0RdqqugASEVDGJfr8lCD61zjEqD5XxwQZAmZUZ24,37300
|
| 200 |
+
torchvision/models/detection/roi_heads.py,sha256=Uh9950xZUEmejwD2pRRhKvqNV0bY_G2Om8yGC2EdDDg,33822
|
| 201 |
+
torchvision/models/detection/rpn.py,sha256=7jbqPpLelnGCb5Fn-muUXeZF9EQ2nhE5r2aNAuR9V0M,15838
|
| 202 |
+
torchvision/models/detection/ssd.py,sha256=tbsgVbRD36WrjkZEB4xi1fvOXT62ry0p8G_Sd-j5CrY,28979
|
| 203 |
+
torchvision/models/detection/ssdlite.py,sha256=8nyEUYONUYe319JpgevKEfjr_FxCgDNU8gOyfuZ3L3c,13219
|
| 204 |
+
torchvision/models/detection/transform.py,sha256=Ma0CDvLCMlk3MxS3asXcDxrSosRLacaLpi-T34LXm1A,12189
|
| 205 |
+
torchvision/models/efficientnet.py,sha256=4qyeoXkYGFyUsBDt8TygDYycMMt1zhGwB_l4PmoPv4g,43090
|
| 206 |
+
torchvision/models/feature_extraction.py,sha256=RD4Ba_6FPKRVBZs1Io3ebA1P-iZS7T7flxY5MWPPlv4,26339
|
| 207 |
+
torchvision/models/googlenet.py,sha256=ni7VlSJW2_zG0Adxx56fuN5t4yI6vROBAuAu06-V4f0,12806
|
| 208 |
+
torchvision/models/inception.py,sha256=ifrLErzOVG-vlwQOMXLX5yMgcpHxCQQ17L7Wacn5QhQ,18851
|
| 209 |
+
torchvision/models/maxvit.py,sha256=_8L8gG5ob2DCZJbiny81P1fBAMmOcOKbTngckPy8xTE,32053
|
| 210 |
+
torchvision/models/mnasnet.py,sha256=h9jY1TupaChZj9khnXya_l4O1exUWhWOOCmhJCCImKc,17574
|
| 211 |
+
torchvision/models/mobilenet.py,sha256=lSRVxw2TL3LFBwCadvyvH6n3GzqUTnK2-rhX3MOgSrs,211
|
| 212 |
+
torchvision/models/mobilenetv2.py,sha256=v9cRBAp7_C_50JFkjGZ0luvuh45oCYgYn37pcG2UL8o,9710
|
| 213 |
+
torchvision/models/mobilenetv3.py,sha256=-Xk32m_Wdn-ap8wCL4Tl7wjiROIwDwhasInYTMwwOrE,16279
|
| 214 |
+
torchvision/models/optical_flow/__init__.py,sha256=0zRlMWQJCjFqoUafUXVgO89-z7em7tACo9E8hHSq9RQ,20
|
| 215 |
+
torchvision/models/optical_flow/__pycache__/__init__.cpython-311.pyc,,
|
| 216 |
+
torchvision/models/optical_flow/__pycache__/_utils.cpython-311.pyc,,
|
| 217 |
+
torchvision/models/optical_flow/__pycache__/raft.cpython-311.pyc,,
|
| 218 |
+
torchvision/models/optical_flow/_utils.py,sha256=v-tQJzYmYukrD1sQAE-5j5jxyvComwF1UdGkz5tVTLw,2077
|
| 219 |
+
torchvision/models/optical_flow/raft.py,sha256=FpSLPXisugu5Rzp_D5XCr037snBapMJ0dDPrw9c3CNk,39995
|
| 220 |
+
torchvision/models/quantization/__init__.py,sha256=gqFM7zI4UUHKKBDJAumozOn7xPL0JtvyNS8Ejz6QXp0,125
|
| 221 |
+
torchvision/models/quantization/__pycache__/__init__.cpython-311.pyc,,
|
| 222 |
+
torchvision/models/quantization/__pycache__/googlenet.cpython-311.pyc,,
|
| 223 |
+
torchvision/models/quantization/__pycache__/inception.cpython-311.pyc,,
|
| 224 |
+
torchvision/models/quantization/__pycache__/mobilenet.cpython-311.pyc,,
|
| 225 |
+
torchvision/models/quantization/__pycache__/mobilenetv2.cpython-311.pyc,,
|
| 226 |
+
torchvision/models/quantization/__pycache__/mobilenetv3.cpython-311.pyc,,
|
| 227 |
+
torchvision/models/quantization/__pycache__/resnet.cpython-311.pyc,,
|
| 228 |
+
torchvision/models/quantization/__pycache__/shufflenetv2.cpython-311.pyc,,
|
| 229 |
+
torchvision/models/quantization/__pycache__/utils.cpython-311.pyc,,
|
| 230 |
+
torchvision/models/quantization/googlenet.py,sha256=C-8lm9TnjkEuwu6zaPp0r5mb0QMYvTMGOtz2--s1IFo,8080
|
| 231 |
+
torchvision/models/quantization/inception.py,sha256=hg8K1QNk7T-Qo3zOB47eupS3Thu_RjVI6mG2HzAEx8M,10815
|
| 232 |
+
torchvision/models/quantization/mobilenet.py,sha256=lSRVxw2TL3LFBwCadvyvH6n3GzqUTnK2-rhX3MOgSrs,211
|
| 233 |
+
torchvision/models/quantization/mobilenetv2.py,sha256=ggpNLU4_JkyMn8IPTgj1p0xx_Wvspcii2Wd3ISj5tBE,5883
|
| 234 |
+
torchvision/models/quantization/mobilenetv3.py,sha256=PVWmSP62Pn8hQkd682l6uYFLQp1nxZltMOE-FhhO9OU,9230
|
| 235 |
+
torchvision/models/quantization/resnet.py,sha256=9Hb6KyPv33Jj1A6JciXvGX06q0RkwwP10u8GxFfmorM,17939
|
| 236 |
+
torchvision/models/quantization/shufflenetv2.py,sha256=eS2y34ZTG03dNJgtVJ2qSXQWZ22PHIWBYeC8cbvI1yI,16884
|
| 237 |
+
torchvision/models/quantization/utils.py,sha256=n8mWsK9_Ek_M2AqGKPLoLlcKaYGH2PrF2l5_W84oBMk,2058
|
| 238 |
+
torchvision/models/regnet.py,sha256=-7s5n0qzXZPR9HgzOk9aj1sv9dWZ3AxnP7CmZRdUeZI,63553
|
| 239 |
+
torchvision/models/resnet.py,sha256=dJmlBZrXsaH491Q8BLShN5UUD62DfDhTC0j_XZYQv24,38932
|
| 240 |
+
torchvision/models/segmentation/__init__.py,sha256=TGk6UdVXAMtwBpYalrvdXZnmSwqzTDOT1lgKrfzhHrQ,66
|
| 241 |
+
torchvision/models/segmentation/__pycache__/__init__.cpython-311.pyc,,
|
| 242 |
+
torchvision/models/segmentation/__pycache__/_utils.cpython-311.pyc,,
|
| 243 |
+
torchvision/models/segmentation/__pycache__/deeplabv3.cpython-311.pyc,,
|
| 244 |
+
torchvision/models/segmentation/__pycache__/fcn.cpython-311.pyc,,
|
| 245 |
+
torchvision/models/segmentation/__pycache__/lraspp.cpython-311.pyc,,
|
| 246 |
+
torchvision/models/segmentation/_utils.py,sha256=QfyqCtH_MJnIkKW5m-98GZD2MjtPYLtPTDi79pcIGhs,1197
|
| 247 |
+
torchvision/models/segmentation/deeplabv3.py,sha256=wVgXz21sugSck2KbG7WD-wgMwCAW0wd8jBGhgue300s,15015
|
| 248 |
+
torchvision/models/segmentation/fcn.py,sha256=I1FqaZZVPc3Fbg_7E2L5qpumnupxBYc7KYsW03EG_Cs,8973
|
| 249 |
+
torchvision/models/segmentation/lraspp.py,sha256=dt5DJ_qbDZlEM0SIuN87JU43JHfVlb8Oepp76KDv5tw,7643
|
| 250 |
+
torchvision/models/shufflenetv2.py,sha256=84FiPfkhJpSw6Q9Jmaug5MW5qmWCO3VhAPF61EiMn7Q,15444
|
| 251 |
+
torchvision/models/squeezenet.py,sha256=apjFPEI5nr_493bAQsR245EorzaMYXVQSqdcveyAfy0,8763
|
| 252 |
+
torchvision/models/swin_transformer.py,sha256=VwvnImWcjblashj0OONycDJnIkz-zRDpm365v_a0-zo,39337
|
| 253 |
+
torchvision/models/vgg.py,sha256=jYjIoY2jtKAc-aURCQsvbgBxup1Gh4fVZSt2NzFLlZY,19225
|
| 254 |
+
torchvision/models/video/__init__.py,sha256=O4HB-RaXgCtnvpMDAuMBaIeKIiYEkNxra_fmAHLUIJM,93
|
| 255 |
+
torchvision/models/video/__pycache__/__init__.cpython-311.pyc,,
|
| 256 |
+
torchvision/models/video/__pycache__/mvit.cpython-311.pyc,,
|
| 257 |
+
torchvision/models/video/__pycache__/resnet.cpython-311.pyc,,
|
| 258 |
+
torchvision/models/video/__pycache__/s3d.cpython-311.pyc,,
|
| 259 |
+
torchvision/models/video/__pycache__/swin_transformer.cpython-311.pyc,,
|
| 260 |
+
torchvision/models/video/mvit.py,sha256=0AZ31K5QcUBWZUUPTI1FCCM2Fma95bPs1o82zzpw2i0,32998
|
| 261 |
+
torchvision/models/video/resnet.py,sha256=RUnbUXFmoWNo_XbEKLmVSM8LUDcyv6jGZJ8GGpZi_6U,16771
|
| 262 |
+
torchvision/models/video/s3d.py,sha256=jx9gMP18Bzb7UO3vjejVBHlrCrJPdWFDfTn7XeU5kMg,7815
|
| 263 |
+
torchvision/models/video/swin_transformer.py,sha256=3GMyPGPeMcwJ1p9TGiRbpIlP-G7Qv_jWNbZmqIwMNyA,27688
|
| 264 |
+
torchvision/models/vision_transformer.py,sha256=O4mdBjYFsp-HTZA9bXfux_wJzIPRv1uS43PjuNh52zc,32136
|
| 265 |
+
torchvision/ops/__init__.py,sha256=eVv16QSBwgKaojOUHMPCy4ou9ZeFh-HoCV4DpqrZG4U,1928
|
| 266 |
+
torchvision/ops/__pycache__/__init__.cpython-311.pyc,,
|
| 267 |
+
torchvision/ops/__pycache__/_box_convert.cpython-311.pyc,,
|
| 268 |
+
torchvision/ops/__pycache__/_register_onnx_ops.cpython-311.pyc,,
|
| 269 |
+
torchvision/ops/__pycache__/_utils.cpython-311.pyc,,
|
| 270 |
+
torchvision/ops/__pycache__/boxes.cpython-311.pyc,,
|
| 271 |
+
torchvision/ops/__pycache__/ciou_loss.cpython-311.pyc,,
|
| 272 |
+
torchvision/ops/__pycache__/deform_conv.cpython-311.pyc,,
|
| 273 |
+
torchvision/ops/__pycache__/diou_loss.cpython-311.pyc,,
|
| 274 |
+
torchvision/ops/__pycache__/drop_block.cpython-311.pyc,,
|
| 275 |
+
torchvision/ops/__pycache__/feature_pyramid_network.cpython-311.pyc,,
|
| 276 |
+
torchvision/ops/__pycache__/focal_loss.cpython-311.pyc,,
|
| 277 |
+
torchvision/ops/__pycache__/giou_loss.cpython-311.pyc,,
|
| 278 |
+
torchvision/ops/__pycache__/misc.cpython-311.pyc,,
|
| 279 |
+
torchvision/ops/__pycache__/poolers.cpython-311.pyc,,
|
| 280 |
+
torchvision/ops/__pycache__/ps_roi_align.cpython-311.pyc,,
|
| 281 |
+
torchvision/ops/__pycache__/ps_roi_pool.cpython-311.pyc,,
|
| 282 |
+
torchvision/ops/__pycache__/roi_align.cpython-311.pyc,,
|
| 283 |
+
torchvision/ops/__pycache__/roi_pool.cpython-311.pyc,,
|
| 284 |
+
torchvision/ops/__pycache__/stochastic_depth.cpython-311.pyc,,
|
| 285 |
+
torchvision/ops/_box_convert.py,sha256=_bRRpErwk03rcPuscO1tCI9v3l88oNlDBDl2jzPlbKo,2409
|
| 286 |
+
torchvision/ops/_register_onnx_ops.py,sha256=Fyb1kC2m2OqZdfW_M86pt9-S66e1qNUhXNu1EQRa034,4181
|
| 287 |
+
torchvision/ops/_utils.py,sha256=pVHPpsmx6XcfGjUVk-XAEnd8QJBkrw_cT6fO_IwICE4,3630
|
| 288 |
+
torchvision/ops/boxes.py,sha256=n1aBkhkQYOwYdjkQMv5S9_G1NhpaBhmx3iwuJAq3nC8,16363
|
| 289 |
+
torchvision/ops/ciou_loss.py,sha256=3HClrMMKOJ3bndIUinNp3cp6Cim4-ZmmfuLn1-NPDUo,2756
|
| 290 |
+
torchvision/ops/deform_conv.py,sha256=fJxkVR_p_OQMzMja4flvmTgqDPvrOOcwzDG8bV7Q7pE,6990
|
| 291 |
+
torchvision/ops/diou_loss.py,sha256=tssNJhII4WT-wmidFS8gFNteQIAJz-Nd1Q7Trz1BjIY,3362
|
| 292 |
+
torchvision/ops/drop_block.py,sha256=A4EGIl7txrU_QmkI1N0W9hfd8tq8yx6zq32oYXaddLQ,5855
|
| 293 |
+
torchvision/ops/feature_pyramid_network.py,sha256=mfkaygxRz-0TAdTMq2fCAL-E0WxlRnTfdb-s_J5qPE4,8702
|
| 294 |
+
torchvision/ops/focal_loss.py,sha256=9kFqGyA0-hodRw9Au74k-FuS14OhsAvbFxDGvpx08Sg,2261
|
| 295 |
+
torchvision/ops/giou_loss.py,sha256=OXSaMZDZ0qy7jgaQ9exB_DMQXzcATBAFiIjzSlOV-bQ,2696
|
| 296 |
+
torchvision/ops/misc.py,sha256=yFnK7GT9OCMfDrn4NtQXKdh5broi1xocL94SoyqhWuw,13572
|
| 297 |
+
torchvision/ops/poolers.py,sha256=zzYhH7poMwGlYxDvAvCaL9emg9X7sM4xZFLEy0zvv5s,11920
|
| 298 |
+
torchvision/ops/ps_roi_align.py,sha256=4iAbeUVTessAcxvJhuARN_aFGUTZC9R4KrKC_mBH3MQ,3625
|
| 299 |
+
torchvision/ops/ps_roi_pool.py,sha256=jOv-2pAZdLFvvt4r4NwiRfxU5WAOy_vi6gxZjMvlusw,2870
|
| 300 |
+
torchvision/ops/roi_align.py,sha256=Ig9jLul90wBM3kaZuYEutsJEXfaCo3D0s_PxYMr9jQc,11292
|
| 301 |
+
torchvision/ops/roi_pool.py,sha256=70ou6Xc7qJxKe3SC54QIW3L99PoS0gLlwGocaYDbD2w,2943
|
| 302 |
+
torchvision/ops/stochastic_depth.py,sha256=ISZ9noJyZLxpTG-wa2VmPs66qjhVsP7ZxWHvumWSP3U,2236
|
| 303 |
+
torchvision/transforms/__init__.py,sha256=EMft42B1JAiU11J1rxIN4Znis6EJPbp-bsGjAzH-24M,53
|
| 304 |
+
torchvision/transforms/__pycache__/__init__.cpython-311.pyc,,
|
| 305 |
+
torchvision/transforms/__pycache__/_functional_pil.cpython-311.pyc,,
|
| 306 |
+
torchvision/transforms/__pycache__/_functional_tensor.cpython-311.pyc,,
|
| 307 |
+
torchvision/transforms/__pycache__/_functional_video.cpython-311.pyc,,
|
| 308 |
+
torchvision/transforms/__pycache__/_presets.cpython-311.pyc,,
|
| 309 |
+
torchvision/transforms/__pycache__/_transforms_video.cpython-311.pyc,,
|
| 310 |
+
torchvision/transforms/__pycache__/autoaugment.cpython-311.pyc,,
|
| 311 |
+
torchvision/transforms/__pycache__/functional.cpython-311.pyc,,
|
| 312 |
+
torchvision/transforms/__pycache__/transforms.cpython-311.pyc,,
|
| 313 |
+
torchvision/transforms/_functional_pil.py,sha256=TXZK3Y0huFHhXUGPin6ET5ToNoCbgdNGy65f8MPSpM0,12070
|
| 314 |
+
torchvision/transforms/_functional_tensor.py,sha256=3cEs8IYfRNQyff5Iriv--cZTWOIfvw2eaWiHU1-94AE,33939
|
| 315 |
+
torchvision/transforms/_functional_video.py,sha256=YcV557YglbJsq9SRGJHFoRbtxawiLSJ1oM5rV75OyqQ,3857
|
| 316 |
+
torchvision/transforms/_presets.py,sha256=RAjD6DgpU4QnNxV0MfZ3uHgzuARf-cdxD3Vo_KKIYeY,8510
|
| 317 |
+
torchvision/transforms/_transforms_video.py,sha256=Buz5LCWVPGiEonHE-cXIXfbkBhNc0qxVraxkNdxKp8o,4950
|
| 318 |
+
torchvision/transforms/autoaugment.py,sha256=JcbdEDbR0-OqTE4cwkhVSB45woFZQ_Fq5xmjFu_3bjg,28243
|
| 319 |
+
torchvision/transforms/functional.py,sha256=r9DojEr-0oqCOLuSMH0B4kWtI3UEbY_4jS7RBWDZKqM,67855
|
| 320 |
+
torchvision/transforms/transforms.py,sha256=eRIUr0I1i7BMqrXm4xsBJQYYGpvIkDr_VMsctQOI0M4,85547
|
| 321 |
+
torchvision/transforms/v2/__init__.py,sha256=UUJgzZguNPl7B33Zt3gexO7gSApSuWHTpzE7fNXQpA0,1545
|
| 322 |
+
torchvision/transforms/v2/__pycache__/__init__.cpython-311.pyc,,
|
| 323 |
+
torchvision/transforms/v2/__pycache__/_augment.cpython-311.pyc,,
|
| 324 |
+
torchvision/transforms/v2/__pycache__/_auto_augment.cpython-311.pyc,,
|
| 325 |
+
torchvision/transforms/v2/__pycache__/_color.cpython-311.pyc,,
|
| 326 |
+
torchvision/transforms/v2/__pycache__/_container.cpython-311.pyc,,
|
| 327 |
+
torchvision/transforms/v2/__pycache__/_deprecated.cpython-311.pyc,,
|
| 328 |
+
torchvision/transforms/v2/__pycache__/_geometry.cpython-311.pyc,,
|
| 329 |
+
torchvision/transforms/v2/__pycache__/_meta.cpython-311.pyc,,
|
| 330 |
+
torchvision/transforms/v2/__pycache__/_misc.cpython-311.pyc,,
|
| 331 |
+
torchvision/transforms/v2/__pycache__/_temporal.cpython-311.pyc,,
|
| 332 |
+
torchvision/transforms/v2/__pycache__/_transform.cpython-311.pyc,,
|
| 333 |
+
torchvision/transforms/v2/__pycache__/_type_conversion.cpython-311.pyc,,
|
| 334 |
+
torchvision/transforms/v2/__pycache__/_utils.cpython-311.pyc,,
|
| 335 |
+
torchvision/transforms/v2/_augment.py,sha256=NtbxWHrD1tbBJ9LVGcYsEv1tlHqpQyYNE23aH0NZ868,16159
|
| 336 |
+
torchvision/transforms/v2/_auto_augment.py,sha256=sQWkEF1N17XU4F6nBGva7kUuiuGNEOCAGHYGn8oa0A8,32025
|
| 337 |
+
torchvision/transforms/v2/_color.py,sha256=YHc7vhv4XR0CfSmEUoGQDexbENjV_whIHi9c-JgPrpo,16990
|
| 338 |
+
torchvision/transforms/v2/_container.py,sha256=SFh-FU8ceir934hxS_VkbVQq0SxzGSULPaYpouJJhPs,6055
|
| 339 |
+
torchvision/transforms/v2/_deprecated.py,sha256=0kXQWo6x1D2Gg98pJ0wahiDHuJBGNvsadZwdFtOM5YE,1947
|
| 340 |
+
torchvision/transforms/v2/_geometry.py,sha256=Ux5ghMCEVwpYYKB4sBamJUIfRbz8EutjfI_cskbNnhk,67606
|
| 341 |
+
torchvision/transforms/v2/_meta.py,sha256=Pcrh0dKMgwfpHTdho8uXcYYfKtbHy36VVyz4o2umld0,1405
|
| 342 |
+
torchvision/transforms/v2/_misc.py,sha256=Y-QjkjKYGMJYQvRP1elB_5gSwsvJR-I2vCEheBLCwuo,19114
|
| 343 |
+
torchvision/transforms/v2/_temporal.py,sha256=ByHqYqy1KO1Rd-Cg-eynHQEnF4y7OaMGIeO44kl8QJw,906
|
| 344 |
+
torchvision/transforms/v2/_transform.py,sha256=008PBMswQWIc7dEmhWqm772_O4ciDY3rycGu08nhcME,8476
|
| 345 |
+
torchvision/transforms/v2/_type_conversion.py,sha256=f3J1wYeB_zTaF8mxIjoudDKCiljmWqLGszSS9DN5EsQ,2860
|
| 346 |
+
torchvision/transforms/v2/_utils.py,sha256=AjGKWomXlDX2I1jCd4ROkJr8nRtr3ofm3MdXRH3YTTo,8652
|
| 347 |
+
torchvision/transforms/v2/functional/__init__.py,sha256=4SDjzgj9e4oM4IUKy9YJAwXFnBoLpygd8sSM_7sMvK0,3546
|
| 348 |
+
torchvision/transforms/v2/functional/__pycache__/__init__.cpython-311.pyc,,
|
| 349 |
+
torchvision/transforms/v2/functional/__pycache__/_augment.cpython-311.pyc,,
|
| 350 |
+
torchvision/transforms/v2/functional/__pycache__/_color.cpython-311.pyc,,
|
| 351 |
+
torchvision/transforms/v2/functional/__pycache__/_deprecated.cpython-311.pyc,,
|
| 352 |
+
torchvision/transforms/v2/functional/__pycache__/_geometry.cpython-311.pyc,,
|
| 353 |
+
torchvision/transforms/v2/functional/__pycache__/_meta.cpython-311.pyc,,
|
| 354 |
+
torchvision/transforms/v2/functional/__pycache__/_misc.cpython-311.pyc,,
|
| 355 |
+
torchvision/transforms/v2/functional/__pycache__/_temporal.cpython-311.pyc,,
|
| 356 |
+
torchvision/transforms/v2/functional/__pycache__/_type_conversion.cpython-311.pyc,,
|
| 357 |
+
torchvision/transforms/v2/functional/__pycache__/_utils.cpython-311.pyc,,
|
| 358 |
+
torchvision/transforms/v2/functional/_augment.py,sha256=MRM8E3_gKfTTC0qFt3cKI4UxTxQtuGI9MeY2mBsrj04,3473
|
| 359 |
+
torchvision/transforms/v2/functional/_color.py,sha256=nUASg1bTHmsf2AT_1Q7CLNXhObrRPbB1w2fDuz9k5e8,30244
|
| 360 |
+
torchvision/transforms/v2/functional/_deprecated.py,sha256=ycYZLDwDyd612aPbTKIV3gqhCRLMdF03MQELct4LeGs,801
|
| 361 |
+
torchvision/transforms/v2/functional/_geometry.py,sha256=5QL4IdQV72PkJX61c4A5M4WLq60ihTQB6g1PE9tMqmM,87520
|
| 362 |
+
torchvision/transforms/v2/functional/_meta.py,sha256=AxTEF6mdybAW1lC_DcjfKlxvSuiVupnqbJJrqS5x4lc,10547
|
| 363 |
+
torchvision/transforms/v2/functional/_misc.py,sha256=OXu4GTCF9i_1lz7T62gKcEs94faBO7wyYmpUOCnkUEY,17517
|
| 364 |
+
torchvision/transforms/v2/functional/_temporal.py,sha256=24CQCXXO12TnW7aUiUQdrk5DRSpTPONjjC4jaGh3lH4,1136
|
| 365 |
+
torchvision/transforms/v2/functional/_type_conversion.py,sha256=78wl0dNPwX08jOCW6KcZSGy8RAQqyxMtdrTUQVQlUTM,869
|
| 366 |
+
torchvision/transforms/v2/functional/_utils.py,sha256=tsmwIF37Z9QnP9x3x4hAs1hLrcvL78GLkuO6Rq1EUTk,5479
|
| 367 |
+
torchvision/tv_tensors/__init__.py,sha256=C6N8p5aulpehsOBBmH1cPIY1xiOSASZVBfnlXgGvR_s,1509
|
| 368 |
+
torchvision/tv_tensors/__pycache__/__init__.cpython-311.pyc,,
|
| 369 |
+
torchvision/tv_tensors/__pycache__/_bounding_boxes.cpython-311.pyc,,
|
| 370 |
+
torchvision/tv_tensors/__pycache__/_dataset_wrapper.cpython-311.pyc,,
|
| 371 |
+
torchvision/tv_tensors/__pycache__/_image.cpython-311.pyc,,
|
| 372 |
+
torchvision/tv_tensors/__pycache__/_mask.cpython-311.pyc,,
|
| 373 |
+
torchvision/tv_tensors/__pycache__/_torch_function_helpers.cpython-311.pyc,,
|
| 374 |
+
torchvision/tv_tensors/__pycache__/_tv_tensor.cpython-311.pyc,,
|
| 375 |
+
torchvision/tv_tensors/__pycache__/_video.cpython-311.pyc,,
|
| 376 |
+
torchvision/tv_tensors/_bounding_boxes.py,sha256=_-bDwN1gnHpfnHXEK0O6bQrcEOv656VOliHOgoNstpw,4493
|
| 377 |
+
torchvision/tv_tensors/_dataset_wrapper.py,sha256=fNnk3CSXipBNFsmnsPpa10DRN0I_Ly4Xib2Y5Zng9Ro,24505
|
| 378 |
+
torchvision/tv_tensors/_image.py,sha256=bwx4n8qObrknE3xEIDJOs0vWJzCg4XISjtXR7ksJTgs,1934
|
| 379 |
+
torchvision/tv_tensors/_mask.py,sha256=s85DdYFK6cyrL0_MnhAC2jTJxZzL7MJ8DTx985JPVhQ,1478
|
| 380 |
+
torchvision/tv_tensors/_torch_function_helpers.py,sha256=81qDZqgzUeSgfSeWhsrw1Ukwltvf97WbwmKWHm7X8X0,2276
|
| 381 |
+
torchvision/tv_tensors/_tv_tensor.py,sha256=dGQJhvOVTjb1LVT5qPZLJxox30uDMmODB26Iz6TjVbc,6248
|
| 382 |
+
torchvision/tv_tensors/_video.py,sha256=4dQ5Rh_0ghPtaLVSOxVWXJv1uWi8ZKXlfbRsBZ3roxw,1416
|
| 383 |
+
torchvision/utils.py,sha256=cGBWrAicxrx1YECsTGm7m_JL1GaGXp_UmAA9rmIQ3t8,26734
|
| 384 |
+
torchvision/version.py,sha256=P_l-ZSRLCCu_2SuJrwuv_07WrX_5RAvKwEbhRkRj9vg,203
|
.venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (72.1.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp311-cp311-linux_x86_64
|
| 5 |
+
|
.venv/lib/python3.11/site-packages/torchvision-0.20.1.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
torchvision
|
.venv/lib/python3.11/site-packages/torio/_extension/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .utils import _init_ffmpeg, _LazyImporter
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
_FFMPEG_EXT = None
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def lazy_import_ffmpeg_ext():
    """Return the lazily-initialized FFmpeg extension proxy.

    A ``_LazyImporter`` wrapping ``_init_ffmpeg`` is created on the first
    call and cached in the module-level ``_FFMPEG_EXT``; subsequent calls
    return the cached proxy without touching FFmpeg.
    """
    global _FFMPEG_EXT
    if _FFMPEG_EXT is not None:
        return _FFMPEG_EXT
    _FFMPEG_EXT = _LazyImporter("_torio_ffmpeg", _init_ffmpeg)
    return _FFMPEG_EXT
|
.venv/lib/python3.11/site-packages/torio/_extension/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (613 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/torio/_extension/__pycache__/utils.cpython-311.pyc
ADDED
|
Binary file (7.59 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/torio/_extension/utils.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import types
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import torch
|
| 8 |
+
|
| 9 |
+
_LG = logging.getLogger(__name__)
|
| 10 |
+
_LIB_DIR = Path(__file__).parent.parent / "lib"
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class _LazyImporter(types.ModuleType):
|
| 14 |
+
"""Lazily import module/extension."""
|
| 15 |
+
|
| 16 |
+
def __init__(self, name, import_func):
|
| 17 |
+
super().__init__(name)
|
| 18 |
+
self.import_func = import_func
|
| 19 |
+
self.module = None
|
| 20 |
+
|
| 21 |
+
# Note:
|
| 22 |
+
# Python caches what was retrieved with `__getattr__`, so this method will not be
|
| 23 |
+
# called again for the same item.
|
| 24 |
+
def __getattr__(self, item):
|
| 25 |
+
self._import_once()
|
| 26 |
+
return getattr(self.module, item)
|
| 27 |
+
|
| 28 |
+
def __repr__(self):
|
| 29 |
+
if self.module is None:
|
| 30 |
+
return f"<module '{self.__module__}.{self.__class__.__name__}(\"{self.name}\")'>"
|
| 31 |
+
return repr(self.module)
|
| 32 |
+
|
| 33 |
+
def __dir__(self):
|
| 34 |
+
self._import_once()
|
| 35 |
+
return dir(self.module)
|
| 36 |
+
|
| 37 |
+
def _import_once(self):
|
| 38 |
+
if self.module is None:
|
| 39 |
+
self.module = self.import_func()
|
| 40 |
+
# Note:
|
| 41 |
+
# By attaching the module attributes to self,
|
| 42 |
+
# module attributes are directly accessible.
|
| 43 |
+
# This allows to avoid calling __getattr__ for every attribute access.
|
| 44 |
+
self.__dict__.update(self.module.__dict__)
|
| 45 |
+
|
| 46 |
+
def is_available(self):
|
| 47 |
+
try:
|
| 48 |
+
self._import_once()
|
| 49 |
+
except Exception:
|
| 50 |
+
return False
|
| 51 |
+
return True
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _get_lib_path(lib: str):
    """Return the expected path of shared library *lib* inside torio's ``lib`` directory."""
    ext = "pyd" if os.name == "nt" else "so"  # Windows extension modules use .pyd
    return _LIB_DIR / f"{lib}.{ext}"
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _load_lib(lib: str) -> bool:
    """Load an extension shared library into the PyTorch op registry.

    Note:
        When `torio` is deployed in `pex` format, the library file is not at
        its standard location. In that case `libtorio` is expected to be
        somewhere on the dynamic loader's search path, so that importing
        `_torio` makes the loader find and load `libtorio`. This is why the
        function must not raise when the library file is simply absent.

    Returns:
        bool:
            True if the library file is found AND the library loaded without failure.
            False if the library file is not found (e.g. the pex deployment
            described above, where the shared library lives in a
            non-standard location).
            If the file is found but loading fails, the exception propagates
            as-is (see below).

    Raises:
        Exception:
            Whatever the underlying `ctype.DLL` loading raises — typically
            `OSError` when a dynamic dependency is missing. The exception is
            deliberately not converted into a bool because such failures are
            not recoverable: users have to install the missing dependency.
    """
    lib_path = _get_lib_path(lib)
    if not lib_path.exists():
        return False
    torch.ops.load_library(lib_path)
    return True
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
_FFMPEG_VERS = ["6", "5", "4", ""]
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _find_versionsed_ffmpeg_extension(version: str):
|
| 102 |
+
ext = f"torio.lib._torio_ffmpeg{version}"
|
| 103 |
+
lib = f"libtorio_ffmpeg{version}"
|
| 104 |
+
|
| 105 |
+
if not importlib.util.find_spec(ext):
|
| 106 |
+
raise RuntimeError(f"FFmpeg{version} extension is not available.")
|
| 107 |
+
|
| 108 |
+
_load_lib(lib)
|
| 109 |
+
return importlib.import_module(ext)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def _find_ffmpeg_extension(ffmpeg_vers):
    """Try each candidate FFmpeg version suffix in order; return the first extension that loads.

    Raises ImportError when none of the candidates can be loaded.
    """
    for ffmpeg_ver in ffmpeg_vers:
        _LG.debug("Loading FFmpeg%s", ffmpeg_ver)
        try:
            ext = _find_versionsed_ffmpeg_extension(ffmpeg_ver)
        except Exception:
            # Best-effort probing: log (with traceback at DEBUG) and try the next version.
            _LG.debug("Failed to load FFmpeg%s extension.", ffmpeg_ver, exc_info=True)
            continue
        _LG.debug("Successfully loaded FFmpeg%s", ffmpeg_ver)
        return ext
    # Typo fix: "intialize" -> "initialize" in the user-facing error message.
    raise ImportError(
        f"Failed to initialize FFmpeg extension. Tried versions: {ffmpeg_vers}. "
        "Enable DEBUG logging to see more details about the error."
    )
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _get_ffmpeg_versions():
    """Return the candidate FFmpeg version suffixes, honoring TORIO_USE_FFMPEG_VERSION.

    When the environment variable is set, it must be one of the supported
    suffixes and becomes the sole candidate; otherwise the full default
    list is returned.
    """
    override = os.environ.get("TORIO_USE_FFMPEG_VERSION")
    if override is None:
        return _FFMPEG_VERS
    if override not in _FFMPEG_VERS:
        raise ValueError(
            f"The FFmpeg version '{override}' (read from TORIO_USE_FFMPEG_VERSION) "
            f"is not one of supported values. Possible values are {_FFMPEG_VERS}"
        )
    return [override]
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _init_ffmpeg():
    """Locate a loadable FFmpeg extension, initialize it, and return it."""
    versions = _get_ffmpeg_versions()
    ext = _find_ffmpeg_extension(versions)
    ext.init()
    # Cap the extension's log level at 8 — presumably to keep FFmpeg console
    # output quiet by default; confirm the level mapping against the extension.
    if ext.get_log_level() > 8:
        ext.set_log_level(8)
    return ext
|
.venv/lib/python3.11/site-packages/torio/utils/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (263 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/torio/utils/__pycache__/ffmpeg_utils.cpython-311.pyc
ADDED
|
Binary file (10.5 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/virtualenv/__init__.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from .run import cli_run, session_via_cli
|
| 4 |
+
from .version import __version__
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"__version__",
|
| 8 |
+
"cli_run",
|
| 9 |
+
"session_via_cli",
|
| 10 |
+
]
|
.venv/lib/python3.11/site-packages/virtualenv/__main__.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
from timeit import default_timer
|
| 7 |
+
|
| 8 |
+
LOGGER = logging.getLogger(__name__)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def run(args=None, options=None, env=None):
    """Execute one virtualenv CLI invocation and log a summary of the result.

    On subprocess failure the captured stdout/stderr are replayed and the
    process exits with the subprocess' return code.
    """
    env = os.environ if env is None else env
    started_at = default_timer()
    from virtualenv.run import cli_run  # noqa: PLC0415
    from virtualenv.util.error import ProcessCallFailedError  # noqa: PLC0415

    cli_args = sys.argv[1:] if args is None else args
    try:
        session = cli_run(cli_args, options, env)
        LOGGER.warning(LogSession(session, started_at))
    except ProcessCallFailedError as failure:
        print(f"subprocess call failed for {failure.cmd} with code {failure.code}")  # noqa: T201
        print(failure.out, file=sys.stdout, end="")  # noqa: T201
        print(failure.err, file=sys.stderr, end="")  # noqa: T201
        raise SystemExit(failure.code)  # noqa: B904
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class LogSession:
    """Deferred, human-readable summary of a completed virtualenv creation session.

    The summary string is built lazily in ``__str__`` so the elapsed time is
    measured when the message is actually rendered by the logger.
    """

    def __init__(self, session, start) -> None:
        self.session = session
        self.start = start

    def __str__(self) -> str:
        session = self.session
        elapsed_ms = (default_timer() - self.start) * 1000
        lines = [
            f"created virtual environment {session.creator.interpreter.spec} in {elapsed_ms:.0f}ms",
            f"  creator {session.creator!s}",
        ]
        if session.seeder.enabled:
            lines.append(f"  seeder {session.seeder!s}")
            installed = sorted(
                "==".join(entry.stem.split("-"))
                for entry in session.creator.purelib.iterdir()
                if entry.suffix == ".dist-info"
            )
            lines.append(f"    added seed packages: {', '.join(installed)}")
        if session.activators:
            names = ",".join(type(a).__name__ for a in session.activators)
            lines.append(f"  activators {names}")
        return "\n".join(lines)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def run_with_catch(args=None, env=None):
    """CLI entry-point wrapper: run virtualenv, converting any failure into a clean exit code."""
    from virtualenv.config.cli.parser import VirtualEnvOptions  # noqa: PLC0415

    env = os.environ if env is None else env
    options = VirtualEnvOptions()
    try:
        run(args, options, env)
    except (KeyboardInterrupt, SystemExit, Exception) as exception:
        try:
            # When the user asked for a traceback, re-raise so it is printed in full.
            if getattr(options, "with_traceback", False):
                raise
            # SystemExit(0) is a normal, successful exit - don't log it as an error.
            if not (isinstance(exception, SystemExit) and exception.code == 0):
                LOGGER.error("%s: %s", type(exception).__name__, exception)  # noqa: TRY400
            # Preserve an explicit exit code; any other failure maps to exit code 1.
            code = exception.code if isinstance(exception, SystemExit) else 1
            sys.exit(code)
        finally:
            for handler in LOGGER.handlers:  # force flush of log messages before the trace is printed
                handler.flush()
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# Allow running this module directly (e.g. `python -m virtualenv`).
if __name__ == "__main__":  # pragma: no cov
    run_with_catch()  # pragma: no cov
|
.venv/lib/python3.11/site-packages/virtualenv/info.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import platform
|
| 6 |
+
import sys
|
| 7 |
+
import tempfile
|
| 8 |
+
|
| 9 |
+
# Interpreter implementation name, e.g. "CPython" or "PyPy".
IMPLEMENTATION = platform.python_implementation()
IS_PYPY = IMPLEMENTATION == "PyPy"
IS_CPYTHON = IMPLEMENTATION == "CPython"
IS_WIN = sys.platform == "win32"
IS_MAC_ARM64 = sys.platform == "darwin" and platform.machine() == "arm64"
# Package root: two directory levels above this file.
ROOT = os.path.realpath(os.path.join(os.path.abspath(__file__), os.path.pardir, os.path.pardir))
# When running from a zipapp the "root" is a file (the archive), not a directory.
IS_ZIPAPP = os.path.isfile(ROOT)
# Lazily-computed caches used by the probe functions below.
_CAN_SYMLINK = _FS_CASE_SENSITIVE = _CFG_DIR = _DATA_DIR = None
LOGGER = logging.getLogger(__name__)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def fs_is_case_sensitive():
    """Return True when the filesystem distinguishes letter case (result is cached).

    Probes by creating a temp file with a mixed-case name and checking
    whether the all-lowercase variant appears to exist.
    """
    global _FS_CASE_SENSITIVE  # noqa: PLW0603

    if _FS_CASE_SENSITIVE is not None:
        return _FS_CASE_SENSITIVE
    with tempfile.NamedTemporaryFile(prefix="TmP") as probe:
        _FS_CASE_SENSITIVE = not os.path.exists(probe.name.lower())
    LOGGER.debug("filesystem is %scase-sensitive", "" if _FS_CASE_SENSITIVE else "not ")
    return _FS_CASE_SENSITIVE
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def fs_supports_symlink():
    """Return True when the current OS/filesystem can create symlinks (result is cached)."""
    global _CAN_SYMLINK  # noqa: PLW0603

    if _CAN_SYMLINK is None:
        can = False
        if hasattr(os, "symlink"):
            if IS_WIN:
                # On Windows os.symlink exists but may fail without the right
                # privileges, so probe by actually creating one.
                # NOTE(review): the probe symlink at *dest* is never removed,
                # leaving a stray entry in the temp directory — confirm intent.
                with tempfile.NamedTemporaryFile(prefix="TmP") as tmp_file:
                    temp_dir = os.path.dirname(tmp_file.name)
                    dest = os.path.join(temp_dir, f"{tmp_file.name}-{'b'}")
                    try:
                        os.symlink(tmp_file.name, dest)
                        can = True
                    except (OSError, NotImplementedError):
                        pass
                LOGGER.debug("symlink on filesystem does%s work", "" if can else " not")
            else:
                # POSIX platforms with os.symlink are assumed capable.
                can = True
        _CAN_SYMLINK = can
    return _CAN_SYMLINK
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def fs_path_id(path: str) -> str:
    """Return a canonical identity key for *path*, suitable for equality comparison.

    On a case-insensitive filesystem two paths that differ only in letter
    case name the same file, so the key is case-folded there; on a
    case-sensitive filesystem case is significant and the path is returned
    unchanged. (The original expression had the condition inverted: it
    folded distinct paths together on case-sensitive systems and treated
    identical files as different on case-insensitive ones.)
    """
    return path if fs_is_case_sensitive() else path.casefold()
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
__all__ = (
|
| 57 |
+
"IS_CPYTHON",
|
| 58 |
+
"IS_MAC_ARM64",
|
| 59 |
+
"IS_PYPY",
|
| 60 |
+
"IS_WIN",
|
| 61 |
+
"IS_ZIPAPP",
|
| 62 |
+
"ROOT",
|
| 63 |
+
"fs_is_case_sensitive",
|
| 64 |
+
"fs_path_id",
|
| 65 |
+
"fs_supports_symlink",
|
| 66 |
+
)
|
.venv/lib/python3.11/site-packages/virtualenv/report.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
# Map CLI verbosity counts (0..5) onto stdlib logging levels.
LEVELS = {
    0: logging.CRITICAL,
    1: logging.ERROR,
    2: logging.WARNING,
    3: logging.INFO,
    4: logging.DEBUG,
    5: logging.NOTSET,
}

# Highest verbosity value accepted by setup_report.
MAX_LEVEL = max(LEVELS.keys())
# Root logger: handlers installed here apply process-wide.
LOGGER = logging.getLogger()
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def setup_report(verbosity, show_pid=False):  # noqa: FBT002
    """Install a stdout logging handler matching *verbosity*; return the clamped verbosity."""
    _clean_handlers(LOGGER)
    verbosity = min(verbosity, MAX_LEVEL)  # pragma: no cover
    level = LEVELS[verbosity]

    fmt = "%(message)s"
    if level <= logging.DEBUG:
        # Debug-level runs get timing and source-location context per message.
        fmt = f"%(relativeCreated)d {fmt} [%(levelname)s %(module)s:%(lineno)d]"
    if show_pid:
        fmt = f"[%(process)d] {fmt}"

    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt))
    LOGGER.setLevel(logging.NOTSET)
    LOGGER.addHandler(handler)
    LOGGER.debug("setup logging to %s", logging.getLevelName(level))
    # distlib is chatty; keep only its errors.
    logging.getLogger("distlib").setLevel(logging.ERROR)
    return verbosity
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _clean_handlers(log):
|
| 42 |
+
for log_handler in list(log.handlers): # remove handlers of libraries
|
| 43 |
+
log.removeHandler(log_handler)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
__all__ = [
|
| 47 |
+
"LEVELS",
|
| 48 |
+
"MAX_LEVEL",
|
| 49 |
+
"setup_report",
|
| 50 |
+
]
|
.venv/lib/python3.11/site-packages/virtualenv/run/__init__.py
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
from functools import partial
|
| 6 |
+
|
| 7 |
+
from virtualenv.app_data import make_app_data
|
| 8 |
+
from virtualenv.config.cli.parser import VirtualEnvConfigParser
|
| 9 |
+
from virtualenv.report import LEVELS, setup_report
|
| 10 |
+
from virtualenv.run.session import Session
|
| 11 |
+
from virtualenv.seed.wheels.periodic_update import manual_upgrade
|
| 12 |
+
from virtualenv.version import __version__
|
| 13 |
+
|
| 14 |
+
from .plugin.activators import ActivationSelector
|
| 15 |
+
from .plugin.creators import CreatorSelector
|
| 16 |
+
from .plugin.discovery import get_discover
|
| 17 |
+
from .plugin.seeders import SeederSelector
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def cli_run(args, options=None, setup_logging=True, env=None):  # noqa: FBT002
    """
    Create a virtual environment given some command line interface arguments.

    :param args: the command line arguments
    :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options
    :param setup_logging: ``True`` if setup logging handlers, ``False`` to use handlers already registered
    :param env: environment variables to use
    :return: the session object of the creation (its structure for now is experimental and might change on short notice)
    """
    if env is None:
        env = os.environ
    # Build the session exactly as session_via_cli would, then execute it.
    session = session_via_cli(args, options, setup_logging, env)
    with session:  # the session is a context manager that handles its own cleanup
        session.run()
    return session
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def session_via_cli(args, options=None, setup_logging=True, env=None):  # noqa: FBT002
    """
    Create a virtualenv session (same as cli_run, but this does not perform the creation). Use this if you just want to
    query what the virtual environment would look like, but not actually create it.

    :param args: the command line arguments
    :param options: passing in a ``VirtualEnvOptions`` object allows return of the parsed options
    :param setup_logging: ``True`` if setup logging handlers, ``False`` to use handlers already registered
    :param env: environment variables to use
    :return: the session object of the creation (its structure for now is experimental and might change on short notice)
    """  # noqa: D205
    if env is None:
        env = os.environ
    parser, selectors = build_parser(args, options, setup_logging, env)
    options = parser.parse_args(args)
    # Materialize each plugin selector into its concrete component.
    creator, seeder, activators = (selector.create(options) for selector in selectors)
    return Session(
        options.verbosity,
        options.app_data,
        parser._interpreter,  # noqa: SLF001
        creator,
        seeder,
        activators,
    )
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def build_parser(args=None, options=None, setup_logging=True, env=None):  # noqa: FBT002
    """Assemble the configured CLI parser and the plugin selector elements it feeds."""
    if env is None:
        env = os.environ
    parser = VirtualEnvConfigParser(options, env)
    add_version_flag(parser)
    parser.add_argument(
        "--with-traceback",
        dest="with_traceback",
        action="store_true",
        default=False,
        help="on failure also display the stacktrace internals of virtualenv",
    )
    _do_report_setup(parser, args, setup_logging)
    options = load_app_data(args, parser, options)
    handle_extra_commands(options)

    # Resolve the target interpreter before plugin selectors are constructed,
    # since each selector is parameterized by it.
    discover = get_discover(parser, args)
    interpreter = discover.interpreter
    parser._interpreter = interpreter  # noqa: SLF001
    if interpreter is None:
        raise RuntimeError(f"failed to find interpreter for {discover}")
    selectors = [
        CreatorSelector(interpreter, parser),
        SeederSelector(interpreter, parser),
        ActivationSelector(interpreter, parser),
    ]
    options, _ = parser.parse_known_args(args)
    for selector in selectors:
        selector.handle_selected_arg_parse(options)
    parser.enable_help()
    return parser, selectors
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def build_parser_only(args=None):
    """Used to provide a parser for the doc generation."""
    parser, _ = build_parser(args)
    return parser
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def handle_extra_commands(options):
    """Run any standalone maintenance command and exit; no-op otherwise."""
    if not options.upgrade_embed_wheels:
        return
    # Upgrade the bundled wheels, then terminate with the upgrade's exit status.
    raise SystemExit(manual_upgrade(options.app_data, options.env))
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def load_app_data(args, parser, options):
    """Register the app-data related flags and return the (re-)parsed options."""
    # Read-only mode must be known before --app-data is declared, because the
    # default app data folder is constructed with it below.
    parser.add_argument(
        "--read-only-app-data",
        action="store_true",
        help="use app data folder in read-only mode (write operations will fail with error)",
    )
    options, _ = parser.parse_known_args(args, namespace=options)

    # here we need a write-able application data (e.g. the zipapp might need this for discovery cache)
    parser.add_argument(
        "--app-data",
        help="a data folder used as cache by the virtualenv",
        type=partial(make_app_data, read_only=options.read_only_app_data, env=options.env),
        default=make_app_data(None, read_only=options.read_only_app_data, env=options.env),
    )
    parser.add_argument(
        "--reset-app-data",
        action="store_true",
        help="start with empty app data folder",
    )
    parser.add_argument(
        "--upgrade-embed-wheels",
        action="store_true",
        help="trigger a manual update of the embedded wheels",
    )
    options, _ = parser.parse_known_args(args, namespace=options)
    if options.reset_app_data:
        options.app_data.reset()
    return options
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def add_version_flag(parser):
    """Attach a ``--version`` action that reports the package version and location."""
    import virtualenv  # noqa: PLC0415

    version_text = f"%(prog)s {__version__} from {virtualenv.__file__}"
    parser.add_argument(
        "--version",
        action="version",
        version=version_text,
        help="display the version of the virtualenv package and its location, then exit",
    )
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _do_report_setup(parser, args, setup_logging):
    """Declare the -v/-q verbosity flags and optionally configure log handlers."""
    mapping = ", ".join(f"{logging.getLevelName(lvl)}={count}" for count, lvl in sorted(LEVELS.items()))
    description = "verbosity = verbose - quiet, default {}, mapping => {}".format(
        logging.getLevelName(LEVELS[3]),
        mapping,
    )
    group = parser.add_argument_group(title="verbosity", description=description)
    exclusive = group.add_mutually_exclusive_group()
    exclusive.add_argument("-v", "--verbose", action="count", dest="verbose", help="increase verbosity", default=2)
    exclusive.add_argument("-q", "--quiet", action="count", dest="quiet", help="decrease verbosity", default=0)
    parsed, _ = parser.parse_known_args(args)
    if setup_logging:
        setup_report(parsed.verbosity)
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
# Public API of virtualenv.run: programmatic creation entry points.
__all__ = ["cli_run", "session_via_cli"]
|
.venv/lib/python3.11/site-packages/virtualenv/run/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (9.14 kB). View file
|
|
|