Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_binned_statistic.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_binomtest.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_censored_data.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_distr_params.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_entropy.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_ksstats.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_qmc.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_qmvnt.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_relative_risk.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_result_classes.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_sampling.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_survival.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_variation.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/contingency.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/mstats_basic.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/__init__.py +7 -0
- parrot/lib/python3.10/site-packages/gradio_client/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/__pycache__/client.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/__pycache__/data_classes.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/__pycache__/documentation.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/__pycache__/serializing.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/__pycache__/utils.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/client.py +1114 -0
- parrot/lib/python3.10/site-packages/gradio_client/data_classes.py +15 -0
- parrot/lib/python3.10/site-packages/gradio_client/documentation.py +266 -0
- parrot/lib/python3.10/site-packages/gradio_client/media_data.py +0 -0
- parrot/lib/python3.10/site-packages/gradio_client/serializing.py +550 -0
- parrot/lib/python3.10/site-packages/gradio_client/types.json +199 -0
- parrot/lib/python3.10/site-packages/gradio_client/utils.py +561 -0
- parrot/lib/python3.10/site-packages/gradio_client/version.txt +1 -0
- parrot/lib/python3.10/site-packages/hjson-3.1.0.dist-info/RECORD +77 -0
- parrot/lib/python3.10/site-packages/hjson-3.1.0.dist-info/entry_points.txt +2 -0
- parrot/lib/python3.10/site-packages/narwhals-1.8.3.dist-info/INSTALLER +1 -0
- parrot/lib/python3.10/site-packages/narwhals-1.8.3.dist-info/METADATA +168 -0
- parrot/lib/python3.10/site-packages/narwhals-1.8.3.dist-info/WHEEL +4 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/INSTALLER +1 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/LICENSE +20 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/METADATA +27 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/RECORD +15 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/REQUESTED +0 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/entry_points.txt +3 -0
- parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/top_level.txt +1 -0
- parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/INSTALLER +1 -0
- parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/METADATA +57 -0
- parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/RECORD +663 -0
- parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/REQUESTED +0 -0
- parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/WHEEL +4 -0
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_binned_statistic.cpython-310.pyc
ADDED
|
Binary file (28.2 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_binomtest.cpython-310.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_censored_data.cpython-310.pyc
ADDED
|
Binary file (17.2 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_distr_params.cpython-310.pyc
ADDED
|
Binary file (6.69 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_entropy.cpython-310.pyc
ADDED
|
Binary file (15.1 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_ksstats.cpython-310.pyc
ADDED
|
Binary file (11.9 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_extras.cpython-310.pyc
ADDED
|
Binary file (15.4 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_qmc.cpython-310.pyc
ADDED
|
Binary file (88.9 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_qmvnt.cpython-310.pyc
ADDED
|
Binary file (13.5 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_relative_risk.cpython-310.pyc
ADDED
|
Binary file (8.32 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_result_classes.cpython-310.pyc
ADDED
|
Binary file (1.02 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_sampling.cpython-310.pyc
ADDED
|
Binary file (43 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_survival.cpython-310.pyc
ADDED
|
Binary file (22.8 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_tukeylambda_stats.cpython-310.pyc
ADDED
|
Binary file (3.09 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_variation.cpython-310.pyc
ADDED
|
Binary file (4.59 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/_warnings_errors.cpython-310.pyc
ADDED
|
Binary file (1.72 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/contingency.cpython-310.pyc
ADDED
|
Binary file (15.1 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/scipy/stats/__pycache__/mstats_basic.cpython-310.pyc
ADDED
|
Binary file (1.29 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from gradio_client.client import Client
|
| 2 |
+
from gradio_client.utils import __version__
|
| 3 |
+
|
| 4 |
+
__all__ = [
|
| 5 |
+
"Client",
|
| 6 |
+
"__version__",
|
| 7 |
+
]
|
parrot/lib/python3.10/site-packages/gradio_client/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (304 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/__pycache__/client.cpython-310.pyc
ADDED
|
Binary file (39.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/__pycache__/data_classes.cpython-310.pyc
ADDED
|
Binary file (646 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/__pycache__/documentation.cpython-310.pyc
ADDED
|
Binary file (6.56 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/__pycache__/serializing.cpython-310.pyc
ADDED
|
Binary file (18.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/client.py
ADDED
|
@@ -0,0 +1,1114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""The main Client class for the Python client."""
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import concurrent.futures
|
| 5 |
+
import json
|
| 6 |
+
import os
|
| 7 |
+
import re
|
| 8 |
+
import tempfile
|
| 9 |
+
import threading
|
| 10 |
+
import time
|
| 11 |
+
import urllib.parse
|
| 12 |
+
import uuid
|
| 13 |
+
import warnings
|
| 14 |
+
from concurrent.futures import Future, TimeoutError
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
from pathlib import Path
|
| 17 |
+
from threading import Lock
|
| 18 |
+
from typing import Any, Callable, Literal
|
| 19 |
+
|
| 20 |
+
import huggingface_hub
|
| 21 |
+
import requests
|
| 22 |
+
import websockets
|
| 23 |
+
from huggingface_hub import SpaceHardware, SpaceStage
|
| 24 |
+
from huggingface_hub.utils import (
|
| 25 |
+
RepositoryNotFoundError,
|
| 26 |
+
build_hf_headers,
|
| 27 |
+
send_telemetry,
|
| 28 |
+
)
|
| 29 |
+
from packaging import version
|
| 30 |
+
|
| 31 |
+
from gradio_client import serializing, utils
|
| 32 |
+
from gradio_client.documentation import document, set_documentation_group
|
| 33 |
+
from gradio_client.serializing import Serializable
|
| 34 |
+
from gradio_client.utils import (
|
| 35 |
+
Communicator,
|
| 36 |
+
JobStatus,
|
| 37 |
+
Status,
|
| 38 |
+
StatusUpdate,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
set_documentation_group("py-client")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
DEFAULT_TEMP_DIR = os.environ.get("GRADIO_TEMP_DIR") or str(
|
| 45 |
+
Path(tempfile.gettempdir()) / "gradio"
|
| 46 |
+
)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@document("predict", "submit", "view_api", "duplicate")
|
| 50 |
+
class Client:
|
| 51 |
+
"""
|
| 52 |
+
The main Client class for the Python client. This class is used to connect to a remote Gradio app and call its API endpoints.
|
| 53 |
+
|
| 54 |
+
Example:
|
| 55 |
+
from gradio_client import Client
|
| 56 |
+
|
| 57 |
+
client = Client("abidlabs/whisper-large-v2") # connecting to a Hugging Face Space
|
| 58 |
+
client.predict("test.mp4", api_name="/predict")
|
| 59 |
+
>> What a nice recording! # returns the result of the remote API call
|
| 60 |
+
|
| 61 |
+
client = Client("https://bec81a83-5b5c-471e.gradio.live") # connecting to a temporary Gradio share URL
|
| 62 |
+
job = client.submit("hello", api_name="/predict") # runs the prediction in a background thread
|
| 63 |
+
job.result()
|
| 64 |
+
>> 49 # returns the result of the remote API call (blocking call)
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
def __init__(
|
| 68 |
+
self,
|
| 69 |
+
src: str,
|
| 70 |
+
hf_token: str | None = None,
|
| 71 |
+
max_workers: int = 40,
|
| 72 |
+
serialize: bool = True,
|
| 73 |
+
output_dir: str | Path | None = DEFAULT_TEMP_DIR,
|
| 74 |
+
verbose: bool = True,
|
| 75 |
+
):
|
| 76 |
+
"""
|
| 77 |
+
Parameters:
|
| 78 |
+
src: Either the name of the Hugging Face Space to load, (e.g. "abidlabs/whisper-large-v2") or the full URL (including "http" or "https") of the hosted Gradio app to load (e.g. "http://mydomain.com/app" or "https://bec81a83-5b5c-471e.gradio.live/").
|
| 79 |
+
hf_token: The Hugging Face token to use to access private Spaces. Automatically fetched if you are logged in via the Hugging Face Hub CLI. Obtain from: https://huggingface.co/settings/token
|
| 80 |
+
max_workers: The maximum number of thread workers that can be used to make requests to the remote Gradio app simultaneously.
|
| 81 |
+
serialize: Whether the client should serialize the inputs and deserialize the outputs of the remote API. If set to False, the client will pass the inputs and outputs as-is, without serializing/deserializing them. E.g. you if you set this to False, you'd submit an image in base64 format instead of a filepath, and you'd get back an image in base64 format from the remote API instead of a filepath.
|
| 82 |
+
output_dir: The directory to save files that are downloaded from the remote API. If None, reads from the GRADIO_TEMP_DIR environment variable. Defaults to a temporary directory on your machine.
|
| 83 |
+
verbose: Whether the client should print statements to the console.
|
| 84 |
+
"""
|
| 85 |
+
self.verbose = verbose
|
| 86 |
+
self.hf_token = hf_token
|
| 87 |
+
self.serialize = serialize
|
| 88 |
+
self.headers = build_hf_headers(
|
| 89 |
+
token=hf_token,
|
| 90 |
+
library_name="gradio_client",
|
| 91 |
+
library_version=utils.__version__,
|
| 92 |
+
)
|
| 93 |
+
self.space_id = None
|
| 94 |
+
self.output_dir = output_dir
|
| 95 |
+
|
| 96 |
+
if src.startswith("http://") or src.startswith("https://"):
|
| 97 |
+
_src = src if src.endswith("/") else src + "/"
|
| 98 |
+
else:
|
| 99 |
+
_src = self._space_name_to_src(src)
|
| 100 |
+
if _src is None:
|
| 101 |
+
raise ValueError(
|
| 102 |
+
f"Could not find Space: {src}. If it is a private Space, please provide an hf_token."
|
| 103 |
+
)
|
| 104 |
+
self.space_id = src
|
| 105 |
+
self.src = _src
|
| 106 |
+
state = self._get_space_state()
|
| 107 |
+
if state == SpaceStage.BUILDING:
|
| 108 |
+
if self.verbose:
|
| 109 |
+
print("Space is still building. Please wait...")
|
| 110 |
+
while self._get_space_state() == SpaceStage.BUILDING:
|
| 111 |
+
time.sleep(2) # so we don't get rate limited by the API
|
| 112 |
+
pass
|
| 113 |
+
if state in utils.INVALID_RUNTIME:
|
| 114 |
+
raise ValueError(
|
| 115 |
+
f"The current space is in the invalid state: {state}. "
|
| 116 |
+
"Please contact the owner to fix this."
|
| 117 |
+
)
|
| 118 |
+
if self.verbose:
|
| 119 |
+
print(f"Loaded as API: {self.src} ✔")
|
| 120 |
+
|
| 121 |
+
self.api_url = urllib.parse.urljoin(self.src, utils.API_URL)
|
| 122 |
+
self.ws_url = urllib.parse.urljoin(
|
| 123 |
+
self.src.replace("http", "ws", 1), utils.WS_URL
|
| 124 |
+
)
|
| 125 |
+
self.upload_url = urllib.parse.urljoin(self.src, utils.UPLOAD_URL)
|
| 126 |
+
self.reset_url = urllib.parse.urljoin(self.src, utils.RESET_URL)
|
| 127 |
+
self.config = self._get_config()
|
| 128 |
+
self.session_hash = str(uuid.uuid4())
|
| 129 |
+
|
| 130 |
+
self.endpoints = [
|
| 131 |
+
Endpoint(self, fn_index, dependency)
|
| 132 |
+
for fn_index, dependency in enumerate(self.config["dependencies"])
|
| 133 |
+
]
|
| 134 |
+
|
| 135 |
+
# Create a pool of threads to handle the requests
|
| 136 |
+
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers)
|
| 137 |
+
|
| 138 |
+
# Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1
|
| 139 |
+
threading.Thread(target=self._telemetry_thread).start()
|
| 140 |
+
|
| 141 |
+
@classmethod
|
| 142 |
+
def duplicate(
|
| 143 |
+
cls,
|
| 144 |
+
from_id: str,
|
| 145 |
+
to_id: str | None = None,
|
| 146 |
+
hf_token: str | None = None,
|
| 147 |
+
private: bool = True,
|
| 148 |
+
hardware: Literal[
|
| 149 |
+
"cpu-basic",
|
| 150 |
+
"cpu-upgrade",
|
| 151 |
+
"t4-small",
|
| 152 |
+
"t4-medium",
|
| 153 |
+
"a10g-small",
|
| 154 |
+
"a10g-large",
|
| 155 |
+
"a100-large",
|
| 156 |
+
]
|
| 157 |
+
| SpaceHardware
|
| 158 |
+
| None = None,
|
| 159 |
+
secrets: dict[str, str] | None = None,
|
| 160 |
+
sleep_timeout: int = 5,
|
| 161 |
+
max_workers: int = 40,
|
| 162 |
+
verbose: bool = True,
|
| 163 |
+
):
|
| 164 |
+
"""
|
| 165 |
+
Duplicates a Hugging Face Space under your account and returns a Client object
|
| 166 |
+
for the new Space. No duplication is created if the Space already exists in your
|
| 167 |
+
account (to override this, provide a new name for the new Space using `to_id`).
|
| 168 |
+
To use this method, you must provide an `hf_token` or be logged in via the Hugging
|
| 169 |
+
Face Hub CLI.
|
| 170 |
+
|
| 171 |
+
The new Space will be private by default and use the same hardware as the original
|
| 172 |
+
Space. This can be changed by using the `private` and `hardware` parameters. For
|
| 173 |
+
hardware upgrades (beyond the basic CPU tier), you may be required to provide
|
| 174 |
+
billing information on Hugging Face: https://huggingface.co/settings/billing
|
| 175 |
+
|
| 176 |
+
Parameters:
|
| 177 |
+
from_id: The name of the Hugging Face Space to duplicate in the format "{username}/{space_id}", e.g. "gradio/whisper".
|
| 178 |
+
to_id: The name of the new Hugging Face Space to create, e.g. "abidlabs/whisper-duplicate". If not provided, the new Space will be named "{your_HF_username}/{space_id}".
|
| 179 |
+
hf_token: The Hugging Face token to use to access private Spaces. Automatically fetched if you are logged in via the Hugging Face Hub CLI. Obtain from: https://huggingface.co/settings/token
|
| 180 |
+
private: Whether the new Space should be private (True) or public (False). Defaults to True.
|
| 181 |
+
hardware: The hardware tier to use for the new Space. Defaults to the same hardware tier as the original Space. Options include "cpu-basic", "cpu-upgrade", "t4-small", "t4-medium", "a10g-small", "a10g-large", "a100-large", subject to availability.
|
| 182 |
+
secrets: A dictionary of (secret key, secret value) to pass to the new Space. Defaults to None. Secrets are only used when the Space is duplicated for the first time, and are not updated if the duplicated Space already exists.
|
| 183 |
+
sleep_timeout: The number of minutes after which the duplicate Space will be puased if no requests are made to it (to minimize billing charges). Defaults to 5 minutes.
|
| 184 |
+
max_workers: The maximum number of thread workers that can be used to make requests to the remote Gradio app simultaneously.
|
| 185 |
+
verbose: Whether the client should print statements to the console.
|
| 186 |
+
Example:
|
| 187 |
+
import os
|
| 188 |
+
from gradio_client import Client
|
| 189 |
+
HF_TOKEN = os.environ.get("HF_TOKEN")
|
| 190 |
+
client = Client.duplicate("abidlabs/whisper", hf_token=HF_TOKEN)
|
| 191 |
+
client.predict("audio_sample.wav")
|
| 192 |
+
>> "This is a test of the whisper speech recognition model."
|
| 193 |
+
"""
|
| 194 |
+
try:
|
| 195 |
+
original_info = huggingface_hub.get_space_runtime(from_id, token=hf_token)
|
| 196 |
+
except RepositoryNotFoundError as rnfe:
|
| 197 |
+
raise ValueError(
|
| 198 |
+
f"Could not find Space: {from_id}. If it is a private Space, please provide an `hf_token`."
|
| 199 |
+
) from rnfe
|
| 200 |
+
if to_id:
|
| 201 |
+
if "/" in to_id:
|
| 202 |
+
to_id = to_id.split("/")[1]
|
| 203 |
+
space_id = huggingface_hub.get_full_repo_name(to_id, token=hf_token)
|
| 204 |
+
else:
|
| 205 |
+
space_id = huggingface_hub.get_full_repo_name(
|
| 206 |
+
from_id.split("/")[1], token=hf_token
|
| 207 |
+
)
|
| 208 |
+
try:
|
| 209 |
+
huggingface_hub.get_space_runtime(space_id, token=hf_token)
|
| 210 |
+
if verbose:
|
| 211 |
+
print(
|
| 212 |
+
f"Using your existing Space: {utils.SPACE_URL.format(space_id)} 🤗"
|
| 213 |
+
)
|
| 214 |
+
if secrets is not None:
|
| 215 |
+
warnings.warn(
|
| 216 |
+
"Secrets are only used when the Space is duplicated for the first time, and are not updated if the duplicated Space already exists."
|
| 217 |
+
)
|
| 218 |
+
except RepositoryNotFoundError:
|
| 219 |
+
if verbose:
|
| 220 |
+
print(f"Creating a duplicate of {from_id} for your own use... 🤗")
|
| 221 |
+
huggingface_hub.duplicate_space(
|
| 222 |
+
from_id=from_id,
|
| 223 |
+
to_id=space_id,
|
| 224 |
+
token=hf_token,
|
| 225 |
+
exist_ok=True,
|
| 226 |
+
private=private,
|
| 227 |
+
)
|
| 228 |
+
if secrets is not None:
|
| 229 |
+
for key, value in secrets.items():
|
| 230 |
+
huggingface_hub.add_space_secret(
|
| 231 |
+
space_id, key, value, token=hf_token
|
| 232 |
+
)
|
| 233 |
+
if verbose:
|
| 234 |
+
print(f"Created new Space: {utils.SPACE_URL.format(space_id)}")
|
| 235 |
+
current_info = huggingface_hub.get_space_runtime(space_id, token=hf_token)
|
| 236 |
+
current_hardware = (
|
| 237 |
+
current_info.hardware or huggingface_hub.SpaceHardware.CPU_BASIC
|
| 238 |
+
)
|
| 239 |
+
hardware = hardware or original_info.hardware
|
| 240 |
+
if current_hardware != hardware:
|
| 241 |
+
huggingface_hub.request_space_hardware(space_id, hardware) # type: ignore
|
| 242 |
+
print(
|
| 243 |
+
f"-------\nNOTE: this Space uses upgraded hardware: {hardware}... see billing info at https://huggingface.co/settings/billing\n-------"
|
| 244 |
+
)
|
| 245 |
+
# Setting a timeout only works if the hardware is not basic
|
| 246 |
+
# so set it here after the hardware has been requested
|
| 247 |
+
if hardware != huggingface_hub.SpaceHardware.CPU_BASIC:
|
| 248 |
+
utils.set_space_timeout(
|
| 249 |
+
space_id, hf_token=hf_token, timeout_in_seconds=sleep_timeout * 60
|
| 250 |
+
)
|
| 251 |
+
if verbose:
|
| 252 |
+
print("")
|
| 253 |
+
client = cls(
|
| 254 |
+
space_id, hf_token=hf_token, max_workers=max_workers, verbose=verbose
|
| 255 |
+
)
|
| 256 |
+
return client
|
| 257 |
+
|
| 258 |
+
def _get_space_state(self):
|
| 259 |
+
if not self.space_id:
|
| 260 |
+
return None
|
| 261 |
+
info = huggingface_hub.get_space_runtime(self.space_id, token=self.hf_token)
|
| 262 |
+
return info.stage
|
| 263 |
+
|
| 264 |
+
def predict(
|
| 265 |
+
self,
|
| 266 |
+
*args,
|
| 267 |
+
api_name: str | None = None,
|
| 268 |
+
fn_index: int | None = None,
|
| 269 |
+
) -> Any:
|
| 270 |
+
"""
|
| 271 |
+
Calls the Gradio API and returns the result (this is a blocking call).
|
| 272 |
+
|
| 273 |
+
Parameters:
|
| 274 |
+
args: The arguments to pass to the remote API. The order of the arguments must match the order of the inputs in the Gradio app.
|
| 275 |
+
api_name: The name of the API endpoint to call starting with a leading slash, e.g. "/predict". Does not need to be provided if the Gradio app has only one named API endpoint.
|
| 276 |
+
fn_index: As an alternative to api_name, this parameter takes the index of the API endpoint to call, e.g. 0. Both api_name and fn_index can be provided, but if they conflict, api_name will take precedence.
|
| 277 |
+
Returns:
|
| 278 |
+
The result of the API call. Will be a Tuple if the API has multiple outputs.
|
| 279 |
+
Example:
|
| 280 |
+
from gradio_client import Client
|
| 281 |
+
client = Client(src="gradio/calculator")
|
| 282 |
+
client.predict(5, "add", 4, api_name="/predict")
|
| 283 |
+
>> 9.0
|
| 284 |
+
"""
|
| 285 |
+
return self.submit(*args, api_name=api_name, fn_index=fn_index).result()
|
| 286 |
+
|
| 287 |
+
def submit(
|
| 288 |
+
self,
|
| 289 |
+
*args,
|
| 290 |
+
api_name: str | None = None,
|
| 291 |
+
fn_index: int | None = None,
|
| 292 |
+
result_callbacks: Callable | list[Callable] | None = None,
|
| 293 |
+
) -> Job:
|
| 294 |
+
"""
|
| 295 |
+
Creates and returns a Job object which calls the Gradio API in a background thread. The job can be used to retrieve the status and result of the remote API call.
|
| 296 |
+
|
| 297 |
+
Parameters:
|
| 298 |
+
args: The arguments to pass to the remote API. The order of the arguments must match the order of the inputs in the Gradio app.
|
| 299 |
+
api_name: The name of the API endpoint to call starting with a leading slash, e.g. "/predict". Does not need to be provided if the Gradio app has only one named API endpoint.
|
| 300 |
+
fn_index: As an alternative to api_name, this parameter takes the index of the API endpoint to call, e.g. 0. Both api_name and fn_index can be provided, but if they conflict, api_name will take precedence.
|
| 301 |
+
result_callbacks: A callback function, or list of callback functions, to be called when the result is ready. If a list of functions is provided, they will be called in order. The return values from the remote API are provided as separate parameters into the callback. If None, no callback will be called.
|
| 302 |
+
Returns:
|
| 303 |
+
A Job object that can be used to retrieve the status and result of the remote API call.
|
| 304 |
+
Example:
|
| 305 |
+
from gradio_client import Client
|
| 306 |
+
client = Client(src="gradio/calculator")
|
| 307 |
+
job = client.submit(5, "add", 4, api_name="/predict")
|
| 308 |
+
job.status()
|
| 309 |
+
>> <Status.STARTING: 'STARTING'>
|
| 310 |
+
job.result() # blocking call
|
| 311 |
+
>> 9.0
|
| 312 |
+
"""
|
| 313 |
+
inferred_fn_index = self._infer_fn_index(api_name, fn_index)
|
| 314 |
+
|
| 315 |
+
helper = None
|
| 316 |
+
if self.endpoints[inferred_fn_index].use_ws:
|
| 317 |
+
helper = Communicator(
|
| 318 |
+
Lock(),
|
| 319 |
+
JobStatus(),
|
| 320 |
+
self.endpoints[inferred_fn_index].process_predictions,
|
| 321 |
+
self.reset_url,
|
| 322 |
+
)
|
| 323 |
+
end_to_end_fn = self.endpoints[inferred_fn_index].make_end_to_end_fn(helper)
|
| 324 |
+
future = self.executor.submit(end_to_end_fn, *args)
|
| 325 |
+
|
| 326 |
+
job = Job(
|
| 327 |
+
future, communicator=helper, verbose=self.verbose, space_id=self.space_id
|
| 328 |
+
)
|
| 329 |
+
|
| 330 |
+
if result_callbacks:
|
| 331 |
+
if isinstance(result_callbacks, Callable):
|
| 332 |
+
result_callbacks = [result_callbacks]
|
| 333 |
+
|
| 334 |
+
def create_fn(callback) -> Callable:
|
| 335 |
+
def fn(future):
|
| 336 |
+
if isinstance(future.result(), tuple):
|
| 337 |
+
callback(*future.result())
|
| 338 |
+
else:
|
| 339 |
+
callback(future.result())
|
| 340 |
+
|
| 341 |
+
return fn
|
| 342 |
+
|
| 343 |
+
for callback in result_callbacks:
|
| 344 |
+
job.add_done_callback(create_fn(callback))
|
| 345 |
+
|
| 346 |
+
return job
|
| 347 |
+
|
| 348 |
+
def view_api(
    self,
    all_endpoints: bool | None = None,
    print_info: bool = True,
    return_format: Literal["dict", "str"] | None = None,
) -> dict | str | None:
    """
    Prints the usage info for the API. If the Gradio app has multiple API endpoints, the usage info for each endpoint will be printed separately. If return_format="dict" the info is returned in dictionary format, as shown in the example below.

    Parameters:
        all_endpoints: If True, prints information for both named and unnamed endpoints in the Gradio app. If False, will only print info about named endpoints. If None (default), will print info about named endpoints, unless there aren't any -- in which it will print info about unnamed endpoints.
        print_info: If True, prints the usage info to the console. If False, does not print the usage info.
        return_format: If None, nothing is returned. If "str", returns the same string that would be printed to the console. If "dict", returns the usage info as a dictionary that can be programmatically parsed, and *all endpoints are returned in the dictionary* regardless of the value of `all_endpoints`. The format of the dictionary is in the docstring of this method.
    Example:
        from gradio_client import Client
        client = Client(src="gradio/calculator")
        client.view_api(return_format="dict")
        >> {
            'named_endpoints': {
                '/predict': {
                    'parameters': [
                        {
                            'label': 'num1',
                            'type_python': 'int | float',
                            'type_description': 'numeric value',
                            'component': 'Number',
                            'example_input': '5'
                        },
                        {
                            'label': 'operation',
                            'type_python': 'str',
                            'type_description': 'string value',
                            'component': 'Radio',
                            'example_input': 'add'
                        },
                        {
                            'label': 'num2',
                            'type_python': 'int | float',
                            'type_description': 'numeric value',
                            'component': 'Number',
                            'example_input': '5'
                        },
                    ],
                    'returns': [
                        {
                            'label': 'output',
                            'type_python': 'int | float',
                            'type_description': 'numeric value',
                            'component': 'Number',
                        },
                    ]
                },
                '/flag': {
                    'parameters': [
                        ...
                    ],
                    'returns': [
                        ...
                    ]
                }
            },
            'unnamed_endpoints': {
                2: {
                    'parameters': [
                        ...
                    ],
                    'returns': [
                        ...
                    ]
                }
            }
        }
    """
    # Pick the serialized or raw API-info endpoint depending on whether this
    # client serializes inputs/outputs on behalf of the caller.
    if self.serialize:
        api_info_url = urllib.parse.urljoin(self.src, utils.API_INFO_URL)
    else:
        api_info_url = urllib.parse.urljoin(self.src, utils.RAW_API_INFO_URL)

    # Versions of Gradio older than 3.29.0 returned format of the API info
    # from the /info endpoint
    if version.parse(self.config.get("version", "2.0")) > version.Version("3.29.0"):
        r = requests.get(api_info_url, headers=self.headers)
        if r.ok:
            info = r.json()
        else:
            raise ValueError(f"Could not fetch api info for {self.src}")
    else:
        # Older apps: ask the hosted space-fetcher service to derive the API
        # info from the raw app config.
        fetch = requests.post(
            utils.SPACE_FETCHER_URL,
            json={"config": json.dumps(self.config), "serialize": self.serialize},
        )
        if fetch.ok:
            info = fetch.json()["api"]
        else:
            raise ValueError(f"Could not fetch api info for {self.src}")
    num_named_endpoints = len(info["named_endpoints"])
    num_unnamed_endpoints = len(info["unnamed_endpoints"])
    # Default behavior: show unnamed endpoints only when there are no named ones.
    if num_named_endpoints == 0 and all_endpoints is None:
        all_endpoints = True

    human_info = "Client.predict() Usage Info\n---------------------------\n"
    human_info += f"Named API endpoints: {num_named_endpoints}\n"

    for api_name, endpoint_info in info["named_endpoints"].items():
        human_info += self._render_endpoints_info(api_name, endpoint_info)

    if all_endpoints:
        human_info += f"\nUnnamed API endpoints: {num_unnamed_endpoints}\n"
        for fn_index, endpoint_info in info["unnamed_endpoints"].items():
            # When loading from json, the fn_indices are read as strings
            # because json keys can only be strings
            human_info += self._render_endpoints_info(int(fn_index), endpoint_info)
    else:
        if num_unnamed_endpoints > 0:
            human_info += f"\nUnnamed API endpoints: {num_unnamed_endpoints}, to view, run Client.view_api(all_endpoints=True)\n"

    if print_info:
        print(human_info)
    if return_format == "str":
        return human_info
    elif return_format == "dict":
        # NOTE: the dict always contains *all* endpoints, regardless of
        # the `all_endpoints` flag (which only affects the printed string).
        return info
|
| 472 |
+
|
| 473 |
+
def reset_session(self) -> None:
    """Start a fresh session by assigning a newly generated session hash."""
    fresh_hash = uuid.uuid4()
    self.session_hash = str(fresh_hash)
|
| 475 |
+
|
| 476 |
+
def _render_endpoints_info(
    self,
    name_or_index: str | int,
    endpoints_info: dict[str, list[dict[str, Any]]],
) -> str:
    """Render a human-readable usage summary for a single API endpoint.

    Parameters:
        name_or_index: the endpoint's api_name (str) or fn_index (int).
        endpoints_info: dict with "parameters" and "returns" lists describing
            each component's label, component type, and python type info.
    Returns:
        A multi-line string showing an example predict() call plus the
        parameter and return descriptions.
    """
    parameter_names = [
        utils.sanitize_parameter_names(p["label"])
        for p in endpoints_info["parameters"]
    ]
    rendered_parameters = ", ".join(parameter_names)
    # Trailing separator so the api_name/fn_index keyword follows cleanly.
    if rendered_parameters:
        rendered_parameters = rendered_parameters + ", "

    return_values = [
        utils.sanitize_parameter_names(r["label"])
        for r in endpoints_info["returns"]
    ]
    rendered_return_values = ", ".join(return_values)
    # Multiple outputs are shown as a tuple.
    if len(return_values) > 1:
        rendered_return_values = f"({rendered_return_values})"

    if isinstance(name_or_index, str):
        final_param = f'api_name="{name_or_index}"'
    elif isinstance(name_or_index, int):
        final_param = f"fn_index={name_or_index}"
    else:
        raise ValueError("name_or_index must be a string or integer")

    def _render_section(items: list[dict[str, Any]]) -> str:
        # One line per component; "- None" when the section is empty.
        if not items:
            return " - None\n"
        section = ""
        for info in items:
            desc = (
                f" ({info['python_type']['description']})"
                if info["python_type"].get("description")
                else ""
            )
            type_ = info["python_type"]["type"]
            section += f" - [{info['component']}] {utils.sanitize_parameter_names(info['label'])}: {type_}{desc} \n"
        return section

    human_info = f"\n - predict({rendered_parameters}{final_param}) -> {rendered_return_values}\n"
    human_info += " Parameters:\n"
    human_info += _render_section(endpoints_info["parameters"])
    human_info += " Returns:\n"
    human_info += _render_section(endpoints_info["returns"])

    return human_info
|
| 526 |
+
|
| 527 |
+
def __repr__(self):
    # Represent the client by its full API usage info (same as view_api()).
    return self.view_api(print_info=False, return_format="str")
|
| 529 |
+
|
| 530 |
+
def __str__(self):
    # Same as __repr__: print the client's API usage info.
    return self.view_api(print_info=False, return_format="str")
|
| 532 |
+
|
| 533 |
+
def _telemetry_thread(self) -> None:
    """Send a one-shot, best-effort 'client initiated' telemetry event."""
    # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1
    data = {
        "src": self.src,
    }
    try:
        send_telemetry(
            topic="py_client/initiated",
            library_name="gradio_client",
            library_version=utils.__version__,
            user_agent=data,
        )
    except Exception:
        # Telemetry must never interfere with the client; swallow all errors.
        pass
|
| 547 |
+
|
| 548 |
+
def _infer_fn_index(self, api_name: str | None, fn_index: int | None) -> int:
    """Resolve the fn_index for a call.

    Resolution order:
      1. If api_name is given, find the dependency whose configured api_name
         (with a "/" prefix) matches it; raise ValueError if none does.
      2. Otherwise, use the explicit fn_index if given.
      3. Otherwise, succeed only if the app exposes exactly one valid named
         endpoint; raise ValueError when the choice is ambiguous.
    """
    if api_name is not None:
        for index, dependency in enumerate(self.config["dependencies"]):
            config_api_name = dependency.get("api_name")
            # Skip dependencies with no name or an explicitly disabled API.
            if config_api_name is None or config_api_name is False:
                continue
            if "/" + config_api_name == api_name:
                return index
        error_message = f"Cannot find a function with `api_name`: {api_name}."
        if not api_name.startswith("/"):
            error_message += " Did you mean to use a leading slash?"
        raise ValueError(error_message)

    if fn_index is not None:
        return fn_index

    valid_endpoints = [
        e for e in self.endpoints if e.is_valid and e.api_name is not None
    ]
    if len(valid_endpoints) == 1:
        return valid_endpoints[0].fn_index
    raise ValueError(
        "This Gradio app might have multiple endpoints. Please specify an `api_name` or `fn_index`"
    )
|
| 576 |
+
|
| 577 |
+
def __del__(self):
    # Shut down the background executor when the client is garbage-collected.
    # hasattr guard: __init__ may have failed before the executor was created.
    if hasattr(self, "executor"):
        self.executor.shutdown(wait=True)
|
| 580 |
+
|
| 581 |
+
def _space_name_to_src(self, space) -> str | None:
    """Resolve a Hugging Face Space name (e.g. "user/space") to its hosted URL."""
    return huggingface_hub.space_info(space, token=self.hf_token).host  # type: ignore
|
| 583 |
+
|
| 584 |
+
def _get_config(self) -> dict:
    """Fetch the Gradio app config.

    Tries the dedicated config endpoint first; for older Gradio versions,
    falls back to scraping `window.gradio_config` out of the app's HTML.
    Raises ValueError if no config can be extracted or if the app is
    Gradio 2.x (unsupported).
    """
    r = requests.get(
        urllib.parse.urljoin(self.src, utils.CONFIG_URL), headers=self.headers
    )
    if r.ok:
        return r.json()
    else:  # to support older versions of Gradio
        r = requests.get(self.src, headers=self.headers)
        # some basic regex to extract the config
        result = re.search(r"window.gradio_config = (.*?);[\s]*</script>", r.text)
        try:
            # result is None when the pattern is absent -> AttributeError below.
            config = json.loads(result.group(1))  # type: ignore
        except AttributeError as ae:
            raise ValueError(
                f"Could not get Gradio config from: {self.src}"
            ) from ae
        # "allow_flagging" at the config top level is a Gradio 2.x marker.
        if "allow_flagging" in config:
            raise ValueError(
                "Gradio 2.x is not supported by this client. Please upgrade your Gradio app to Gradio 3.x or higher."
            )
        return config
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
class Endpoint:
    """Helper class for storing all the information about a single API endpoint."""

    def __init__(self, client: Client, fn_index: int, dependency: dict):
        """
        Parameters:
            client: the owning Client (supplies config, URLs, auth headers).
            fn_index: index of this dependency in the app's config.
            dependency: the raw dependency dict from the app's config.
        """
        self.client: Client = client
        self.fn_index = fn_index
        self.dependency = dependency
        api_name = dependency.get("api_name")
        # Normalize: None/False (unnamed or explicitly disabled) -> None;
        # otherwise prefix with "/" to match the public endpoint path.
        self.api_name: str | None = (
            None if (api_name is None or api_name is False) else "/" + api_name
        )
        self.use_ws = self._use_websocket(self.dependency)
        self.input_component_types = []
        self.output_component_types = []
        # Ensure a trailing slash so relative paths join correctly.
        self.root_url = client.src + "/" if not client.src.endswith("/") else client.src
        try:
            # Only a real API endpoint if backend_fn is True (so not just a frontend function), serializers are valid,
            # and api_name is not False (meaning that the developer has explicitly disabled the API endpoint)
            self.serializers, self.deserializers = self._setup_serializers()
            self.is_valid = self.dependency["backend_fn"] and self.api_name is not False
        except AssertionError:
            # _setup_serializers asserts on unknown components/serializers;
            # treat such endpoints as unusable instead of crashing the client.
            self.is_valid = False

    def __repr__(self):
        return f"Endpoint src: {self.client.src}, api_name: {self.api_name}, fn_index: {self.fn_index}"

    def __str__(self):
        return self.__repr__()

    def make_end_to_end_fn(self, helper: Communicator | None = None):
        """Build the full pipeline fn: state-insert -> serialize -> predict -> postprocess."""
        _predict = self.make_predict(helper)

        def _inner(*data):
            if not self.is_valid:
                raise utils.InvalidAPIEndpointError()
            data = self.insert_state(*data)
            if self.client.serialize:
                data = self.serialize(*data)
            predictions = _predict(*data)
            predictions = self.process_predictions(*predictions)
            # Append final output only if not already present
            # for consistency between generators and not generators
            if helper:
                with helper.lock:
                    if not helper.job.outputs:
                        helper.job.outputs.append(predictions)
            return predictions

        return _inner

    def make_predict(self, helper: Communicator | None = None):
        """Build the raw prediction fn (websocket queue or plain HTTP POST)."""

        def _predict(*data) -> tuple:
            # Full request payload for the prediction call.
            data = json.dumps(
                {
                    "data": data,
                    "fn_index": self.fn_index,
                    "session_hash": self.client.session_hash,
                }
            )
            # Smaller payload used to join the websocket queue.
            hash_data = json.dumps(
                {
                    "fn_index": self.fn_index,
                    "session_hash": self.client.session_hash,
                }
            )

            if self.use_ws:
                result = utils.synchronize_async(self._ws_fn, data, hash_data, helper)
                if "error" in result:
                    raise ValueError(result["error"])
            else:
                response = requests.post(
                    self.client.api_url, headers=self.client.headers, data=data
                )
                result = json.loads(response.content.decode("utf-8"))
            try:
                output = result["data"]
            except KeyError as ke:
                # Missing "data": map known error shapes to friendlier exceptions.
                is_public_space = (
                    self.client.space_id
                    and not huggingface_hub.space_info(self.client.space_id).private
                )
                if "error" in result and "429" in result["error"] and is_public_space:
                    raise utils.TooManyRequestsError(
                        f"Too many requests to the API, please try again later. To avoid being rate-limited, "
                        f"please duplicate the Space using Client.duplicate({self.client.space_id}) "
                        f"and pass in your Hugging Face token."
                    ) from None
                elif "error" in result:
                    raise ValueError(result["error"]) from None
                raise KeyError(
                    f"Could not find 'data' key in response. Response received: {result}"
                ) from ke
            return tuple(output)

        return _predict

    def _predict_resolve(self, *data) -> Any:
        """Needed for gradio.load(), which has a slightly different signature for serializing/deserializing"""
        outputs = self.make_predict()(*data)
        if len(self.dependency["outputs"]) == 1:
            return outputs[0]
        return outputs

    def _upload(
        self, file_paths: list[str | list[str]]
    ) -> list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]]:
        """Upload local files to the app; returns FileData-style dicts mirroring
        the input's (possibly nested) structure. On upload failure, returns the
        original paths unchanged."""
        if not file_paths:
            return []
        # Put all the filepaths in one file
        # but then keep track of which index in the
        # original list they came from so we can recreate
        # the original structure
        files = []
        indices = []
        for i, fs in enumerate(file_paths):
            if not isinstance(fs, list):
                fs = [fs]
            for f in fs:
                files.append(("files", (Path(f).name, open(f, "rb"))))  # noqa: SIM115
                indices.append(i)
        r = requests.post(
            self.client.upload_url, headers=self.client.headers, files=files
        )
        if r.status_code != 200:
            uploaded = file_paths
        else:
            uploaded = []
            result = r.json()
            for i, fs in enumerate(file_paths):
                if isinstance(fs, list):
                    # Collect every uploaded path that originated from slot i.
                    output = [o for ix, o in enumerate(result) if indices[ix] == i]
                    res = [
                        {
                            "is_file": True,
                            "name": o,
                            "orig_name": Path(f).name,
                            "data": None,
                        }
                        for f, o in zip(fs, output)
                    ]
                else:
                    o = next(o for ix, o in enumerate(result) if indices[ix] == i)
                    res = {
                        "is_file": True,
                        "name": o,
                        "orig_name": Path(fs).name,
                        "data": None,
                    }
                uploaded.append(res)
        return uploaded

    def _add_uploaded_files_to_data(
        self,
        files: list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]],
        data: list[Any],
    ) -> None:
        """Helper function to modify the input data with the uploaded files."""
        file_counter = 0
        for i, t in enumerate(self.input_component_types):
            if t in ["file", "uploadbutton"]:
                data[i] = files[file_counter]
                file_counter += 1

    def insert_state(self, *data) -> tuple:
        """Insert None placeholders at state-component positions (callers omit state)."""
        data = list(data)
        for i, input_component_type in enumerate(self.input_component_types):
            if input_component_type == utils.STATE_COMPONENT:
                data.insert(i, None)
        return tuple(data)

    def remove_state(self, *data) -> tuple:
        """Drop outputs that correspond to state components."""
        data = [
            d
            for d, oct in zip(data, self.output_component_types)
            if oct != utils.STATE_COMPONENT
        ]
        return tuple(data)

    def reduce_singleton_output(self, *data) -> Any:
        """Unwrap the 1-tuple when there is exactly one non-state output component."""
        if (
            len(
                [
                    oct
                    for oct in self.output_component_types
                    if oct != utils.STATE_COMPONENT
                ]
            )
            == 1
        ):
            return data[0]
        else:
            return data

    def serialize(self, *data) -> tuple:
        """Serialize one Python value per input component (uploading files first)."""
        assert len(data) == len(
            self.serializers
        ), f"Expected {len(self.serializers)} arguments, got {len(data)}"

        # File-like inputs are uploaded and replaced with server-side references
        # before component-level serialization runs.
        files = [
            f
            for f, t in zip(data, self.input_component_types)
            if t in ["file", "uploadbutton"]
        ]
        uploaded_files = self._upload(files)
        data = list(data)
        self._add_uploaded_files_to_data(uploaded_files, data)
        o = tuple([s.serialize(d) for s, d in zip(self.serializers, data)])
        return o

    def deserialize(self, *data) -> tuple:
        """Deserialize one raw output per output component (downloading files as needed)."""
        assert len(data) == len(
            self.deserializers
        ), f"Expected {len(self.deserializers)} outputs, got {len(data)}"
        outputs = tuple(
            [
                s.deserialize(
                    d,
                    save_dir=self.client.output_dir,
                    hf_token=self.client.hf_token,
                    root_url=self.root_url,
                )
                for s, d in zip(self.deserializers, data)
            ]
        )
        return outputs

    def process_predictions(self, *predictions):
        """Postprocess raw predictions: deserialize, strip state, unwrap singletons."""
        if self.client.serialize:
            predictions = self.deserialize(*predictions)
        predictions = self.remove_state(*predictions)
        predictions = self.reduce_singleton_output(*predictions)
        return predictions

    def _setup_serializers(self) -> tuple[list[Serializable], list[Serializable]]:
        """Build (serializers, deserializers) matching the dependency's input and
        output components; also records the component type lists. Raises
        AssertionError for unknown components/serializers (caught in __init__)."""
        inputs = self.dependency["inputs"]
        serializers = []

        for i in inputs:
            for component in self.client.config["components"]:
                if component["id"] == i:
                    component_name = component["type"]
                    self.input_component_types.append(component_name)
                    if component.get("serializer"):
                        # Explicit serializer named in the config takes precedence.
                        serializer_name = component["serializer"]
                        assert (
                            serializer_name in serializing.SERIALIZER_MAPPING
                        ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version."
                        serializer = serializing.SERIALIZER_MAPPING[serializer_name]
                    else:
                        assert (
                            component_name in serializing.COMPONENT_MAPPING
                        ), f"Unknown component: {component_name}, you may need to update your gradio_client version."
                        serializer = serializing.COMPONENT_MAPPING[component_name]
                    serializers.append(serializer())  # type: ignore

        outputs = self.dependency["outputs"]
        deserializers = []
        for i in outputs:
            for component in self.client.config["components"]:
                if component["id"] == i:
                    component_name = component["type"]
                    self.output_component_types.append(component_name)
                    if component.get("serializer"):
                        serializer_name = component["serializer"]
                        assert (
                            serializer_name in serializing.SERIALIZER_MAPPING
                        ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version."
                        deserializer = serializing.SERIALIZER_MAPPING[serializer_name]
                    else:
                        assert (
                            component_name in serializing.COMPONENT_MAPPING
                        ), f"Unknown component: {component_name}, you may need to update your gradio_client version."
                        deserializer = serializing.COMPONENT_MAPPING[component_name]
                    deserializers.append(deserializer())  # type: ignore

        return serializers, deserializers

    def _use_websocket(self, dependency: dict) -> bool:
        """True when this endpoint should use the websocket queue protocol:
        queue enabled app-wide, app version >= 3.2, and the dependency is queued."""
        queue_enabled = self.client.config.get("enable_queue", False)
        queue_uses_websocket = version.parse(
            self.client.config.get("version", "2.0")
        ) >= version.Version("3.2")
        dependency_uses_queue = dependency.get("queue", False) is not False
        return queue_enabled and queue_uses_websocket and dependency_uses_queue

    async def _ws_fn(self, data, hash_data, helper: Communicator):
        """Open a websocket to the app's queue and await the prediction result."""
        async with websockets.connect(  # type: ignore
            self.client.ws_url,
            open_timeout=10,
            extra_headers=self.client.headers,
            max_size=1024 * 1024 * 1024,
        ) as websocket:
            return await utils.get_pred_from_ws(websocket, data, hash_data, helper)
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
@document("result", "outputs", "status")
|
| 904 |
+
class Job(Future):
|
| 905 |
+
"""
|
| 906 |
+
A Job is a wrapper over the Future class that represents a prediction call that has been
|
| 907 |
+
submitted by the Gradio client. This class is not meant to be instantiated directly, but rather
|
| 908 |
+
is created by the Client.submit() method.
|
| 909 |
+
|
| 910 |
+
A Job object includes methods to get the status of the prediction call, as well to get the outputs of
|
| 911 |
+
the prediction call. Job objects are also iterable, and can be used in a loop to get the outputs
|
| 912 |
+
of prediction calls as they become available for generator endpoints.
|
| 913 |
+
"""
|
| 914 |
+
|
| 915 |
+
def __init__(
|
| 916 |
+
self,
|
| 917 |
+
future: Future,
|
| 918 |
+
communicator: Communicator | None = None,
|
| 919 |
+
verbose: bool = True,
|
| 920 |
+
space_id: str | None = None,
|
| 921 |
+
):
|
| 922 |
+
"""
|
| 923 |
+
Parameters:
|
| 924 |
+
future: The future object that represents the prediction call, created by the Client.submit() method
|
| 925 |
+
communicator: The communicator object that is used to communicate between the client and the background thread running the job
|
| 926 |
+
verbose: Whether to print any status-related messages to the console
|
| 927 |
+
space_id: The space ID corresponding to the Client object that created this Job object
|
| 928 |
+
"""
|
| 929 |
+
self.future = future
|
| 930 |
+
self.communicator = communicator
|
| 931 |
+
self._counter = 0
|
| 932 |
+
self.verbose = verbose
|
| 933 |
+
self.space_id = space_id
|
| 934 |
+
|
| 935 |
+
def __iter__(self) -> Job:
|
| 936 |
+
return self
|
| 937 |
+
|
| 938 |
+
def __next__(self) -> tuple | Any:
|
| 939 |
+
if not self.communicator:
|
| 940 |
+
raise StopIteration()
|
| 941 |
+
|
| 942 |
+
with self.communicator.lock:
|
| 943 |
+
if self.communicator.job.latest_status.code == Status.FINISHED:
|
| 944 |
+
raise StopIteration()
|
| 945 |
+
|
| 946 |
+
while True:
|
| 947 |
+
with self.communicator.lock:
|
| 948 |
+
if len(self.communicator.job.outputs) == self._counter + 1:
|
| 949 |
+
o = self.communicator.job.outputs[self._counter]
|
| 950 |
+
self._counter += 1
|
| 951 |
+
return o
|
| 952 |
+
if self.communicator.job.latest_status.code == Status.FINISHED:
|
| 953 |
+
raise StopIteration()
|
| 954 |
+
|
| 955 |
+
def result(self, timeout: float | None = None) -> Any:
|
| 956 |
+
"""
|
| 957 |
+
Return the result of the call that the future represents. Raises CancelledError: If the future was cancelled, TimeoutError: If the future didn't finish executing before the given timeout, and Exception: If the call raised then that exception will be raised.
|
| 958 |
+
|
| 959 |
+
Parameters:
|
| 960 |
+
timeout: The number of seconds to wait for the result if the future isn't done. If None, then there is no limit on the wait time.
|
| 961 |
+
Returns:
|
| 962 |
+
The result of the call that the future represents.
|
| 963 |
+
Example:
|
| 964 |
+
from gradio_client import Client
|
| 965 |
+
calculator = Client(src="gradio/calculator")
|
| 966 |
+
job = calculator.submit("foo", "add", 4, fn_index=0)
|
| 967 |
+
job.result(timeout=5)
|
| 968 |
+
>> 9
|
| 969 |
+
"""
|
| 970 |
+
if self.communicator:
|
| 971 |
+
timeout = timeout or float("inf")
|
| 972 |
+
if self.future._exception: # type: ignore
|
| 973 |
+
raise self.future._exception # type: ignore
|
| 974 |
+
with self.communicator.lock:
|
| 975 |
+
if self.communicator.job.outputs:
|
| 976 |
+
return self.communicator.job.outputs[0]
|
| 977 |
+
start = datetime.now()
|
| 978 |
+
while True:
|
| 979 |
+
if (datetime.now() - start).seconds > timeout:
|
| 980 |
+
raise TimeoutError()
|
| 981 |
+
if self.future._exception: # type: ignore
|
| 982 |
+
raise self.future._exception # type: ignore
|
| 983 |
+
with self.communicator.lock:
|
| 984 |
+
if self.communicator.job.outputs:
|
| 985 |
+
return self.communicator.job.outputs[0]
|
| 986 |
+
time.sleep(0.01)
|
| 987 |
+
else:
|
| 988 |
+
return super().result(timeout=timeout)
|
| 989 |
+
|
| 990 |
+
def outputs(self) -> list[tuple | Any]:
|
| 991 |
+
"""
|
| 992 |
+
Returns a list containing the latest outputs from the Job.
|
| 993 |
+
|
| 994 |
+
If the endpoint has multiple output components, the list will contain
|
| 995 |
+
a tuple of results. Otherwise, it will contain the results without storing them
|
| 996 |
+
in tuples.
|
| 997 |
+
|
| 998 |
+
For endpoints that are queued, this list will contain the final job output even
|
| 999 |
+
if that endpoint does not use a generator function.
|
| 1000 |
+
|
| 1001 |
+
Example:
|
| 1002 |
+
from gradio_client import Client
|
| 1003 |
+
client = Client(src="gradio/count_generator")
|
| 1004 |
+
job = client.submit(3, api_name="/count")
|
| 1005 |
+
while not job.done():
|
| 1006 |
+
time.sleep(0.1)
|
| 1007 |
+
job.outputs()
|
| 1008 |
+
>> ['0', '1', '2']
|
| 1009 |
+
"""
|
| 1010 |
+
if not self.communicator:
|
| 1011 |
+
return []
|
| 1012 |
+
else:
|
| 1013 |
+
with self.communicator.lock:
|
| 1014 |
+
return self.communicator.job.outputs
|
| 1015 |
+
|
| 1016 |
+
def status(self) -> StatusUpdate:
|
| 1017 |
+
"""
|
| 1018 |
+
Returns the latest status update from the Job in the form of a StatusUpdate
|
| 1019 |
+
object, which contains the following fields: code, rank, queue_size, success, time, eta, and progress_data.
|
| 1020 |
+
|
| 1021 |
+
progress_data is a list of updates emitted by the gr.Progress() tracker of the event handler. Each element
|
| 1022 |
+
of the list has the following fields: index, length, unit, progress, desc. If the event handler does not have
|
| 1023 |
+
a gr.Progress() tracker, the progress_data field will be None.
|
| 1024 |
+
|
| 1025 |
+
Example:
|
| 1026 |
+
from gradio_client import Client
|
| 1027 |
+
client = Client(src="gradio/calculator")
|
| 1028 |
+
job = client.submit(5, "add", 4, api_name="/predict")
|
| 1029 |
+
job.status()
|
| 1030 |
+
>> <Status.STARTING: 'STARTING'>
|
| 1031 |
+
job.status().eta
|
| 1032 |
+
>> 43.241 # seconds
|
| 1033 |
+
"""
|
| 1034 |
+
time = datetime.now()
|
| 1035 |
+
cancelled = False
|
| 1036 |
+
if self.communicator:
|
| 1037 |
+
with self.communicator.lock:
|
| 1038 |
+
cancelled = self.communicator.should_cancel
|
| 1039 |
+
if cancelled:
|
| 1040 |
+
return StatusUpdate(
|
| 1041 |
+
code=Status.CANCELLED,
|
| 1042 |
+
rank=0,
|
| 1043 |
+
queue_size=None,
|
| 1044 |
+
success=False,
|
| 1045 |
+
time=time,
|
| 1046 |
+
eta=None,
|
| 1047 |
+
progress_data=None,
|
| 1048 |
+
)
|
| 1049 |
+
if self.done():
|
| 1050 |
+
if not self.future._exception: # type: ignore
|
| 1051 |
+
return StatusUpdate(
|
| 1052 |
+
code=Status.FINISHED,
|
| 1053 |
+
rank=0,
|
| 1054 |
+
queue_size=None,
|
| 1055 |
+
success=True,
|
| 1056 |
+
time=time,
|
| 1057 |
+
eta=None,
|
| 1058 |
+
progress_data=None,
|
| 1059 |
+
)
|
| 1060 |
+
else:
|
| 1061 |
+
return StatusUpdate(
|
| 1062 |
+
code=Status.FINISHED,
|
| 1063 |
+
rank=0,
|
| 1064 |
+
queue_size=None,
|
| 1065 |
+
success=False,
|
| 1066 |
+
time=time,
|
| 1067 |
+
eta=None,
|
| 1068 |
+
progress_data=None,
|
| 1069 |
+
)
|
| 1070 |
+
else:
|
| 1071 |
+
if not self.communicator:
|
| 1072 |
+
return StatusUpdate(
|
| 1073 |
+
code=Status.PROCESSING,
|
| 1074 |
+
rank=0,
|
| 1075 |
+
queue_size=None,
|
| 1076 |
+
success=None,
|
| 1077 |
+
time=time,
|
| 1078 |
+
eta=None,
|
| 1079 |
+
progress_data=None,
|
| 1080 |
+
)
|
| 1081 |
+
else:
|
| 1082 |
+
with self.communicator.lock:
|
| 1083 |
+
eta = self.communicator.job.latest_status.eta
|
| 1084 |
+
if self.verbose and self.space_id and eta and eta > 30:
|
| 1085 |
+
print(
|
| 1086 |
+
f"Due to heavy traffic on this app, the prediction will take approximately {int(eta)} seconds."
|
| 1087 |
+
f"For faster predictions without waiting in queue, you may duplicate the space using: Client.duplicate({self.space_id})"
|
| 1088 |
+
)
|
| 1089 |
+
return self.communicator.job.latest_status
|
| 1090 |
+
|
| 1091 |
+
def __getattr__(self, name):
|
| 1092 |
+
"""Forwards any properties to the Future class."""
|
| 1093 |
+
return getattr(self.future, name)
|
| 1094 |
+
|
| 1095 |
+
def cancel(self) -> bool:
|
| 1096 |
+
"""Cancels the job as best as possible.
|
| 1097 |
+
|
| 1098 |
+
If the app you are connecting to has the gradio queue enabled, the job
|
| 1099 |
+
will be cancelled locally as soon as possible. For apps that do not use the
|
| 1100 |
+
queue, the job cannot be cancelled if it's been sent to the local executor
|
| 1101 |
+
(for the time being).
|
| 1102 |
+
|
| 1103 |
+
Note: In general, this DOES not stop the process from running in the upstream server
|
| 1104 |
+
except for the following situations:
|
| 1105 |
+
|
| 1106 |
+
1. If the job is queued upstream, it will be removed from the queue and the server will not run the job
|
| 1107 |
+
2. If the job has iterative outputs, the job will finish as soon as the current iteration finishes running
|
| 1108 |
+
3. If the job has not been picked up by the queue yet, the queue will not pick up the job
|
| 1109 |
+
"""
|
| 1110 |
+
if self.communicator:
|
| 1111 |
+
with self.communicator.lock:
|
| 1112 |
+
self.communicator.should_cancel = True
|
| 1113 |
+
return True
|
| 1114 |
+
return self.future.cancel()
|
parrot/lib/python3.10/site-packages/gradio_client/data_classes.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TypedDict
|
| 4 |
+
|
| 5 |
+
from typing_extensions import NotRequired
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FileData(TypedDict):
|
| 9 |
+
name: str | None # filename
|
| 10 |
+
data: str | None # base64 encoded data
|
| 11 |
+
size: NotRequired[int | None] # size in bytes
|
| 12 |
+
is_file: NotRequired[
|
| 13 |
+
bool
|
| 14 |
+
] # whether the data corresponds to a file or base64 encoded data
|
| 15 |
+
orig_name: NotRequired[str] # original filename
|
parrot/lib/python3.10/site-packages/gradio_client/documentation.py
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Contains methods that generate documentation for Gradio functions and classes."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import inspect
|
| 6 |
+
from typing import Callable
|
| 7 |
+
|
| 8 |
+
classes_to_document = {}
|
| 9 |
+
classes_inherit_documentation = {}
|
| 10 |
+
documentation_group = None
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def set_documentation_group(m):
|
| 14 |
+
global documentation_group
|
| 15 |
+
documentation_group = m
|
| 16 |
+
if m not in classes_to_document:
|
| 17 |
+
classes_to_document[m] = []
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def extract_instance_attr_doc(cls, attr):
|
| 21 |
+
code = inspect.getsource(cls.__init__)
|
| 22 |
+
lines = [line.strip() for line in code.split("\n")]
|
| 23 |
+
i = None
|
| 24 |
+
for i, line in enumerate(lines): # noqa: B007
|
| 25 |
+
if line.startswith("self." + attr + ":") or line.startswith(
|
| 26 |
+
"self." + attr + " ="
|
| 27 |
+
):
|
| 28 |
+
break
|
| 29 |
+
assert i is not None, f"Could not find {attr} in {cls.__name__}"
|
| 30 |
+
start_line = lines.index('"""', i)
|
| 31 |
+
end_line = lines.index('"""', start_line + 1)
|
| 32 |
+
for j in range(i + 1, start_line):
|
| 33 |
+
assert not lines[j].startswith("self."), (
|
| 34 |
+
f"Found another attribute before docstring for {attr} in {cls.__name__}: "
|
| 35 |
+
+ lines[j]
|
| 36 |
+
+ "\n start:"
|
| 37 |
+
+ lines[i]
|
| 38 |
+
)
|
| 39 |
+
doc_string = " ".join(lines[start_line + 1 : end_line])
|
| 40 |
+
return doc_string
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def document(*fns, inherit=False):
|
| 44 |
+
"""
|
| 45 |
+
Defines the @document decorator which adds classes or functions to the Gradio
|
| 46 |
+
documentation at www.gradio.app/docs.
|
| 47 |
+
|
| 48 |
+
Usage examples:
|
| 49 |
+
- Put @document() above a class to document the class and its constructor.
|
| 50 |
+
- Put @document("fn1", "fn2") above a class to also document methods fn1 and fn2.
|
| 51 |
+
- Put @document("*fn3") with an asterisk above a class to document the instance attribute methods f3.
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
def inner_doc(cls):
|
| 55 |
+
global documentation_group
|
| 56 |
+
if inherit:
|
| 57 |
+
classes_inherit_documentation[cls] = None
|
| 58 |
+
classes_to_document[documentation_group].append((cls, fns))
|
| 59 |
+
return cls
|
| 60 |
+
|
| 61 |
+
return inner_doc
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def document_fn(fn: Callable, cls) -> tuple[str, list[dict], dict, str | None]:
|
| 65 |
+
"""
|
| 66 |
+
Generates documentation for any function.
|
| 67 |
+
Parameters:
|
| 68 |
+
fn: Function to document
|
| 69 |
+
Returns:
|
| 70 |
+
description: General description of fn
|
| 71 |
+
parameters: A list of dicts for each parameter, storing data for the parameter name, annotation and doc
|
| 72 |
+
return: A dict storing data for the returned annotation and doc
|
| 73 |
+
example: Code for an example use of the fn
|
| 74 |
+
"""
|
| 75 |
+
doc_str = inspect.getdoc(fn) or ""
|
| 76 |
+
doc_lines = doc_str.split("\n")
|
| 77 |
+
signature = inspect.signature(fn)
|
| 78 |
+
description, parameters, returns, examples = [], {}, [], []
|
| 79 |
+
mode = "description"
|
| 80 |
+
for line in doc_lines:
|
| 81 |
+
line = line.rstrip()
|
| 82 |
+
if line == "Parameters:":
|
| 83 |
+
mode = "parameter"
|
| 84 |
+
elif line.startswith("Example:"):
|
| 85 |
+
mode = "example"
|
| 86 |
+
if "(" in line and ")" in line:
|
| 87 |
+
c = line.split("(")[1].split(")")[0]
|
| 88 |
+
if c != cls.__name__:
|
| 89 |
+
mode = "ignore"
|
| 90 |
+
elif line == "Returns:":
|
| 91 |
+
mode = "return"
|
| 92 |
+
else:
|
| 93 |
+
if mode == "description":
|
| 94 |
+
description.append(line if line.strip() else "<br>")
|
| 95 |
+
continue
|
| 96 |
+
if not (line.startswith(" ") or line.strip() == ""):
|
| 97 |
+
print(line)
|
| 98 |
+
assert (
|
| 99 |
+
line.startswith(" ") or line.strip() == ""
|
| 100 |
+
), f"Documentation format for {fn.__name__} has format error in line: {line}"
|
| 101 |
+
line = line[4:]
|
| 102 |
+
if mode == "parameter":
|
| 103 |
+
colon_index = line.index(": ")
|
| 104 |
+
assert (
|
| 105 |
+
colon_index > -1
|
| 106 |
+
), f"Documentation format for {fn.__name__} has format error in line: {line}"
|
| 107 |
+
parameter = line[:colon_index]
|
| 108 |
+
parameter_doc = line[colon_index + 2 :]
|
| 109 |
+
parameters[parameter] = parameter_doc
|
| 110 |
+
elif mode == "return":
|
| 111 |
+
returns.append(line)
|
| 112 |
+
elif mode == "example":
|
| 113 |
+
examples.append(line)
|
| 114 |
+
description_doc = " ".join(description)
|
| 115 |
+
parameter_docs = []
|
| 116 |
+
for param_name, param in signature.parameters.items():
|
| 117 |
+
if param_name.startswith("_"):
|
| 118 |
+
continue
|
| 119 |
+
if param_name in ["kwargs", "args"] and param_name not in parameters:
|
| 120 |
+
continue
|
| 121 |
+
parameter_doc = {
|
| 122 |
+
"name": param_name,
|
| 123 |
+
"annotation": param.annotation,
|
| 124 |
+
"doc": parameters.get(param_name),
|
| 125 |
+
}
|
| 126 |
+
if param_name in parameters:
|
| 127 |
+
del parameters[param_name]
|
| 128 |
+
if param.default != inspect.Parameter.empty:
|
| 129 |
+
default = param.default
|
| 130 |
+
if type(default) == str:
|
| 131 |
+
default = '"' + default + '"'
|
| 132 |
+
if default.__class__.__module__ != "builtins":
|
| 133 |
+
default = f"{default.__class__.__name__}()"
|
| 134 |
+
parameter_doc["default"] = default
|
| 135 |
+
elif parameter_doc["doc"] is not None:
|
| 136 |
+
if "kwargs" in parameter_doc["doc"]:
|
| 137 |
+
parameter_doc["kwargs"] = True
|
| 138 |
+
if "args" in parameter_doc["doc"]:
|
| 139 |
+
parameter_doc["args"] = True
|
| 140 |
+
parameter_docs.append(parameter_doc)
|
| 141 |
+
assert (
|
| 142 |
+
len(parameters) == 0
|
| 143 |
+
), f"Documentation format for {fn.__name__} documents nonexistent parameters: {''.join(parameters.keys())}"
|
| 144 |
+
if len(returns) == 0:
|
| 145 |
+
return_docs = {}
|
| 146 |
+
elif len(returns) == 1:
|
| 147 |
+
return_docs = {"annotation": signature.return_annotation, "doc": returns[0]}
|
| 148 |
+
else:
|
| 149 |
+
return_docs = {}
|
| 150 |
+
# raise ValueError("Does not support multiple returns yet.")
|
| 151 |
+
examples_doc = "\n".join(examples) if len(examples) > 0 else None
|
| 152 |
+
return description_doc, parameter_docs, return_docs, examples_doc
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def document_cls(cls):
|
| 156 |
+
doc_str = inspect.getdoc(cls)
|
| 157 |
+
if doc_str is None:
|
| 158 |
+
return "", {}, ""
|
| 159 |
+
tags = {}
|
| 160 |
+
description_lines = []
|
| 161 |
+
mode = "description"
|
| 162 |
+
for line in doc_str.split("\n"):
|
| 163 |
+
line = line.rstrip()
|
| 164 |
+
if line.endswith(":") and " " not in line:
|
| 165 |
+
mode = line[:-1].lower()
|
| 166 |
+
tags[mode] = []
|
| 167 |
+
elif line.split(" ")[0].endswith(":") and not line.startswith(" "):
|
| 168 |
+
tag = line[: line.index(":")].lower()
|
| 169 |
+
value = line[line.index(":") + 2 :]
|
| 170 |
+
tags[tag] = value
|
| 171 |
+
else:
|
| 172 |
+
if mode == "description":
|
| 173 |
+
description_lines.append(line if line.strip() else "<br>")
|
| 174 |
+
else:
|
| 175 |
+
assert (
|
| 176 |
+
line.startswith(" ") or not line.strip()
|
| 177 |
+
), f"Documentation format for {cls.__name__} has format error in line: {line}"
|
| 178 |
+
tags[mode].append(line[4:])
|
| 179 |
+
if "example" in tags:
|
| 180 |
+
example = "\n".join(tags["example"])
|
| 181 |
+
del tags["example"]
|
| 182 |
+
else:
|
| 183 |
+
example = None
|
| 184 |
+
for key, val in tags.items():
|
| 185 |
+
if isinstance(val, list):
|
| 186 |
+
tags[key] = "<br>".join(val)
|
| 187 |
+
description = " ".join(description_lines).replace("\n", "<br>")
|
| 188 |
+
return description, tags, example
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def generate_documentation():
|
| 192 |
+
documentation = {}
|
| 193 |
+
for mode, class_list in classes_to_document.items():
|
| 194 |
+
documentation[mode] = []
|
| 195 |
+
for cls, fns in class_list:
|
| 196 |
+
fn_to_document = cls if inspect.isfunction(cls) else cls.__init__
|
| 197 |
+
_, parameter_doc, return_doc, _ = document_fn(fn_to_document, cls)
|
| 198 |
+
cls_description, cls_tags, cls_example = document_cls(cls)
|
| 199 |
+
cls_documentation = {
|
| 200 |
+
"class": cls,
|
| 201 |
+
"name": cls.__name__,
|
| 202 |
+
"description": cls_description,
|
| 203 |
+
"tags": cls_tags,
|
| 204 |
+
"parameters": parameter_doc,
|
| 205 |
+
"returns": return_doc,
|
| 206 |
+
"example": cls_example,
|
| 207 |
+
"fns": [],
|
| 208 |
+
}
|
| 209 |
+
for fn_name in fns:
|
| 210 |
+
instance_attribute_fn = fn_name.startswith("*")
|
| 211 |
+
if instance_attribute_fn:
|
| 212 |
+
fn_name = fn_name[1:]
|
| 213 |
+
# Instance attribute fns are classes
|
| 214 |
+
# whose __call__ method determines their behavior
|
| 215 |
+
fn = getattr(cls(), fn_name).__call__
|
| 216 |
+
else:
|
| 217 |
+
fn = getattr(cls, fn_name)
|
| 218 |
+
if not callable(fn):
|
| 219 |
+
description_doc = str(fn)
|
| 220 |
+
parameter_docs = {}
|
| 221 |
+
return_docs = {}
|
| 222 |
+
examples_doc = ""
|
| 223 |
+
override_signature = f"gr.{cls.__name__}.{fn_name}"
|
| 224 |
+
else:
|
| 225 |
+
(
|
| 226 |
+
description_doc,
|
| 227 |
+
parameter_docs,
|
| 228 |
+
return_docs,
|
| 229 |
+
examples_doc,
|
| 230 |
+
) = document_fn(fn, cls)
|
| 231 |
+
override_signature = None
|
| 232 |
+
if instance_attribute_fn:
|
| 233 |
+
description_doc = extract_instance_attr_doc(cls, fn_name)
|
| 234 |
+
cls_documentation["fns"].append(
|
| 235 |
+
{
|
| 236 |
+
"fn": fn,
|
| 237 |
+
"name": fn_name,
|
| 238 |
+
"description": description_doc,
|
| 239 |
+
"tags": {},
|
| 240 |
+
"parameters": parameter_docs,
|
| 241 |
+
"returns": return_docs,
|
| 242 |
+
"example": examples_doc,
|
| 243 |
+
"override_signature": override_signature,
|
| 244 |
+
}
|
| 245 |
+
)
|
| 246 |
+
documentation[mode].append(cls_documentation)
|
| 247 |
+
if cls in classes_inherit_documentation:
|
| 248 |
+
classes_inherit_documentation[cls] = cls_documentation["fns"]
|
| 249 |
+
for mode, class_list in classes_to_document.items():
|
| 250 |
+
for i, (cls, _) in enumerate(class_list):
|
| 251 |
+
for super_class in classes_inherit_documentation:
|
| 252 |
+
if (
|
| 253 |
+
inspect.isclass(cls)
|
| 254 |
+
and issubclass(cls, super_class)
|
| 255 |
+
and cls != super_class
|
| 256 |
+
):
|
| 257 |
+
for inherited_fn in classes_inherit_documentation[super_class]:
|
| 258 |
+
inherited_fn = dict(inherited_fn)
|
| 259 |
+
try:
|
| 260 |
+
inherited_fn["description"] = extract_instance_attr_doc(
|
| 261 |
+
cls, inherited_fn["name"]
|
| 262 |
+
)
|
| 263 |
+
except (ValueError, AssertionError):
|
| 264 |
+
pass
|
| 265 |
+
documentation[mode][i]["fns"].append(inherited_fn)
|
| 266 |
+
return documentation
|
parrot/lib/python3.10/site-packages/gradio_client/media_data.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
parrot/lib/python3.10/site-packages/gradio_client/serializing.py
ADDED
|
@@ -0,0 +1,550 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import os
|
| 5 |
+
import uuid
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import Any
|
| 8 |
+
|
| 9 |
+
from gradio_client import media_data, utils
|
| 10 |
+
from gradio_client.data_classes import FileData
|
| 11 |
+
|
| 12 |
+
with open(Path(__file__).parent / "types.json") as f:
|
| 13 |
+
serializer_types = json.load(f)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class Serializable:
|
| 17 |
+
def serialized_info(self):
|
| 18 |
+
"""
|
| 19 |
+
The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description].
|
| 20 |
+
Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output
|
| 21 |
+
"""
|
| 22 |
+
return self.api_info()
|
| 23 |
+
|
| 24 |
+
def api_info(self) -> dict[str, list[str]]:
|
| 25 |
+
"""
|
| 26 |
+
The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description].
|
| 27 |
+
Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output
|
| 28 |
+
"""
|
| 29 |
+
raise NotImplementedError()
|
| 30 |
+
|
| 31 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 32 |
+
"""
|
| 33 |
+
The example inputs for this component as a dictionary whose values are example inputs compatible with this component.
|
| 34 |
+
Keys of the dictionary are: raw, serialized
|
| 35 |
+
"""
|
| 36 |
+
raise NotImplementedError()
|
| 37 |
+
|
| 38 |
+
# For backwards compatibility
|
| 39 |
+
def input_api_info(self) -> tuple[str, str]:
|
| 40 |
+
api_info = self.api_info()
|
| 41 |
+
return (api_info["serialized_input"][0], api_info["serialized_input"][1])
|
| 42 |
+
|
| 43 |
+
# For backwards compatibility
|
| 44 |
+
def output_api_info(self) -> tuple[str, str]:
|
| 45 |
+
api_info = self.api_info()
|
| 46 |
+
return (api_info["serialized_output"][0], api_info["serialized_output"][1])
|
| 47 |
+
|
| 48 |
+
def serialize(self, x: Any, load_dir: str | Path = ""):
|
| 49 |
+
"""
|
| 50 |
+
Convert data from human-readable format to serialized format for a browser.
|
| 51 |
+
"""
|
| 52 |
+
return x
|
| 53 |
+
|
| 54 |
+
def deserialize(
|
| 55 |
+
self,
|
| 56 |
+
x: Any,
|
| 57 |
+
save_dir: str | Path | None = None,
|
| 58 |
+
root_url: str | None = None,
|
| 59 |
+
hf_token: str | None = None,
|
| 60 |
+
):
|
| 61 |
+
"""
|
| 62 |
+
Convert data from serialized format for a browser to human-readable format.
|
| 63 |
+
"""
|
| 64 |
+
return x
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class SimpleSerializable(Serializable):
|
| 68 |
+
"""General class that does not perform any serialization or deserialization."""
|
| 69 |
+
|
| 70 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 71 |
+
return {
|
| 72 |
+
"info": serializer_types["SimpleSerializable"],
|
| 73 |
+
"serialized_info": False,
|
| 74 |
+
}
|
| 75 |
+
|
| 76 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 77 |
+
return {
|
| 78 |
+
"raw": None,
|
| 79 |
+
"serialized": None,
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class StringSerializable(Serializable):
|
| 84 |
+
"""Expects a string as input/output but performs no serialization."""
|
| 85 |
+
|
| 86 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 87 |
+
return {
|
| 88 |
+
"info": serializer_types["StringSerializable"],
|
| 89 |
+
"serialized_info": False,
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 93 |
+
return {
|
| 94 |
+
"raw": "Howdy!",
|
| 95 |
+
"serialized": "Howdy!",
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class ListStringSerializable(Serializable):
|
| 100 |
+
"""Expects a list of strings as input/output but performs no serialization."""
|
| 101 |
+
|
| 102 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 103 |
+
return {
|
| 104 |
+
"info": serializer_types["ListStringSerializable"],
|
| 105 |
+
"serialized_info": False,
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 109 |
+
return {
|
| 110 |
+
"raw": ["Howdy!", "Merhaba"],
|
| 111 |
+
"serialized": ["Howdy!", "Merhaba"],
|
| 112 |
+
}
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class BooleanSerializable(Serializable):
|
| 116 |
+
"""Expects a boolean as input/output but performs no serialization."""
|
| 117 |
+
|
| 118 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 119 |
+
return {
|
| 120 |
+
"info": serializer_types["BooleanSerializable"],
|
| 121 |
+
"serialized_info": False,
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 125 |
+
return {
|
| 126 |
+
"raw": True,
|
| 127 |
+
"serialized": True,
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class NumberSerializable(Serializable):
|
| 132 |
+
"""Expects a number (int/float) as input/output but performs no serialization."""
|
| 133 |
+
|
| 134 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 135 |
+
return {
|
| 136 |
+
"info": serializer_types["NumberSerializable"],
|
| 137 |
+
"serialized_info": False,
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 141 |
+
return {
|
| 142 |
+
"raw": 5,
|
| 143 |
+
"serialized": 5,
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class ImgSerializable(Serializable):
|
| 148 |
+
"""Expects a base64 string as input/output which is serialized to a filepath."""
|
| 149 |
+
|
| 150 |
+
def serialized_info(self):
|
| 151 |
+
return {"type": "string", "description": "filepath or URL to image"}
|
| 152 |
+
|
| 153 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 154 |
+
return {"info": serializer_types["ImgSerializable"], "serialized_info": True}
|
| 155 |
+
|
| 156 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 157 |
+
return {
|
| 158 |
+
"raw": media_data.BASE64_IMAGE,
|
| 159 |
+
"serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
def serialize(
|
| 163 |
+
self,
|
| 164 |
+
x: str | None,
|
| 165 |
+
load_dir: str | Path = "",
|
| 166 |
+
) -> str | None:
|
| 167 |
+
"""
|
| 168 |
+
Convert from human-friendly version of a file (string filepath) to a serialized
|
| 169 |
+
representation (base64).
|
| 170 |
+
Parameters:
|
| 171 |
+
x: String path to file to serialize
|
| 172 |
+
load_dir: Path to directory containing x
|
| 173 |
+
"""
|
| 174 |
+
if not x:
|
| 175 |
+
return None
|
| 176 |
+
if utils.is_http_url_like(x):
|
| 177 |
+
return utils.encode_url_to_base64(x)
|
| 178 |
+
return utils.encode_file_to_base64(Path(load_dir) / x)
|
| 179 |
+
|
| 180 |
+
def deserialize(
|
| 181 |
+
self,
|
| 182 |
+
x: str | None,
|
| 183 |
+
save_dir: str | Path | None = None,
|
| 184 |
+
root_url: str | None = None,
|
| 185 |
+
hf_token: str | None = None,
|
| 186 |
+
) -> str | None:
|
| 187 |
+
"""
|
| 188 |
+
Convert from serialized representation of a file (base64) to a human-friendly
|
| 189 |
+
version (string filepath). Optionally, save the file to the directory specified by save_dir
|
| 190 |
+
Parameters:
|
| 191 |
+
x: Base64 representation of image to deserialize into a string filepath
|
| 192 |
+
save_dir: Path to directory to save the deserialized image to
|
| 193 |
+
root_url: Ignored
|
| 194 |
+
hf_token: Ignored
|
| 195 |
+
"""
|
| 196 |
+
if x is None or x == "":
|
| 197 |
+
return None
|
| 198 |
+
file = utils.decode_base64_to_file(x, dir=save_dir)
|
| 199 |
+
return file.name
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class FileSerializable(Serializable):
|
| 203 |
+
"""Expects a dict with base64 representation of object as input/output which is serialized to a filepath."""
|
| 204 |
+
|
| 205 |
+
def serialized_info(self):
|
| 206 |
+
return self._single_file_serialized_info()
|
| 207 |
+
|
| 208 |
+
def _single_file_api_info(self):
|
| 209 |
+
return {
|
| 210 |
+
"info": serializer_types["SingleFileSerializable"],
|
| 211 |
+
"serialized_info": True,
|
| 212 |
+
}
|
| 213 |
+
|
| 214 |
+
def _single_file_serialized_info(self):
|
| 215 |
+
return {"type": "string", "description": "filepath or URL to file"}
|
| 216 |
+
|
| 217 |
+
def _multiple_file_serialized_info(self):
|
| 218 |
+
return {
|
| 219 |
+
"type": "array",
|
| 220 |
+
"description": "List of filepath(s) or URL(s) to files",
|
| 221 |
+
"items": {"type": "string", "description": "filepath or URL to file"},
|
| 222 |
+
}
|
| 223 |
+
|
| 224 |
+
def _multiple_file_api_info(self):
|
| 225 |
+
return {
|
| 226 |
+
"info": serializer_types["MultipleFileSerializable"],
|
| 227 |
+
"serialized_info": True,
|
| 228 |
+
}
|
| 229 |
+
|
| 230 |
+
def api_info(self) -> dict[str, dict | bool]:
|
| 231 |
+
return self._single_file_api_info()
|
| 232 |
+
|
| 233 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 234 |
+
return self._single_file_example_inputs()
|
| 235 |
+
|
| 236 |
+
def _single_file_example_inputs(self) -> dict[str, Any]:
|
| 237 |
+
return {
|
| 238 |
+
"raw": {"is_file": False, "data": media_data.BASE64_FILE},
|
| 239 |
+
"serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf",
|
| 240 |
+
}
|
| 241 |
+
|
| 242 |
+
def _multiple_file_example_inputs(self) -> dict[str, Any]:
|
| 243 |
+
return {
|
| 244 |
+
"raw": [{"is_file": False, "data": media_data.BASE64_FILE}],
|
| 245 |
+
"serialized": [
|
| 246 |
+
"https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf"
|
| 247 |
+
],
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
def _serialize_single(
|
| 251 |
+
self, x: str | FileData | None, load_dir: str | Path = ""
|
| 252 |
+
) -> FileData | None:
|
| 253 |
+
if x is None or isinstance(x, dict):
|
| 254 |
+
return x
|
| 255 |
+
if utils.is_http_url_like(x):
|
| 256 |
+
filename = x
|
| 257 |
+
size = None
|
| 258 |
+
else:
|
| 259 |
+
filename = str(Path(load_dir) / x)
|
| 260 |
+
size = Path(filename).stat().st_size
|
| 261 |
+
return {
|
| 262 |
+
"name": filename,
|
| 263 |
+
"data": utils.encode_url_or_file_to_base64(filename),
|
| 264 |
+
"orig_name": Path(filename).name,
|
| 265 |
+
"is_file": False,
|
| 266 |
+
"size": size,
|
| 267 |
+
}
|
| 268 |
+
|
| 269 |
+
def _deserialize_single(
|
| 270 |
+
self,
|
| 271 |
+
x: str | FileData | None,
|
| 272 |
+
save_dir: str | None = None,
|
| 273 |
+
root_url: str | None = None,
|
| 274 |
+
hf_token: str | None = None,
|
| 275 |
+
) -> str | None:
|
| 276 |
+
if x is None:
|
| 277 |
+
return None
|
| 278 |
+
if isinstance(x, str):
|
| 279 |
+
file_name = utils.decode_base64_to_file(x, dir=save_dir).name
|
| 280 |
+
elif isinstance(x, dict):
|
| 281 |
+
if x.get("is_file"):
|
| 282 |
+
filepath = x.get("name")
|
| 283 |
+
assert filepath is not None, f"The 'name' field is missing in {x}"
|
| 284 |
+
if root_url is not None:
|
| 285 |
+
file_name = utils.download_tmp_copy_of_file(
|
| 286 |
+
root_url + "file=" + filepath,
|
| 287 |
+
hf_token=hf_token,
|
| 288 |
+
dir=save_dir,
|
| 289 |
+
)
|
| 290 |
+
else:
|
| 291 |
+
file_name = utils.create_tmp_copy_of_file(filepath, dir=save_dir)
|
| 292 |
+
else:
|
| 293 |
+
data = x.get("data")
|
| 294 |
+
assert data is not None, f"The 'data' field is missing in {x}"
|
| 295 |
+
file_name = utils.decode_base64_to_file(data, dir=save_dir).name
|
| 296 |
+
else:
|
| 297 |
+
raise ValueError(
|
| 298 |
+
f"A FileSerializable component can only deserialize a string or a dict, not a {type(x)}: {x}"
|
| 299 |
+
)
|
| 300 |
+
return file_name
|
| 301 |
+
|
| 302 |
+
def serialize(
|
| 303 |
+
self,
|
| 304 |
+
x: str | FileData | None | list[str | FileData | None],
|
| 305 |
+
load_dir: str | Path = "",
|
| 306 |
+
) -> FileData | None | list[FileData | None]:
|
| 307 |
+
"""
|
| 308 |
+
Convert from human-friendly version of a file (string filepath) to a
|
| 309 |
+
serialized representation (base64)
|
| 310 |
+
Parameters:
|
| 311 |
+
x: String path to file to serialize
|
| 312 |
+
load_dir: Path to directory containing x
|
| 313 |
+
"""
|
| 314 |
+
if x is None or x == "":
|
| 315 |
+
return None
|
| 316 |
+
if isinstance(x, list):
|
| 317 |
+
return [self._serialize_single(f, load_dir=load_dir) for f in x]
|
| 318 |
+
else:
|
| 319 |
+
return self._serialize_single(x, load_dir=load_dir)
|
| 320 |
+
|
| 321 |
+
def deserialize(
|
| 322 |
+
self,
|
| 323 |
+
x: str | FileData | None | list[str | FileData | None],
|
| 324 |
+
save_dir: Path | str | None = None,
|
| 325 |
+
root_url: str | None = None,
|
| 326 |
+
hf_token: str | None = None,
|
| 327 |
+
) -> str | None | list[str | None]:
|
| 328 |
+
"""
|
| 329 |
+
Convert from serialized representation of a file (base64) to a human-friendly
|
| 330 |
+
version (string filepath). Optionally, save the file to the directory specified by `save_dir`
|
| 331 |
+
Parameters:
|
| 332 |
+
x: Base64 representation of file to deserialize into a string filepath
|
| 333 |
+
save_dir: Path to directory to save the deserialized file to
|
| 334 |
+
root_url: If this component is loaded from an external Space, this is the URL of the Space.
|
| 335 |
+
hf_token: If this component is loaded from an external private Space, this is the access token for the Space
|
| 336 |
+
"""
|
| 337 |
+
if x is None:
|
| 338 |
+
return None
|
| 339 |
+
if isinstance(save_dir, Path):
|
| 340 |
+
save_dir = str(save_dir)
|
| 341 |
+
if isinstance(x, list):
|
| 342 |
+
return [
|
| 343 |
+
self._deserialize_single(
|
| 344 |
+
f, save_dir=save_dir, root_url=root_url, hf_token=hf_token
|
| 345 |
+
)
|
| 346 |
+
for f in x
|
| 347 |
+
]
|
| 348 |
+
else:
|
| 349 |
+
return self._deserialize_single(
|
| 350 |
+
x, save_dir=save_dir, root_url=root_url, hf_token=hf_token
|
| 351 |
+
)
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
class VideoSerializable(FileSerializable):
|
| 355 |
+
def serialized_info(self):
|
| 356 |
+
return {"type": "string", "description": "filepath or URL to video file"}
|
| 357 |
+
|
| 358 |
+
def api_info(self) -> dict[str, dict | bool]:
|
| 359 |
+
return {"info": serializer_types["FileSerializable"], "serialized_info": True}
|
| 360 |
+
|
| 361 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 362 |
+
return {
|
| 363 |
+
"raw": {"is_file": False, "data": media_data.BASE64_VIDEO},
|
| 364 |
+
"serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/video_sample.mp4",
|
| 365 |
+
}
|
| 366 |
+
|
| 367 |
+
def serialize(
|
| 368 |
+
self, x: str | None, load_dir: str | Path = ""
|
| 369 |
+
) -> tuple[FileData | None, None]:
|
| 370 |
+
return (super().serialize(x, load_dir), None) # type: ignore
|
| 371 |
+
|
| 372 |
+
def deserialize(
|
| 373 |
+
self,
|
| 374 |
+
x: tuple[FileData | None, FileData | None] | None,
|
| 375 |
+
save_dir: Path | str | None = None,
|
| 376 |
+
root_url: str | None = None,
|
| 377 |
+
hf_token: str | None = None,
|
| 378 |
+
) -> str | tuple[str | None, str | None] | None:
|
| 379 |
+
"""
|
| 380 |
+
Convert from serialized representation of a file (base64) to a human-friendly
|
| 381 |
+
version (string filepath). Optionally, save the file to the directory specified by `save_dir`
|
| 382 |
+
"""
|
| 383 |
+
if isinstance(x, (tuple, list)):
|
| 384 |
+
assert len(x) == 2, f"Expected tuple of length 2. Received: {x}"
|
| 385 |
+
x_as_list = [x[0], x[1]]
|
| 386 |
+
else:
|
| 387 |
+
raise ValueError(f"Expected tuple of length 2. Received: {x}")
|
| 388 |
+
deserialized_file = super().deserialize(x_as_list, save_dir, root_url, hf_token) # type: ignore
|
| 389 |
+
if isinstance(deserialized_file, list):
|
| 390 |
+
return deserialized_file[0] # ignore subtitles
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
class JSONSerializable(Serializable):
    """Serializer for components whose payload is arbitrary JSON stored in a file."""

    def serialized_info(self):
        return {"type": "string", "description": "filepath to JSON file"}

    def api_info(self) -> dict[str, dict | bool]:
        return {"info": serializer_types["JSONSerializable"], "serialized_info": True}

    def example_inputs(self) -> dict[str, Any]:
        return {
            "raw": {"a": 1, "b": 2},
            "serialized": None,
        }

    def serialize(
        self,
        x: str | None,
        load_dir: str | Path = "",
    ) -> dict | list | None:
        """Read the JSON file at `load_dir / x` and return the parsed object.

        Parameters:
            x: String path to the JSON file to read
            load_dir: Path to the directory containing x
        """
        is_empty = x is None or x == ""
        return None if is_empty else utils.file_to_json(Path(load_dir) / x)

    def deserialize(
        self,
        x: str | dict | list,
        save_dir: str | Path | None = None,
        root_url: str | None = None,
        hf_token: str | None = None,
    ) -> str | None:
        """Write the JSON payload `x` to a file and return the file's path.

        Parameters:
            x: JSON string or already-parsed object
            save_dir: Directory to save the resulting json file into
            root_url: Ignored
            hf_token: Ignored
        """
        if x is None:
            return None
        return utils.dict_or_str_to_json_file(x, dir=save_dir).name
+
|
| 442 |
+
|
| 443 |
+
class GallerySerializable(Serializable):
    """Serializer for image galleries: a directory of images plus a captions.json index."""

    def serialized_info(self):
        return {
            "type": "string",
            "description": "path to directory with images and a file associating images with captions called captions.json",
        }

    def api_info(self) -> dict[str, dict | bool]:
        return {
            "info": serializer_types["GallerySerializable"],
            "serialized_info": True,
        }

    def example_inputs(self) -> dict[str, Any]:
        return {
            "raw": [media_data.BASE64_IMAGE] * 2,
            "serialized": [
                "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
            ]
            * 2,
        }

    def serialize(
        self, x: str | None, load_dir: str | Path = ""
    ) -> list[list[str | None]] | None:
        """Read captions.json inside directory `x` and serialize each listed image."""
        if x is None or x == "":
            return None
        captions_path = Path(x) / "captions.json"
        with captions_path.open("r") as fp:
            caption_map = json.load(fp)
        # One [image, caption] pair per captions.json entry.
        return [
            [FileSerializable().serialize(fname), caption]
            for fname, caption in caption_map.items()
        ]

    def deserialize(
        self,
        x: list[list[str | None]] | None,
        save_dir: str = "",
        root_url: str | None = None,
        hf_token: str | None = None,
    ) -> None | str:
        """Materialize each image locally under a fresh directory and write captions.json;
        returns the absolute path of that directory."""
        if x is None:
            return None
        out_dir = Path(save_dir) / str(uuid.uuid4())
        out_dir.mkdir(exist_ok=True, parents=True)
        caption_map = {}
        for entry in x:
            if isinstance(entry, (list, tuple)):
                image, caption = entry
            else:
                image, caption = entry, None
            local_name = FileSerializable().deserialize(
                image, out_dir, root_url=root_url, hf_token=hf_token
            )
            caption_map[local_name] = caption
        with (out_dir / "captions.json").open("w") as fp:
            json.dump(caption_map, fp)
        return os.path.abspath(out_dir)
+
|
| 505 |
+
|
| 506 |
+
# Registry mapping serializer class names to classes. Walks two levels of
# subclasses under Serializable, so both direct subclasses and their children
# (e.g. VideoSerializable under FileSerializable) are registered.
SERIALIZER_MAPPING = {}
for cls in Serializable.__subclasses__():
    SERIALIZER_MAPPING[cls.__name__] = cls
    for subcls in cls.__subclasses__():
        SERIALIZER_MAPPING[subcls.__name__] = subcls

# Extra aliases — presumably names used by older app configs; confirm against callers.
SERIALIZER_MAPPING["Serializable"] = SimpleSerializable
SERIALIZER_MAPPING["File"] = FileSerializable
SERIALIZER_MAPPING["UploadButton"] = FileSerializable
+
|
| 516 |
+
# Maps gradio component names (lower-case, as they appear in an app config)
# to the serializer class that handles that component's payloads.
COMPONENT_MAPPING: dict[str, type] = {
    "textbox": StringSerializable,
    "number": NumberSerializable,
    "slider": NumberSerializable,
    "checkbox": BooleanSerializable,
    "checkboxgroup": ListStringSerializable,
    "radio": StringSerializable,
    "dropdown": SimpleSerializable,
    "image": ImgSerializable,
    "video": FileSerializable,
    "audio": FileSerializable,
    "file": FileSerializable,
    "dataframe": JSONSerializable,
    "timeseries": JSONSerializable,
    "state": SimpleSerializable,
    "button": StringSerializable,
    "uploadbutton": FileSerializable,
    "colorpicker": StringSerializable,
    "label": JSONSerializable,
    "highlightedtext": JSONSerializable,
    "json": JSONSerializable,
    "html": StringSerializable,
    "gallery": GallerySerializable,
    "chatbot": JSONSerializable,
    "model3d": FileSerializable,
    "plot": JSONSerializable,
    "barplot": JSONSerializable,
    "lineplot": JSONSerializable,
    "scatterplot": JSONSerializable,
    "markdown": StringSerializable,
    "dataset": StringSerializable,
    "code": StringSerializable,
    "interpretation": SimpleSerializable,
    "annotatedimage": JSONSerializable,
}
|
parrot/lib/python3.10/site-packages/gradio_client/types.json
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"SimpleSerializable": {
|
| 3 |
+
"type": {},
|
| 4 |
+
"description": "any valid value"
|
| 5 |
+
},
|
| 6 |
+
"StringSerializable": {
|
| 7 |
+
"type": "string"
|
| 8 |
+
},
|
| 9 |
+
"ListStringSerializable": {
|
| 10 |
+
"type": "array",
|
| 11 |
+
"items": {
|
| 12 |
+
"type": "string"
|
| 13 |
+
}
|
| 14 |
+
},
|
| 15 |
+
"BooleanSerializable": {
|
| 16 |
+
"type": "boolean"
|
| 17 |
+
},
|
| 18 |
+
"NumberSerializable": {
|
| 19 |
+
"type": "number"
|
| 20 |
+
},
|
| 21 |
+
"ImgSerializable": {
|
| 22 |
+
"type": "string",
|
| 23 |
+
"description": "base64 representation of an image"
|
| 24 |
+
},
|
| 25 |
+
"FileSerializable": {
|
| 26 |
+
"oneOf": [
|
| 27 |
+
{
|
| 28 |
+
"type": "string",
|
| 29 |
+
"description": "filepath or URL to file"
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"type": "object",
|
| 33 |
+
"properties": {
|
| 34 |
+
"name": { "type": "string", "description": "name of file" },
|
| 35 |
+
"data": {
|
| 36 |
+
"type": "string",
|
| 37 |
+
"description": "base64 representation of file"
|
| 38 |
+
},
|
| 39 |
+
"size": {
|
| 40 |
+
"type": "integer",
|
| 41 |
+
"description": "size of image in bytes"
|
| 42 |
+
},
|
| 43 |
+
"is_file": {
|
| 44 |
+
"type": "boolean",
|
| 45 |
+
"description": "true if the file has been uploaded to the server"
|
| 46 |
+
},
|
| 47 |
+
"orig_name": {
|
| 48 |
+
"type": "string",
|
| 49 |
+
"description": "original name of the file"
|
| 50 |
+
}
|
| 51 |
+
},
|
| 52 |
+
"required": ["name", "data"]
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"type": "array",
|
| 56 |
+
"items": {
|
| 57 |
+
"anyOf": [
|
| 58 |
+
{
|
| 59 |
+
"type": "string",
|
| 60 |
+
"description": "filepath or URL to file"
|
| 61 |
+
},
|
| 62 |
+
{
|
| 63 |
+
"type": "object",
|
| 64 |
+
"properties": {
|
| 65 |
+
"name": { "type": "string", "description": "name of file" },
|
| 66 |
+
"data": {
|
| 67 |
+
"type": "string",
|
| 68 |
+
"description": "base64 representation of file"
|
| 69 |
+
},
|
| 70 |
+
"size": {
|
| 71 |
+
"type": "integer",
|
| 72 |
+
"description": "size of image in bytes"
|
| 73 |
+
},
|
| 74 |
+
"is_file": {
|
| 75 |
+
"type": "boolean",
|
| 76 |
+
"description": "true if the file has been uploaded to the server"
|
| 77 |
+
},
|
| 78 |
+
"orig_name": {
|
| 79 |
+
"type": "string",
|
| 80 |
+
"description": "original name of the file"
|
| 81 |
+
}
|
| 82 |
+
},
|
| 83 |
+
"required": ["name", "data"]
|
| 84 |
+
}
|
| 85 |
+
]
|
| 86 |
+
}
|
| 87 |
+
}
|
| 88 |
+
]
|
| 89 |
+
},
|
| 90 |
+
"SingleFileSerializable": {
|
| 91 |
+
"oneOf": [
|
| 92 |
+
{
|
| 93 |
+
"type": "string",
|
| 94 |
+
"description": "filepath or URL to file"
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"type": "object",
|
| 98 |
+
"properties": {
|
| 99 |
+
"name": { "type": "string", "description": "name of file" },
|
| 100 |
+
"data": {
|
| 101 |
+
"type": "string",
|
| 102 |
+
"description": "base64 representation of file"
|
| 103 |
+
},
|
| 104 |
+
"size": {
|
| 105 |
+
"type": "integer",
|
| 106 |
+
"description": "size of image in bytes"
|
| 107 |
+
},
|
| 108 |
+
"is_file": {
|
| 109 |
+
"type": "boolean",
|
| 110 |
+
"description": "true if the file has been uploaded to the server"
|
| 111 |
+
},
|
| 112 |
+
"orig_name": {
|
| 113 |
+
"type": "string",
|
| 114 |
+
"description": "original name of the file"
|
| 115 |
+
}
|
| 116 |
+
},
|
| 117 |
+
"required": ["name", "data"]
|
| 118 |
+
}
|
| 119 |
+
]
|
| 120 |
+
},
|
| 121 |
+
"MultipleFileSerializable": {
|
| 122 |
+
"type": "array",
|
| 123 |
+
"items": {
|
| 124 |
+
"anyOf": [
|
| 125 |
+
{
|
| 126 |
+
"type": "string",
|
| 127 |
+
"description": "filepath or URL to file"
|
| 128 |
+
},
|
| 129 |
+
{
|
| 130 |
+
"type": "object",
|
| 131 |
+
"properties": {
|
| 132 |
+
"name": { "type": "string", "description": "name of file" },
|
| 133 |
+
"data": {
|
| 134 |
+
"type": "string",
|
| 135 |
+
"description": "base64 representation of file"
|
| 136 |
+
},
|
| 137 |
+
"size": {
|
| 138 |
+
"type": "integer",
|
| 139 |
+
"description": "size of image in bytes"
|
| 140 |
+
},
|
| 141 |
+
"is_file": {
|
| 142 |
+
"type": "boolean",
|
| 143 |
+
"description": "true if the file has been uploaded to the server"
|
| 144 |
+
},
|
| 145 |
+
"orig_name": {
|
| 146 |
+
"type": "string",
|
| 147 |
+
"description": "original name of the file"
|
| 148 |
+
}
|
| 149 |
+
},
|
| 150 |
+
"required": ["name", "data"]
|
| 151 |
+
}
|
| 152 |
+
]
|
| 153 |
+
}
|
| 154 |
+
},
|
| 155 |
+
"JSONSerializable": {
|
| 156 |
+
"type": {},
|
| 157 |
+
"description": "any valid json"
|
| 158 |
+
},
|
| 159 |
+
"GallerySerializable": {
|
| 160 |
+
"type": "array",
|
| 161 |
+
"items": {
|
| 162 |
+
"type": "array",
|
| 163 |
+
"items": false,
|
| 164 |
+
"maxSize": 2,
|
| 165 |
+
"minSize": 2,
|
| 166 |
+
"prefixItems": [
|
| 167 |
+
{
|
| 168 |
+
"type": "object",
|
| 169 |
+
"properties": {
|
| 170 |
+
"name": { "type": "string", "description": "name of file" },
|
| 171 |
+
"data": {
|
| 172 |
+
"type": "string",
|
| 173 |
+
"description": "base64 representation of file"
|
| 174 |
+
},
|
| 175 |
+
"size": {
|
| 176 |
+
"type": "integer",
|
| 177 |
+
"description": "size of image in bytes"
|
| 178 |
+
},
|
| 179 |
+
"is_file": {
|
| 180 |
+
"type": "boolean",
|
| 181 |
+
"description": "true if the file has been uploaded to the server"
|
| 182 |
+
},
|
| 183 |
+
"orig_name": {
|
| 184 |
+
"type": "string",
|
| 185 |
+
"description": "original name of the file"
|
| 186 |
+
}
|
| 187 |
+
},
|
| 188 |
+
"required": ["name", "data"]
|
| 189 |
+
},
|
| 190 |
+
{
|
| 191 |
+
"oneOf": [
|
| 192 |
+
{ "type": "string", "description": "caption of image" },
|
| 193 |
+
{ "type": "null" }
|
| 194 |
+
]
|
| 195 |
+
}
|
| 196 |
+
]
|
| 197 |
+
}
|
| 198 |
+
}
|
| 199 |
+
}
|
parrot/lib/python3.10/site-packages/gradio_client/utils.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import json
|
| 6 |
+
import mimetypes
|
| 7 |
+
import os
|
| 8 |
+
import pkgutil
|
| 9 |
+
import secrets
|
| 10 |
+
import shutil
|
| 11 |
+
import tempfile
|
| 12 |
+
import warnings
|
| 13 |
+
from concurrent.futures import CancelledError
|
| 14 |
+
from dataclasses import dataclass, field
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
from enum import Enum
|
| 17 |
+
from pathlib import Path
|
| 18 |
+
from threading import Lock
|
| 19 |
+
from typing import Any, Callable, Optional
|
| 20 |
+
|
| 21 |
+
import fsspec.asyn
|
| 22 |
+
import httpx
|
| 23 |
+
import huggingface_hub
|
| 24 |
+
import requests
|
| 25 |
+
from huggingface_hub import SpaceStage
|
| 26 |
+
from websockets.legacy.protocol import WebSocketCommonProtocol
|
| 27 |
+
|
| 28 |
+
# Gradio server endpoint paths, joined onto an app's base URL.
API_URL = "api/predict/"
WS_URL = "queue/join"
UPLOAD_URL = "upload"
CONFIG_URL = "config"
API_INFO_URL = "info"
RAW_API_INFO_URL = "info?serialize=False"
# External helper Space used to fetch a Space's API info (per its name/URL).
SPACE_FETCHER_URL = "https://gradio-space-api-fetcher-v2.hf.space/api"
RESET_URL = "reset"
# Template for a Space's public URL; filled with the "user/space" id.
SPACE_URL = "https://hf.space/{}"

# Component type that carries session state rather than user-visible data.
STATE_COMPONENT = "state"
# Space runtime stages in which the Space cannot serve predictions.
INVALID_RUNTIME = [
    SpaceStage.NO_APP_FILE,
    SpaceStage.CONFIG_ERROR,
    SpaceStage.BUILD_ERROR,
    SpaceStage.RUNTIME_ERROR,
    SpaceStage.PAUSED,
]

# Package version read from the bundled version.txt (empty string if the file is missing).
__version__ = (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip()
+
|
| 49 |
+
|
| 50 |
+
# Client-facing exception hierarchy: all inherit directly from Exception so
# callers can catch each condition specifically.
class TooManyRequestsError(Exception):
    """Raised when the API returns a 429 status code."""

    pass


class QueueError(Exception):
    """Raised when the queue is full or there is an issue adding a job to the queue."""

    pass


class InvalidAPIEndpointError(Exception):
    """Raised when the API endpoint is invalid."""

    pass


class SpaceDuplicationError(Exception):
    """Raised when something goes wrong with a Space Duplication."""

    pass
+
|
| 73 |
+
|
| 74 |
+
class Status(Enum):
    """Status codes presented to client users."""

    STARTING = "STARTING"
    JOINING_QUEUE = "JOINING_QUEUE"
    QUEUE_FULL = "QUEUE_FULL"
    IN_QUEUE = "IN_QUEUE"
    SENDING_DATA = "SENDING_DATA"
    PROCESSING = "PROCESSING"
    ITERATING = "ITERATING"
    PROGRESS = "PROGRESS"
    FINISHED = "FINISHED"
    CANCELLED = "CANCELLED"

    @staticmethod
    def ordering(status: Status) -> int:
        """Order of messages. Helpful for testing."""
        # Index in this list defines the lifecycle position used by __lt__.
        # Note PROGRESS sorts before ITERATING here, regardless of declaration order.
        order = [
            Status.STARTING,
            Status.JOINING_QUEUE,
            Status.QUEUE_FULL,
            Status.IN_QUEUE,
            Status.SENDING_DATA,
            Status.PROCESSING,
            Status.PROGRESS,
            Status.ITERATING,
            Status.FINISHED,
            Status.CANCELLED,
        ]
        return order.index(status)

    def __lt__(self, other: Status):
        # Statuses compare by lifecycle position, not by enum value.
        return self.ordering(self) < self.ordering(other)

    @staticmethod
    def msg_to_status(msg: str) -> Status:
        """Map the raw message from the backend to the status code presented to users."""
        # Raises KeyError for any backend message type not listed here.
        return {
            "send_hash": Status.JOINING_QUEUE,
            "queue_full": Status.QUEUE_FULL,
            "estimation": Status.IN_QUEUE,
            "send_data": Status.SENDING_DATA,
            "process_starts": Status.PROCESSING,
            "process_generating": Status.ITERATING,
            "process_completed": Status.FINISHED,
            "progress": Status.PROGRESS,
        }[msg]
+
|
| 122 |
+
|
| 123 |
+
@dataclass
class ProgressUnit:
    """One unit of fine-grained progress reported by the server for a running job."""

    index: Optional[int]
    length: Optional[int]
    unit: Optional[str]
    progress: Optional[float]
    desc: Optional[str]

    @classmethod
    def from_ws_msg(cls, data: list[dict]) -> list[ProgressUnit]:
        """Build ProgressUnits from the raw `progress_data` list of a websocket message;
        any field missing from a payload becomes None."""
        units = []
        for payload in data:
            units.append(
                cls(
                    index=payload.get("index"),
                    length=payload.get("length"),
                    unit=payload.get("unit"),
                    progress=payload.get("progress"),
                    desc=payload.get("desc"),
                )
            )
        return units
+
|
| 144 |
+
|
| 145 |
+
@dataclass
class StatusUpdate:
    """Update message sent from the worker thread to the Job on the main thread."""

    # Lifecycle stage of the request (see Status).
    code: Status
    # Position in the queue, if reported.
    rank: int | None
    # Total queue length reported by the server, if known.
    queue_size: int | None
    # Estimated time until the job runs (server-reported "rank_eta"), if any.
    eta: float | None
    # Whether the prediction succeeded; only populated by completion messages.
    success: bool | None
    # Wall-clock time this update was created.
    time: datetime | None
    # Fine-grained progress units, when the server sends progress messages.
    progress_data: list[ProgressUnit] | None
|
| 157 |
+
|
| 158 |
+
def create_initial_status_update():
    """Return the default StatusUpdate used before any server message arrives (STARTING)."""
    return StatusUpdate(
        code=Status.STARTING,
        rank=None,
        queue_size=None,
        eta=None,
        success=None,
        time=datetime.now(),
        progress_data=None,
    )
+
|
| 169 |
+
|
| 170 |
+
@dataclass
class JobStatus:
    """The job status.

    Keeps track of the latest status update and intermediate outputs (not yet implemented).
    """

    # Most recent StatusUpdate received from the worker thread.
    latest_status: StatusUpdate = field(default_factory=create_initial_status_update)
    # Intermediate (generator) outputs collected so far.
    outputs: list[Any] = field(default_factory=list)
+
|
| 180 |
+
|
| 181 |
+
@dataclass
class Communicator:
    """Helper class to help communicate between the worker thread and main thread."""

    # Guards reads/writes of `job` and `should_cancel` across threads.
    lock: Lock
    # Shared job state: updated by the worker, read by the main thread.
    job: JobStatus
    # Callable applied to raw prediction output before it is recorded on the job.
    prediction_processor: Callable[..., tuple]
    # Endpoint POSTed to when a running prediction must be reset after cancellation.
    reset_url: str
    # Set to True (while holding `lock`) to request cancellation of the running job.
    should_cancel: bool = False
+
|
| 191 |
+
|
| 192 |
+
########################
|
| 193 |
+
# Network utils
|
| 194 |
+
########################
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def is_http_url_like(possible_url: str) -> bool:
    """Return True if *possible_url* begins with an http:// or https:// scheme."""
    for scheme in ("http://", "https://"):
        if possible_url.startswith(scheme):
            return True
    return False
+
|
| 203 |
+
|
| 204 |
+
def probe_url(possible_url: str) -> bool:
    """Return True if *possible_url* answers a HEAD request with a success status;
    on 405 (method not allowed) falls back to a GET. Any exception yields False."""
    ua_headers = {"User-Agent": "gradio (https://gradio.app/; team@gradio.app)"}
    try:
        with requests.session() as session:
            head = session.head(possible_url, headers=ua_headers)
            if head.status_code != 405:
                return head.ok
            # Some servers reject HEAD outright; retry with GET.
            return session.get(possible_url, headers=ua_headers).ok
    except Exception:
        return False
+
|
| 218 |
+
|
| 219 |
+
def is_valid_url(possible_url: str) -> bool:
    """Deprecated check that *possible_url* both looks like HTTP(S) and actually responds."""
    warnings.warn(
        "is_valid_url should not be used. "
        "Use is_http_url_like() and probe_url(), as suitable, instead.",
    )
    if not is_http_url_like(possible_url):
        return False
    return probe_url(possible_url)
+
|
| 229 |
+
|
| 230 |
+
async def get_pred_from_ws(
    websocket: WebSocketCommonProtocol,
    data: str,
    hash_data: str,
    helper: Communicator | None = None,
) -> dict[str, Any]:
    """Drive one prediction over the queue websocket and return the final "output" dict.

    Sends `hash_data` / `data` when the server asks for them, forwards status
    updates (and intermediate outputs) to `helper` if provided, and honors
    cancellation requested through `helper.should_cancel`.

    Raises:
        QueueError: if the server reports the queue is full.
        CancelledError: if cancellation was requested via `helper`.
    """
    completed = False
    resp = {}
    while not completed:
        # Receive message in the background so that we can
        # cancel even while running a long pred
        task = asyncio.create_task(websocket.recv())
        while not task.done():
            if helper:
                with helper.lock:
                    if helper.should_cancel:
                        # Need to reset the iterator state since the client
                        # will not reset the session
                        async with httpx.AsyncClient() as http:
                            reset = http.post(
                                helper.reset_url, json=json.loads(hash_data)
                            )
                            # Retrieve cancel exception from task
                            # otherwise will get nasty warning in console
                            task.cancel()
                            await asyncio.gather(task, reset, return_exceptions=True)
                        raise CancelledError()
            # Need to suspend this coroutine so that task actually runs
            await asyncio.sleep(0.01)
        msg = task.result()
        resp = json.loads(msg)
        if helper:
            with helper.lock:
                # Publish a status snapshot (and any intermediate output) to the Job.
                has_progress = "progress_data" in resp
                status_update = StatusUpdate(
                    code=Status.msg_to_status(resp["msg"]),
                    queue_size=resp.get("queue_size"),
                    rank=resp.get("rank", None),
                    success=resp.get("success"),
                    time=datetime.now(),
                    eta=resp.get("rank_eta"),
                    progress_data=ProgressUnit.from_ws_msg(resp["progress_data"])
                    if has_progress
                    else None,
                )
                output = resp.get("output", {}).get("data", [])
                if output and status_update.code != Status.FINISHED:
                    # Intermediate (generator) output: post-process and record it;
                    # a processing failure is recorded as the exception itself.
                    try:
                        result = helper.prediction_processor(*output)
                    except Exception as e:
                        result = [e]
                    helper.job.outputs.append(result)
                helper.job.latest_status = status_update
        if resp["msg"] == "queue_full":
            raise QueueError("Queue is full! Please try again.")
        if resp["msg"] == "send_hash":
            await websocket.send(hash_data)
        elif resp["msg"] == "send_data":
            await websocket.send(data)
        completed = resp["msg"] == "process_completed"
    return resp["output"]
+
|
| 292 |
+
|
| 293 |
+
########################
|
| 294 |
+
# Data processing utils
|
| 295 |
+
########################
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def download_tmp_copy_of_file(
    url_path: str, hf_token: str | None = None, dir: str | None = None
) -> str:
    """Download *url_path* into a fresh random temp subdirectory and return the local path.

    An Authorization header is attached when *hf_token* is given. Raises
    requests.HTTPError for non-success responses.
    """
    if dir is not None:
        os.makedirs(dir, exist_ok=True)
    auth_headers = {"Authorization": "Bearer " + hf_token} if hf_token else {}
    target_dir = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20)
    target_dir.mkdir(exist_ok=True, parents=True)
    target = target_dir / Path(url_path).name

    with requests.get(url_path, headers=auth_headers, stream=True) as response:
        response.raise_for_status()
        with open(target, "wb") as out:
            shutil.copyfileobj(response.raw, out)
    return str(target.resolve())
+
|
| 314 |
+
|
| 315 |
+
def create_tmp_copy_of_file(file_path: str, dir: str | None = None) -> str:
    """Copy *file_path* (with metadata, via copy2) into a fresh random temp
    subdirectory and return the absolute path of the copy."""
    parent = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20)
    parent.mkdir(exist_ok=True, parents=True)
    target = parent / Path(file_path).name
    shutil.copy2(file_path, target)
    return str(target.resolve())
+
|
| 322 |
+
|
| 323 |
+
def get_mimetype(filename: str) -> str | None:
    """Guess the MIME type of *filename*; normalizes x-wav/x-flac to wav/flac.
    Returns None when the type cannot be guessed."""
    if filename.endswith(".vtt"):
        return "text/vtt"  # WebVTT is not in the mimetypes registry
    guessed, _ = mimetypes.guess_type(filename)
    if guessed is None:
        return None
    return guessed.replace("x-wav", "wav").replace("x-flac", "flac")
+
|
| 331 |
+
|
| 332 |
+
def get_extension(encoding: str) -> str | None:
    """Derive a file extension (without the dot) from *encoding* (e.g. a data-URL).
    Returns None when no MIME type can be guessed."""
    normalized = encoding.replace("audio/wav", "audio/x-wav")
    mime, _ = mimetypes.guess_type(normalized)
    if mime == "audio/flac":
        return "flac"  # flac is not supported by mimetypes
    if mime is None:
        return None
    ext = mimetypes.guess_extension(mime)
    return ext.lstrip(".") if ext is not None else None
+
|
| 344 |
+
|
| 345 |
+
def encode_file_to_base64(f: str | Path):
    """Read file *f* and return it as a data-URL string: data:<mime>;base64,<payload>.
    The mime segment is empty when the type cannot be guessed."""
    with open(f, "rb") as handle:
        payload = base64.b64encode(handle.read()).decode("utf-8")
    mime = get_mimetype(str(f))
    return f"data:{mime if mime is not None else ''};base64,{payload}"
+
|
| 357 |
+
|
| 358 |
+
def encode_url_to_base64(url: str):
    """Fetch *url* and return its body as a data-URL string.
    Raises requests.HTTPError for non-success responses."""
    response = requests.get(url)
    response.raise_for_status()
    payload = base64.b64encode(response.content).decode("utf-8")
    mime = get_mimetype(url)
    return f"data:{mime if mime is not None else ''};base64,{payload}"
+
|
| 368 |
+
|
| 369 |
+
def encode_url_or_file_to_base64(path: str | Path):
    """Dispatch to the URL or local-file base64 encoder, based on *path*'s form."""
    as_str = str(path)
    if is_http_url_like(as_str):
        return encode_url_to_base64(as_str)
    return encode_file_to_base64(as_str)
+
|
| 375 |
+
|
| 376 |
+
def decode_base64_to_binary(encoding: str) -> tuple[bytes, str | None]:
    """Split a data-URL into (decoded bytes, guessed extension). Accepts a bare
    base64 string as well (no comma): the whole string is decoded."""
    payload = encoding.rsplit(",", 1)[-1]
    return base64.b64decode(payload), get_extension(encoding)
+
|
| 381 |
+
|
| 382 |
+
def strip_invalid_filename_characters(filename: str, max_bytes: int = 200) -> str:
    """Keep only alphanumerics and '._- ', then trim trailing characters until the
    UTF-8 encoded size fits within *max_bytes*."""
    cleaned = "".join(ch for ch in filename if ch.isalnum() or ch in "._- ")
    # Trim whole characters (not raw bytes) so multibyte characters stay intact.
    while cleaned and len(cleaned.encode()) > max_bytes:
        cleaned = cleaned[:-1]
    return cleaned
+
|
| 394 |
+
|
| 395 |
+
def sanitize_parameter_names(original_name: str) -> str:
    """Cleans up a Python parameter name to make the API info more readable:
    drops punctuation, converts spaces to underscores, lower-cases the result."""
    kept = [ch for ch in original_name if ch.isalnum() or ch in " _"]
    return "".join(kept).replace(" ", "_").lower()
+
|
| 403 |
+
|
| 404 |
+
def decode_base64_to_file(
|
| 405 |
+
encoding: str,
|
| 406 |
+
file_path: str | None = None,
|
| 407 |
+
dir: str | Path | None = None,
|
| 408 |
+
prefix: str | None = None,
|
| 409 |
+
):
|
| 410 |
+
directory = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20)
|
| 411 |
+
directory.mkdir(exist_ok=True, parents=True)
|
| 412 |
+
data, extension = decode_base64_to_binary(encoding)
|
| 413 |
+
if file_path is not None and prefix is None:
|
| 414 |
+
filename = Path(file_path).name
|
| 415 |
+
prefix = filename
|
| 416 |
+
if "." in filename:
|
| 417 |
+
prefix = filename[0 : filename.index(".")]
|
| 418 |
+
extension = filename[filename.index(".") + 1 :]
|
| 419 |
+
|
| 420 |
+
if prefix is not None:
|
| 421 |
+
prefix = strip_invalid_filename_characters(prefix)
|
| 422 |
+
|
| 423 |
+
if extension is None:
|
| 424 |
+
file_obj = tempfile.NamedTemporaryFile(
|
| 425 |
+
delete=False, prefix=prefix, dir=directory
|
| 426 |
+
)
|
| 427 |
+
else:
|
| 428 |
+
file_obj = tempfile.NamedTemporaryFile(
|
| 429 |
+
delete=False,
|
| 430 |
+
prefix=prefix,
|
| 431 |
+
suffix="." + extension,
|
| 432 |
+
dir=directory,
|
| 433 |
+
)
|
| 434 |
+
file_obj.write(data)
|
| 435 |
+
file_obj.flush()
|
| 436 |
+
return file_obj
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def dict_or_str_to_json_file(jsn: str | dict | list, dir: str | Path | None = None):
|
| 440 |
+
if dir is not None:
|
| 441 |
+
os.makedirs(dir, exist_ok=True)
|
| 442 |
+
|
| 443 |
+
file_obj = tempfile.NamedTemporaryFile(
|
| 444 |
+
delete=False, suffix=".json", dir=dir, mode="w+"
|
| 445 |
+
)
|
| 446 |
+
if isinstance(jsn, str):
|
| 447 |
+
jsn = json.loads(jsn)
|
| 448 |
+
json.dump(jsn, file_obj)
|
| 449 |
+
file_obj.flush()
|
| 450 |
+
return file_obj
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
def file_to_json(file_path: str | Path) -> dict | list:
|
| 454 |
+
with open(file_path) as f:
|
| 455 |
+
return json.load(f)
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
###########################
|
| 459 |
+
# HuggingFace Hub API Utils
|
| 460 |
+
###########################
|
| 461 |
+
def set_space_timeout(
|
| 462 |
+
space_id: str,
|
| 463 |
+
hf_token: str | None = None,
|
| 464 |
+
timeout_in_seconds: int = 300,
|
| 465 |
+
):
|
| 466 |
+
headers = huggingface_hub.utils.build_hf_headers(
|
| 467 |
+
token=hf_token,
|
| 468 |
+
library_name="gradio_client",
|
| 469 |
+
library_version=__version__,
|
| 470 |
+
)
|
| 471 |
+
req = requests.post(
|
| 472 |
+
f"https://huggingface.co/api/spaces/{space_id}/sleeptime",
|
| 473 |
+
json={"seconds": timeout_in_seconds},
|
| 474 |
+
headers=headers,
|
| 475 |
+
)
|
| 476 |
+
try:
|
| 477 |
+
huggingface_hub.utils.hf_raise_for_status(req)
|
| 478 |
+
except huggingface_hub.utils.HfHubHTTPError as err:
|
| 479 |
+
raise SpaceDuplicationError(
|
| 480 |
+
f"Could not set sleep timeout on duplicated Space. Please visit {SPACE_URL.format(space_id)} "
|
| 481 |
+
"to set a timeout manually to reduce billing charges."
|
| 482 |
+
) from err
|
| 483 |
+
|
| 484 |
+
|
| 485 |
+
########################
|
| 486 |
+
# Misc utils
|
| 487 |
+
########################
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
def synchronize_async(func: Callable, *args, **kwargs) -> Any:
|
| 491 |
+
"""
|
| 492 |
+
Runs async functions in sync scopes. Can be used in any scope.
|
| 493 |
+
|
| 494 |
+
Example:
|
| 495 |
+
if inspect.iscoroutinefunction(block_fn.fn):
|
| 496 |
+
predictions = utils.synchronize_async(block_fn.fn, *processed_input)
|
| 497 |
+
|
| 498 |
+
Args:
|
| 499 |
+
func:
|
| 500 |
+
*args:
|
| 501 |
+
**kwargs:
|
| 502 |
+
"""
|
| 503 |
+
return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) # type: ignore
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
class APIInfoParseError(ValueError):
|
| 507 |
+
pass
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def get_type(schema: dict):
|
| 511 |
+
if "type" in schema:
|
| 512 |
+
return schema["type"]
|
| 513 |
+
elif schema.get("oneOf"):
|
| 514 |
+
return "oneOf"
|
| 515 |
+
elif schema.get("anyOf"):
|
| 516 |
+
return "anyOf"
|
| 517 |
+
else:
|
| 518 |
+
raise APIInfoParseError(f"Cannot parse type for {schema}")
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
def json_schema_to_python_type(schema: Any) -> str:
|
| 522 |
+
"""Convert the json schema into a python type hint"""
|
| 523 |
+
type_ = get_type(schema)
|
| 524 |
+
if type_ == {}:
|
| 525 |
+
if "json" in schema["description"]:
|
| 526 |
+
return "Dict[Any, Any]"
|
| 527 |
+
else:
|
| 528 |
+
return "Any"
|
| 529 |
+
elif type_ == "null":
|
| 530 |
+
return "None"
|
| 531 |
+
elif type_ == "integer":
|
| 532 |
+
return "int"
|
| 533 |
+
elif type_ == "string":
|
| 534 |
+
return "str"
|
| 535 |
+
elif type_ == "boolean":
|
| 536 |
+
return "bool"
|
| 537 |
+
elif type_ == "number":
|
| 538 |
+
return "int | float"
|
| 539 |
+
elif type_ == "array":
|
| 540 |
+
items = schema.get("items")
|
| 541 |
+
if "prefixItems" in items:
|
| 542 |
+
elements = ", ".join(
|
| 543 |
+
[json_schema_to_python_type(i) for i in items["prefixItems"]]
|
| 544 |
+
)
|
| 545 |
+
return f"Tuple[{elements}]"
|
| 546 |
+
else:
|
| 547 |
+
elements = json_schema_to_python_type(items)
|
| 548 |
+
return f"List[{elements}]"
|
| 549 |
+
elif type_ == "object":
|
| 550 |
+
des = ", ".join(
|
| 551 |
+
[
|
| 552 |
+
f"{n}: {json_schema_to_python_type(v)} ({v.get('description')})"
|
| 553 |
+
for n, v in schema["properties"].items()
|
| 554 |
+
]
|
| 555 |
+
)
|
| 556 |
+
return f"Dict({des})"
|
| 557 |
+
elif type_ in ["oneOf", "anyOf"]:
|
| 558 |
+
desc = " | ".join([json_schema_to_python_type(i) for i in schema[type_]])
|
| 559 |
+
return desc
|
| 560 |
+
else:
|
| 561 |
+
raise APIInfoParseError(f"Cannot parse schema {schema}")
|
parrot/lib/python3.10/site-packages/gradio_client/version.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
0.2.9
|
parrot/lib/python3.10/site-packages/hjson-3.1.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/hjson,sha256=BMDbBsC7xHTFEAmdzyvlziODAp-oO8mUkWoR1b0MkkI,221
|
| 2 |
+
hjson-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
hjson-3.1.0.dist-info/LICENSE.txt,sha256=WRxR9JxoM7zhpdU-Y3haBGet1nRaWogOTRD05kzETEw,10462
|
| 4 |
+
hjson-3.1.0.dist-info/METADATA,sha256=ws3nEqUXFpWNgmH-FYScGumUSgt2dkiPHNm7RZh-gcU,2610
|
| 5 |
+
hjson-3.1.0.dist-info/RECORD,,
|
| 6 |
+
hjson-3.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 7 |
+
hjson-3.1.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
| 8 |
+
hjson-3.1.0.dist-info/entry_points.txt,sha256=sghBTcTrqdK0kKuznOZXeEBtjoBnLweHF83S9NK-r0E,42
|
| 9 |
+
hjson-3.1.0.dist-info/top_level.txt,sha256=earxK7niVoW6ADlKRFHCs3JpUL4FBSnJn9qmrYgnEUE,6
|
| 10 |
+
hjson/__init__.py,sha256=Py1drAVt-Xpi2xJAHxVXDdX-NniC4llxZVPjY7MyPCY,26580
|
| 11 |
+
hjson/__pycache__/__init__.cpython-310.pyc,,
|
| 12 |
+
hjson/__pycache__/compat.cpython-310.pyc,,
|
| 13 |
+
hjson/__pycache__/decoder.cpython-310.pyc,,
|
| 14 |
+
hjson/__pycache__/encoder.cpython-310.pyc,,
|
| 15 |
+
hjson/__pycache__/encoderH.cpython-310.pyc,,
|
| 16 |
+
hjson/__pycache__/ordered_dict.cpython-310.pyc,,
|
| 17 |
+
hjson/__pycache__/scanner.cpython-310.pyc,,
|
| 18 |
+
hjson/__pycache__/tool.cpython-310.pyc,,
|
| 19 |
+
hjson/compat.py,sha256=uvmTYe1Oa18tT_6tFRtYnzTdAkyd73B9zwMF7ZafI04,1036
|
| 20 |
+
hjson/decoder.py,sha256=oHz7g2sQd4S-AJbSzur9VJWqAHDWL25FVQ-G35XlGCA,19563
|
| 21 |
+
hjson/encoder.py,sha256=NhR3YSMVzL3UP8OLAtP2Dr6dW9UbJzjVS-SXp1DzvcY,19168
|
| 22 |
+
hjson/encoderH.py,sha256=wJ8D0gAyR3n6e3MahCIU3OahI5_xuHPWV_ZlI839xCs,20481
|
| 23 |
+
hjson/ordered_dict.py,sha256=DXtgiqkkaNWXDLZ0DGXIjF_CPzGV5qpC-PSeS1zcps8,3370
|
| 24 |
+
hjson/scanner.py,sha256=IL8poQGvCsb82y7qY5jrlSrZ5xcENpPUti3tNKhprYw,1779
|
| 25 |
+
hjson/tests/__init__.py,sha256=_A-1Tn7q7ccNPro_QfbKiXo_bTL9ED5RUX9AeSLG4TA,2011
|
| 26 |
+
hjson/tests/__pycache__/__init__.cpython-310.pyc,,
|
| 27 |
+
hjson/tests/__pycache__/test_bigint_as_string.cpython-310.pyc,,
|
| 28 |
+
hjson/tests/__pycache__/test_bitsize_int_as_string.cpython-310.pyc,,
|
| 29 |
+
hjson/tests/__pycache__/test_check_circular.cpython-310.pyc,,
|
| 30 |
+
hjson/tests/__pycache__/test_decimal.cpython-310.pyc,,
|
| 31 |
+
hjson/tests/__pycache__/test_decode.cpython-310.pyc,,
|
| 32 |
+
hjson/tests/__pycache__/test_default.cpython-310.pyc,,
|
| 33 |
+
hjson/tests/__pycache__/test_dump.cpython-310.pyc,,
|
| 34 |
+
hjson/tests/__pycache__/test_encode_basestring_ascii.cpython-310.pyc,,
|
| 35 |
+
hjson/tests/__pycache__/test_errors.cpython-310.pyc,,
|
| 36 |
+
hjson/tests/__pycache__/test_fail.cpython-310.pyc,,
|
| 37 |
+
hjson/tests/__pycache__/test_float.cpython-310.pyc,,
|
| 38 |
+
hjson/tests/__pycache__/test_for_json.cpython-310.pyc,,
|
| 39 |
+
hjson/tests/__pycache__/test_hjson.cpython-310.pyc,,
|
| 40 |
+
hjson/tests/__pycache__/test_indent.cpython-310.pyc,,
|
| 41 |
+
hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc,,
|
| 42 |
+
hjson/tests/__pycache__/test_namedtuple.cpython-310.pyc,,
|
| 43 |
+
hjson/tests/__pycache__/test_pass1.cpython-310.pyc,,
|
| 44 |
+
hjson/tests/__pycache__/test_pass2.cpython-310.pyc,,
|
| 45 |
+
hjson/tests/__pycache__/test_pass3.cpython-310.pyc,,
|
| 46 |
+
hjson/tests/__pycache__/test_recursion.cpython-310.pyc,,
|
| 47 |
+
hjson/tests/__pycache__/test_scanstring.cpython-310.pyc,,
|
| 48 |
+
hjson/tests/__pycache__/test_separators.cpython-310.pyc,,
|
| 49 |
+
hjson/tests/__pycache__/test_tool.cpython-310.pyc,,
|
| 50 |
+
hjson/tests/__pycache__/test_tuple.cpython-310.pyc,,
|
| 51 |
+
hjson/tests/__pycache__/test_unicode.cpython-310.pyc,,
|
| 52 |
+
hjson/tests/test_bigint_as_string.py,sha256=bhEtYEXWUhxyi25iLm4sPWFrt5RZ9PfFEknX1cdzP-Y,2265
|
| 53 |
+
hjson/tests/test_bitsize_int_as_string.py,sha256=-73xJ8CE2hDjGOWTERRrYbDHaY0kgBNQXC0g8nIgd4k,2332
|
| 54 |
+
hjson/tests/test_check_circular.py,sha256=64kZhsab6OcwYmJNLRqNW-19dp1UdgYbZiGzopKyR9s,940
|
| 55 |
+
hjson/tests/test_decimal.py,sha256=Qw0IBPSPYoGZXwvXkkM1cz6lpqjkPRzNDBSul-RdR_4,2556
|
| 56 |
+
hjson/tests/test_decode.py,sha256=Sm4052xVjv7ZtZFdRVMsnvQeh2eCNoXv24YOUJJLMdg,4437
|
| 57 |
+
hjson/tests/test_default.py,sha256=WWDLhDVfih4PrenmiEcvshhUOl_bNsm3jML96-AtGmo,224
|
| 58 |
+
hjson/tests/test_dump.py,sha256=5WU4Rd6vsHOwXGpGqQKIw1ZBNgRWUqMY8w3DnJVWfxo,5061
|
| 59 |
+
hjson/tests/test_encode_basestring_ascii.py,sha256=up4y9JMdGXdBXkEjfqwiG-sudSdcKw0RQfO_76za-To,2102
|
| 60 |
+
hjson/tests/test_errors.py,sha256=vg3-z36T9O-UeDHG4ZtW-nQBNAvraWKBrDA70yG989c,1549
|
| 61 |
+
hjson/tests/test_fail.py,sha256=Giinb944NX0bPwBHYUjVZ4ZlNB611Wg0wxVWxv4bDaU,5688
|
| 62 |
+
hjson/tests/test_float.py,sha256=LCUL-2xT8PYq99jQi6-Ddk9pMuC1mLrcJboTfvR08HM,1011
|
| 63 |
+
hjson/tests/test_for_json.py,sha256=ZLtypdX0ALctxMB8c3fQvx3k9OHY5t71gBxGNOXemrc,2778
|
| 64 |
+
hjson/tests/test_hjson.py,sha256=CdvXR05nu8bF_jZ-Hhj3bh8LRi8tdSJTruayj69HoDk,2327
|
| 65 |
+
hjson/tests/test_indent.py,sha256=8oUK5E8DTz1c3RkUU-nOELmr9wOKoaHHOAsxDai66iE,2589
|
| 66 |
+
hjson/tests/test_item_sort_key.py,sha256=piYy-ntwdcb_qS-y8jPFI6rVZlHCNqtTFGnaZSEvWH8,1134
|
| 67 |
+
hjson/tests/test_namedtuple.py,sha256=iK7B95JH4f2L3_MB3rY9NagEVZ1X62JHpjlm1J4t5uM,4066
|
| 68 |
+
hjson/tests/test_pass1.py,sha256=wdnBz55dY4ou8IIdZFypJ_72J6HCtLZw0YesoNOTopQ,1745
|
| 69 |
+
hjson/tests/test_pass2.py,sha256=O389C8IeJ9ysqWKUftXOeHvBi5_47VSdBM_2bCee1SQ,385
|
| 70 |
+
hjson/tests/test_pass3.py,sha256=wwemRholrfv4lwgC0ArnXiS_tfA-NnbRrZqo71YZXaA,481
|
| 71 |
+
hjson/tests/test_recursion.py,sha256=APQAtnchO6KZdduYqvEJDrBOmAKqyORwAX4ldbzAW7A,1694
|
| 72 |
+
hjson/tests/test_scanstring.py,sha256=U-5gUsaO7jEai6zfKMKADfBi-xFg8IHC1dBeElvTsn4,5953
|
| 73 |
+
hjson/tests/test_separators.py,sha256=krudtyZbx0k7OrZZ-jGCZH6yrmsj1seRkUblQLRq4Rw,945
|
| 74 |
+
hjson/tests/test_tool.py,sha256=PyDhrmMTpJSvBXvsWG8P-yrw1BT5kSp6OcSrMLvzmIw,2826
|
| 75 |
+
hjson/tests/test_tuple.py,sha256=FkOyw4s7WZ1HUGxtHYiwaBwhW2thlDB3JFSpUZCbI_I,1976
|
| 76 |
+
hjson/tests/test_unicode.py,sha256=5npz25rj4T3ZhXF_OsSmVveBUtTuz92DQ_cHDe-Jfrw,7099
|
| 77 |
+
hjson/tool.py,sha256=PVlr-lSDWfrGdkh5exWQo_21HGgHLR4OpZ1S41Dqjgo,1897
|
parrot/lib/python3.10/site-packages/hjson-3.1.0.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
hjson = hjson.tool:main
|
parrot/lib/python3.10/site-packages/narwhals-1.8.3.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
parrot/lib/python3.10/site-packages/narwhals-1.8.3.dist-info/METADATA
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.3
|
| 2 |
+
Name: narwhals
|
| 3 |
+
Version: 1.8.3
|
| 4 |
+
Summary: Extremely lightweight compatibility layer between dataframe libraries
|
| 5 |
+
Project-URL: Homepage, https://github.com/narwhals-dev/narwhals
|
| 6 |
+
Project-URL: Bug Tracker, https://github.com/narwhals-dev/narwhals
|
| 7 |
+
Author-email: Marco Gorelli <33491632+MarcoGorelli@users.noreply.github.com>
|
| 8 |
+
License-File: LICENSE.md
|
| 9 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 10 |
+
Classifier: Operating System :: OS Independent
|
| 11 |
+
Classifier: Programming Language :: Python :: 3
|
| 12 |
+
Requires-Python: >=3.8
|
| 13 |
+
Provides-Extra: cudf
|
| 14 |
+
Requires-Dist: cudf>=23.08.00; extra == 'cudf'
|
| 15 |
+
Provides-Extra: dask
|
| 16 |
+
Requires-Dist: dask[dataframe]>=2024.7; extra == 'dask'
|
| 17 |
+
Provides-Extra: modin
|
| 18 |
+
Requires-Dist: modin; extra == 'modin'
|
| 19 |
+
Provides-Extra: pandas
|
| 20 |
+
Requires-Dist: pandas>=0.25.3; extra == 'pandas'
|
| 21 |
+
Provides-Extra: polars
|
| 22 |
+
Requires-Dist: polars>=0.20.3; extra == 'polars'
|
| 23 |
+
Provides-Extra: pyarrow
|
| 24 |
+
Requires-Dist: pyarrow>=11.0.0; extra == 'pyarrow'
|
| 25 |
+
Description-Content-Type: text/markdown
|
| 26 |
+
|
| 27 |
+
# Narwhals
|
| 28 |
+
|
| 29 |
+
<h1 align="center">
|
| 30 |
+
<img
|
| 31 |
+
width="400"
|
| 32 |
+
alt="narwhals_small"
|
| 33 |
+
src="https://github.com/narwhals-dev/narwhals/assets/33491632/26be901e-5383-49f2-9fbd-5c97b7696f27">
|
| 34 |
+
</h1>
|
| 35 |
+
|
| 36 |
+
[](https://badge.fury.io/py/narwhals)
|
| 37 |
+
[](https://pepy.tech/project/narwhals)
|
| 38 |
+
|
| 39 |
+
Extremely lightweight and extensible compatibility layer between dataframe libraries!
|
| 40 |
+
|
| 41 |
+
- **Full API support**: cuDF, Modin, pandas, Polars, PyArrow
|
| 42 |
+
- **Interchange-level support**: Ibis, Vaex, anything else which implements the DataFrame Interchange Protocol
|
| 43 |
+
|
| 44 |
+
Seamlessly support all, without depending on any!
|
| 45 |
+
|
| 46 |
+
- ✅ **Just use** a subset of **the Polars API**, no need to learn anything new
|
| 47 |
+
- ✅ **Zero dependencies**, Narwhals only uses what
|
| 48 |
+
the user passes in so your library can stay lightweight
|
| 49 |
+
- ✅ Separate **lazy** and eager APIs, use **expressions**
|
| 50 |
+
- ✅ Support pandas' complicated type system and index, without
|
| 51 |
+
either getting in the way
|
| 52 |
+
- ✅ **100% branch coverage**, tested against pandas and Polars nightly builds
|
| 53 |
+
- ✅ **Negligible overhead**, see [overhead](https://narwhals-dev.github.io/narwhals/overhead/)
|
| 54 |
+
- ✅ Let your IDE help you thanks to **full static typing**, see [typing](https://narwhals-dev.github.io/narwhals/api-reference/typing/)
|
| 55 |
+
- ✅ **Perfect backwards compatibility policy**,
|
| 56 |
+
see [stable api](https://narwhals-dev.github.io/narwhals/backcompat/) for how to opt-in
|
| 57 |
+
|
| 58 |
+
Get started!
|
| 59 |
+
|
| 60 |
+
- [Read the documentation](https://narwhals-dev.github.io/narwhals/)
|
| 61 |
+
- [Chat with us on Discord!](https://discord.gg/V3PqtB4VA4)
|
| 62 |
+
- [Join our community call](https://calendar.google.com/calendar/embed?src=27ff6dc5f598c1d94c1f6e627a1aaae680e2fac88f848bda1f2c7946ae74d5ab%40group.calendar.google.com)
|
| 63 |
+
- [Read the contributing guide](https://github.com/narwhals-dev/narwhals/blob/main/CONTRIBUTING.md)
|
| 64 |
+
|
| 65 |
+
## Used by / integrates with
|
| 66 |
+
|
| 67 |
+
Join the party!
|
| 68 |
+
|
| 69 |
+
- [Altair](https://github.com/vega/altair/)
|
| 70 |
+
- [Hamilton](https://github.com/DAGWorks-Inc/hamilton/tree/main/examples/narwhals)
|
| 71 |
+
- [scikit-lego](https://github.com/koaning/scikit-lego)
|
| 72 |
+
- [scikit-playtime](https://github.com/koaning/scikit-playtime)
|
| 73 |
+
- [timebasedcv](https://github.com/FBruzzesi/timebasedcv)
|
| 74 |
+
|
| 75 |
+
Feel free to add your project to the list if it's missing, and/or
|
| 76 |
+
[chat with us on Discord](https://discord.gg/V3PqtB4VA4) if you'd like any support.
|
| 77 |
+
|
| 78 |
+
## Installation
|
| 79 |
+
|
| 80 |
+
- pip (recommended, as it's the most up-to-date)
|
| 81 |
+
```
|
| 82 |
+
pip install narwhals
|
| 83 |
+
```
|
| 84 |
+
- conda-forge (also fine, but the latest version may take longer to appear)
|
| 85 |
+
```
|
| 86 |
+
conda install -c conda-forge narwhals
|
| 87 |
+
```
|
| 88 |
+
|
| 89 |
+
## Usage
|
| 90 |
+
|
| 91 |
+
There are three steps to writing dataframe-agnostic code using Narwhals:
|
| 92 |
+
|
| 93 |
+
1. use `narwhals.from_native` to wrap a pandas/Polars/Modin/cuDF/PyArrow
|
| 94 |
+
DataFrame/LazyFrame in a Narwhals class
|
| 95 |
+
2. use the [subset of the Polars API supported by Narwhals](https://narwhals-dev.github.io/narwhals/api-reference/)
|
| 96 |
+
3. use `narwhals.to_native` to return an object to the user in its original
|
| 97 |
+
dataframe flavour. For example:
|
| 98 |
+
|
| 99 |
+
- if you started with pandas, you'll get pandas back
|
| 100 |
+
- if you started with Polars, you'll get Polars back
|
| 101 |
+
- if you started with Modin, you'll get Modin back (and compute will be distributed)
|
| 102 |
+
- if you started with cuDF, you'll get cuDF back (and compute will happen on GPU)
|
| 103 |
+
- if you started with PyArrow, you'll get PyArrow back
|
| 104 |
+
|
| 105 |
+
<h1 align="left">
|
| 106 |
+
<img
|
| 107 |
+
width="600"
|
| 108 |
+
alt="narwhals_gif"
|
| 109 |
+
src="https://github.com/user-attachments/assets/88292d3c-6359-4155-973d-d0f8e3fbf5ac">
|
| 110 |
+
|
| 111 |
+
</h1>
|
| 112 |
+
|
| 113 |
+
## Example
|
| 114 |
+
|
| 115 |
+
See the [tutorial](https://narwhals-dev.github.io/narwhals/basics/dataframe/) for several examples!
|
| 116 |
+
|
| 117 |
+
## Scope
|
| 118 |
+
|
| 119 |
+
- Do you maintain a dataframe-consuming library?
|
| 120 |
+
- Do you have a specific Polars function in mind that you would like Narwhals to have in order to make your work easier?
|
| 121 |
+
|
| 122 |
+
If you said yes to both, we'd love to hear from you!
|
| 123 |
+
|
| 124 |
+
## Sponsors and institutional partners
|
| 125 |
+
|
| 126 |
+
Narwhals is 100% independent, community-driven, and community-owned.
|
| 127 |
+
We are extremely grateful to the following organisations for having
|
| 128 |
+
provided some funding / development time:
|
| 129 |
+
|
| 130 |
+
- [Quansight Labs](https://labs.quansight.org)
|
| 131 |
+
- [Quansight Futures](https://www.qi.ventures)
|
| 132 |
+
- [OpenTeams](https://www.openteams.com)
|
| 133 |
+
- [POSSEE initiative](https://possee.org)
|
| 134 |
+
- [BYU-Idaho](https://www.byui.edu)
|
| 135 |
+
|
| 136 |
+
If you contribute to Narwhals on your organization's time, please let us know. We'd be happy to add your employer
|
| 137 |
+
to this list!
|
| 138 |
+
|
| 139 |
+
## Appears on
|
| 140 |
+
|
| 141 |
+
Narwhals has been featured in several talks, podcasts, and blog posts:
|
| 142 |
+
|
| 143 |
+
- [Talk Python to me Podcast](https://youtu.be/FSH7BZ0tuE0)
|
| 144 |
+
Ahoy, Narwhals are bridging the data science APIs
|
| 145 |
+
|
| 146 |
+
- [Super Data Science: ML & AI Podcast](https://www.youtube.com/watch?v=TeG4U8R0U8U)
|
| 147 |
+
Narwhals: For Pandas-to-Polars DataFrame Compatibility
|
| 148 |
+
|
| 149 |
+
- [Sample Space Podcast | probabl](https://youtu.be/8hYdq4sWbbQ?si=WG0QP1CZ6gkFf18b)
|
| 150 |
+
How Narwhals has many end users ... that never use it directly. - Marco Gorelli
|
| 151 |
+
|
| 152 |
+
- [Pycon Lithuania](https://www.youtube.com/watch?v=-mdx7Cn6_6E)
|
| 153 |
+
Marco Gorelli - DataFrame interoperatiblity - what's been achieved, and what comes next?
|
| 154 |
+
|
| 155 |
+
- [Pycon Italy](https://www.youtube.com/watch?v=3IqUli9XsmQ)
|
| 156 |
+
How you can write a dataframe-agnostic library - Marco Gorelli
|
| 157 |
+
|
| 158 |
+
- [Polars Blog Post](https://pola.rs/posts/lightweight_plotting/)
|
| 159 |
+
Polars has a new lightweight plotting backend
|
| 160 |
+
|
| 161 |
+
- [Quansight Labs blog post (w/ Scikit-Lego)](https://labs.quansight.org/blog/scikit-lego-narwhals)
|
| 162 |
+
How Narwhals and scikit-lego came together to achieve dataframe-agnosticism
|
| 163 |
+
|
| 164 |
+
## Why "Narwhals"?
|
| 165 |
+
|
| 166 |
+
[Coz they are so awesome](https://youtu.be/ykwqXuMPsoc?si=A-i8LdR38teYsos4).
|
| 167 |
+
|
| 168 |
+
Thanks to [Olha Urdeichuk](https://www.fiverr.com/olhaurdeichuk) for the illustration!
|
parrot/lib/python3.10/site-packages/narwhals-1.8.3.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.25.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The MIT License (MIT)
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2014-2022 Matthew Brennan Jones <matthew.brennan.jones@gmail.com>
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 6 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 7 |
+
the Software without restriction, including without limitation the rights to
|
| 8 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
| 9 |
+
the Software, and to permit persons to whom the Software is furnished to do so,
|
| 10 |
+
subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
| 17 |
+
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
| 18 |
+
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
| 19 |
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
| 20 |
+
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: py-cpuinfo
|
| 3 |
+
Version: 9.0.0
|
| 4 |
+
Summary: Get CPU info with pure Python
|
| 5 |
+
Home-page: https://github.com/workhorsy/py-cpuinfo
|
| 6 |
+
Author: Matthew Brennan Jones
|
| 7 |
+
Author-email: matthew.brennan.jones@gmail.com
|
| 8 |
+
License: MIT
|
| 9 |
+
Platform: UNKNOWN
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Topic :: Utilities
|
| 12 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 13 |
+
Classifier: Programming Language :: Python :: 3
|
| 14 |
+
License-File: LICENSE
|
| 15 |
+
|
| 16 |
+
py-cpuinfo
|
| 17 |
+
==========
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
Py-cpuinfo gets CPU info with pure Python. Py-cpuinfo should work
|
| 21 |
+
without any extra programs or libraries, beyond what your OS provides.
|
| 22 |
+
It does not require any compilation(C/C++, assembly, et cetera) to use.
|
| 23 |
+
It works with Python 3.
|
| 24 |
+
|
| 25 |
+
Documentation can be viewed here: https://github.com/workhorsy/py-cpuinfo
|
| 26 |
+
|
| 27 |
+
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/cpuinfo,sha256=TFMOnqUj_85fFg6zgair4_gFfJAKY_omPPxgnPuNd4I,218
|
| 2 |
+
cpuinfo/__init__.py,sha256=T6gndqGAggfJCu4_iOziTnomCN7KzaAK_OYTewE4FMA,44
|
| 3 |
+
cpuinfo/__main__.py,sha256=nSxC6Hqhi-0lN7Z4WwtKdxQdf3cUJefb5hOahCzh4Yg,33
|
| 4 |
+
cpuinfo/__pycache__/__init__.cpython-310.pyc,,
|
| 5 |
+
cpuinfo/__pycache__/__main__.cpython-310.pyc,,
|
| 6 |
+
cpuinfo/__pycache__/cpuinfo.cpython-310.pyc,,
|
| 7 |
+
cpuinfo/cpuinfo.py,sha256=HHyDlDUNovE3QzJ3hviiM1ngyOC4iD7i6oGiz2iTmVk,84388
|
| 8 |
+
py_cpuinfo-9.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 9 |
+
py_cpuinfo-9.0.0.dist-info/LICENSE,sha256=3br3Y5a_XHqkWXWiHq_i4i7st9paoNt8sOYVL6r-800,1127
|
| 10 |
+
py_cpuinfo-9.0.0.dist-info/METADATA,sha256=rRFelvhFdoYcXnXXYDAbgdIxQ8_iVUa5lUHgEmU3ncE,794
|
| 11 |
+
py_cpuinfo-9.0.0.dist-info/RECORD,,
|
| 12 |
+
py_cpuinfo-9.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 13 |
+
py_cpuinfo-9.0.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
| 14 |
+
py_cpuinfo-9.0.0.dist-info/entry_points.txt,sha256=ZwrsclY_xUA0xJZK98bLxBdcowxnkK0ANYUT4FYcZJ8,42
|
| 15 |
+
py_cpuinfo-9.0.0.dist-info/top_level.txt,sha256=XsjpunhkxD4hvznqQjrFNw0rtgizHEOGzewPZY3UEtU,8
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/REQUESTED
ADDED
|
File without changes
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
cpuinfo = cpuinfo:main
|
| 3 |
+
|
parrot/lib/python3.10/site-packages/py_cpuinfo-9.0.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
cpuinfo
|
parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.3
|
| 2 |
+
Name: Pygments
|
| 3 |
+
Version: 2.18.0
|
| 4 |
+
Summary: Pygments is a syntax highlighting package written in Python.
|
| 5 |
+
Project-URL: Homepage, https://pygments.org
|
| 6 |
+
Project-URL: Documentation, https://pygments.org/docs
|
| 7 |
+
Project-URL: Source, https://github.com/pygments/pygments
|
| 8 |
+
Project-URL: Bug Tracker, https://github.com/pygments/pygments/issues
|
| 9 |
+
Project-URL: Changelog, https://github.com/pygments/pygments/blob/master/CHANGES
|
| 10 |
+
Author-email: Georg Brandl <georg@python.org>
|
| 11 |
+
Maintainer: Matthäus G. Chajdas
|
| 12 |
+
Maintainer-email: Georg Brandl <georg@python.org>, Jean Abou Samra <jean@abou-samra.fr>
|
| 13 |
+
License: BSD-2-Clause
|
| 14 |
+
License-File: AUTHORS
|
| 15 |
+
License-File: LICENSE
|
| 16 |
+
Keywords: syntax highlighting
|
| 17 |
+
Classifier: Development Status :: 6 - Mature
|
| 18 |
+
Classifier: Intended Audience :: Developers
|
| 19 |
+
Classifier: Intended Audience :: End Users/Desktop
|
| 20 |
+
Classifier: Intended Audience :: System Administrators
|
| 21 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 22 |
+
Classifier: Operating System :: OS Independent
|
| 23 |
+
Classifier: Programming Language :: Python
|
| 24 |
+
Classifier: Programming Language :: Python :: 3
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 29 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 30 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 31 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 32 |
+
Classifier: Topic :: Text Processing :: Filters
|
| 33 |
+
Classifier: Topic :: Utilities
|
| 34 |
+
Requires-Python: >=3.8
|
| 35 |
+
Provides-Extra: plugins
|
| 36 |
+
Provides-Extra: windows-terminal
|
| 37 |
+
Requires-Dist: colorama>=0.4.6; extra == 'windows-terminal'
|
| 38 |
+
Description-Content-Type: text/x-rst
|
| 39 |
+
|
| 40 |
+
Pygments
|
| 41 |
+
~~~~~~~~
|
| 42 |
+
|
| 43 |
+
Pygments is a syntax highlighting package written in Python.
|
| 44 |
+
|
| 45 |
+
It is a generic syntax highlighter suitable for use in code hosting, forums,
|
| 46 |
+
wikis or other applications that need to prettify source code. Highlights
|
| 47 |
+
are:
|
| 48 |
+
|
| 49 |
+
* a wide range of over 500 languages and other text formats is supported
|
| 50 |
+
* special attention is paid to details, increasing quality by a fair amount
|
| 51 |
+
* support for new languages and formats are added easily
|
| 52 |
+
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
|
| 53 |
+
formats that PIL supports and ANSI sequences
|
| 54 |
+
* it is usable as a command-line tool and as a library
|
| 55 |
+
|
| 56 |
+
Copyright 2006-2024 by the Pygments team, see ``AUTHORS``.
|
| 57 |
+
Licensed under the BSD, see ``LICENSE`` for details.
|
parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,663 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/pygmentize,sha256=Qib3E_bw3Xp6moTVKSaq7P6HhybwxsAEARBiwvSCKiQ,227
|
| 2 |
+
pygments-2.18.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
pygments-2.18.0.dist-info/METADATA,sha256=WWViMDj3GoEuYl9sIbkmA7zv0m0H5sJTUXUSeY73bs8,2461
|
| 4 |
+
pygments-2.18.0.dist-info/RECORD,,
|
| 5 |
+
pygments-2.18.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
pygments-2.18.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
|
| 7 |
+
pygments-2.18.0.dist-info/entry_points.txt,sha256=uUXw-XhMKBEX4pWcCtpuTTnPhL3h7OEE2jWi51VQsa8,53
|
| 8 |
+
pygments-2.18.0.dist-info/licenses/AUTHORS,sha256=REECiHykNMqFuqEk366APNvWe6irSUOqok67E3Evtzw,10500
|
| 9 |
+
pygments-2.18.0.dist-info/licenses/LICENSE,sha256=qdZvHVJt8C4p3Oc0NtNOVuhjL0bCdbvf_HBWnogvnxc,1331
|
| 10 |
+
pygments/__init__.py,sha256=d9try3r146jyYauE2BNZ8aJEYfsJlVdS9Bqv-fZZ5Pw,2959
|
| 11 |
+
pygments/__main__.py,sha256=Wgnr5lhKPtecgPavxdwRzAAaZoe-fCuq6LAEBbBBXd0,348
|
| 12 |
+
pygments/__pycache__/__init__.cpython-310.pyc,,
|
| 13 |
+
pygments/__pycache__/__main__.cpython-310.pyc,,
|
| 14 |
+
pygments/__pycache__/cmdline.cpython-310.pyc,,
|
| 15 |
+
pygments/__pycache__/console.cpython-310.pyc,,
|
| 16 |
+
pygments/__pycache__/filter.cpython-310.pyc,,
|
| 17 |
+
pygments/__pycache__/formatter.cpython-310.pyc,,
|
| 18 |
+
pygments/__pycache__/lexer.cpython-310.pyc,,
|
| 19 |
+
pygments/__pycache__/modeline.cpython-310.pyc,,
|
| 20 |
+
pygments/__pycache__/plugin.cpython-310.pyc,,
|
| 21 |
+
pygments/__pycache__/regexopt.cpython-310.pyc,,
|
| 22 |
+
pygments/__pycache__/scanner.cpython-310.pyc,,
|
| 23 |
+
pygments/__pycache__/sphinxext.cpython-310.pyc,,
|
| 24 |
+
pygments/__pycache__/style.cpython-310.pyc,,
|
| 25 |
+
pygments/__pycache__/token.cpython-310.pyc,,
|
| 26 |
+
pygments/__pycache__/unistring.cpython-310.pyc,,
|
| 27 |
+
pygments/__pycache__/util.cpython-310.pyc,,
|
| 28 |
+
pygments/cmdline.py,sha256=smQpJmIeqyPQ_xZ3Z9-O5FYDxTfngGg8icWV3hIcIIk,23536
|
| 29 |
+
pygments/console.py,sha256=yhP9UsLAVmWKVQf2446JJewkA7AiXeeTf4Ieg3Oi2fU,1718
|
| 30 |
+
pygments/filter.py,sha256=_ADNPCskD8_GmodHi6_LoVgPU3Zh336aBCT5cOeTMs0,1910
|
| 31 |
+
pygments/filters/__init__.py,sha256=IuhzMxGWWMN9tYBZuO2VD602_Ai24ONZXc1cDKtFHPk,40344
|
| 32 |
+
pygments/filters/__pycache__/__init__.cpython-310.pyc,,
|
| 33 |
+
pygments/formatter.py,sha256=oPaCNqrEy_aGBGt-7zwzm9KpYs6v5tyfCd5nua233D4,4366
|
| 34 |
+
pygments/formatters/__init__.py,sha256=C8E9WC8QpfxpCQSarFf_3KTaz21xEzw5N4ETSiv15zE,5349
|
| 35 |
+
pygments/formatters/__pycache__/__init__.cpython-310.pyc,,
|
| 36 |
+
pygments/formatters/__pycache__/_mapping.cpython-310.pyc,,
|
| 37 |
+
pygments/formatters/__pycache__/bbcode.cpython-310.pyc,,
|
| 38 |
+
pygments/formatters/__pycache__/groff.cpython-310.pyc,,
|
| 39 |
+
pygments/formatters/__pycache__/html.cpython-310.pyc,,
|
| 40 |
+
pygments/formatters/__pycache__/img.cpython-310.pyc,,
|
| 41 |
+
pygments/formatters/__pycache__/irc.cpython-310.pyc,,
|
| 42 |
+
pygments/formatters/__pycache__/latex.cpython-310.pyc,,
|
| 43 |
+
pygments/formatters/__pycache__/other.cpython-310.pyc,,
|
| 44 |
+
pygments/formatters/__pycache__/pangomarkup.cpython-310.pyc,,
|
| 45 |
+
pygments/formatters/__pycache__/rtf.cpython-310.pyc,,
|
| 46 |
+
pygments/formatters/__pycache__/svg.cpython-310.pyc,,
|
| 47 |
+
pygments/formatters/__pycache__/terminal.cpython-310.pyc,,
|
| 48 |
+
pygments/formatters/__pycache__/terminal256.cpython-310.pyc,,
|
| 49 |
+
pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176
|
| 50 |
+
pygments/formatters/bbcode.py,sha256=EG993T9F65-VaRmq39A3vbmX-x8DyQh1hN4nAzqV6xg,3296
|
| 51 |
+
pygments/formatters/groff.py,sha256=kEt7COeP_Cy5lqWUTO7w31bMyx56cpKG9ZPkN4RSMy4,5082
|
| 52 |
+
pygments/formatters/html.py,sha256=bUbdrazRL2ONmkHWw1m2e6DHEIzwvJUogfiL_YhQT-U,35633
|
| 53 |
+
pygments/formatters/img.py,sha256=V5fZUT6SKFnLJAxen7zVJw12EyXJyyLuOsiObTaGKCQ,23263
|
| 54 |
+
pygments/formatters/irc.py,sha256=z1fWHg4G4sS8OMqY2N9b4k7k8bzJaKTkxo7Me8JQZGI,4945
|
| 55 |
+
pygments/formatters/latex.py,sha256=Y2Nqn6a_QDxrlRju4_SYLQywJVsGBRtsHJ8Sup1hA4A,19258
|
| 56 |
+
pygments/formatters/other.py,sha256=V2hrUtWYSS_klWzx1-h03aTxTkeBS4LEiVqy9ByPlKg,4986
|
| 57 |
+
pygments/formatters/pangomarkup.py,sha256=JtIpELl-WIaZeoTJhfLbR7SkXmOhAp6I66lfRTPlxOA,2206
|
| 58 |
+
pygments/formatters/rtf.py,sha256=Ffx6wgAqXr8iy4DhUkRiZlNiZ5QCe39fxPFvFScCOQ0,11921
|
| 59 |
+
pygments/formatters/svg.py,sha256=JCIpG3y9p83APRD0vOSGU6rdsukv9rbZr81QpMIQlMw,7138
|
| 60 |
+
pygments/formatters/terminal.py,sha256=B7X3InVYlDu142nk2wUkJcDtbKMLT1PA-Tfk49UE0j8,4626
|
| 61 |
+
pygments/formatters/terminal256.py,sha256=B8t9Y8WJTgO2LWBGMXcRz2VfKySHLR-ZhGNuGQnDn98,11717
|
| 62 |
+
pygments/lexer.py,sha256=9WU0ZHRI3mmVGymUCTgdVYNM3RRPUo-x_ijCGD1r8ho,35109
|
| 63 |
+
pygments/lexers/__init__.py,sha256=svzhbvkX4ny7I2ALRRxd1yKzfRaUaGrY2Z0wrCNnd74,12067
|
| 64 |
+
pygments/lexers/__pycache__/__init__.cpython-310.pyc,,
|
| 65 |
+
pygments/lexers/__pycache__/_ada_builtins.cpython-310.pyc,,
|
| 66 |
+
pygments/lexers/__pycache__/_asy_builtins.cpython-310.pyc,,
|
| 67 |
+
pygments/lexers/__pycache__/_cl_builtins.cpython-310.pyc,,
|
| 68 |
+
pygments/lexers/__pycache__/_cocoa_builtins.cpython-310.pyc,,
|
| 69 |
+
pygments/lexers/__pycache__/_csound_builtins.cpython-310.pyc,,
|
| 70 |
+
pygments/lexers/__pycache__/_css_builtins.cpython-310.pyc,,
|
| 71 |
+
pygments/lexers/__pycache__/_julia_builtins.cpython-310.pyc,,
|
| 72 |
+
pygments/lexers/__pycache__/_lasso_builtins.cpython-310.pyc,,
|
| 73 |
+
pygments/lexers/__pycache__/_lilypond_builtins.cpython-310.pyc,,
|
| 74 |
+
pygments/lexers/__pycache__/_lua_builtins.cpython-310.pyc,,
|
| 75 |
+
pygments/lexers/__pycache__/_luau_builtins.cpython-310.pyc,,
|
| 76 |
+
pygments/lexers/__pycache__/_mapping.cpython-310.pyc,,
|
| 77 |
+
pygments/lexers/__pycache__/_mql_builtins.cpython-310.pyc,,
|
| 78 |
+
pygments/lexers/__pycache__/_mysql_builtins.cpython-310.pyc,,
|
| 79 |
+
pygments/lexers/__pycache__/_openedge_builtins.cpython-310.pyc,,
|
| 80 |
+
pygments/lexers/__pycache__/_php_builtins.cpython-310.pyc,,
|
| 81 |
+
pygments/lexers/__pycache__/_postgres_builtins.cpython-310.pyc,,
|
| 82 |
+
pygments/lexers/__pycache__/_qlik_builtins.cpython-310.pyc,,
|
| 83 |
+
pygments/lexers/__pycache__/_scheme_builtins.cpython-310.pyc,,
|
| 84 |
+
pygments/lexers/__pycache__/_scilab_builtins.cpython-310.pyc,,
|
| 85 |
+
pygments/lexers/__pycache__/_sourcemod_builtins.cpython-310.pyc,,
|
| 86 |
+
pygments/lexers/__pycache__/_stan_builtins.cpython-310.pyc,,
|
| 87 |
+
pygments/lexers/__pycache__/_stata_builtins.cpython-310.pyc,,
|
| 88 |
+
pygments/lexers/__pycache__/_tsql_builtins.cpython-310.pyc,,
|
| 89 |
+
pygments/lexers/__pycache__/_usd_builtins.cpython-310.pyc,,
|
| 90 |
+
pygments/lexers/__pycache__/_vbscript_builtins.cpython-310.pyc,,
|
| 91 |
+
pygments/lexers/__pycache__/_vim_builtins.cpython-310.pyc,,
|
| 92 |
+
pygments/lexers/__pycache__/actionscript.cpython-310.pyc,,
|
| 93 |
+
pygments/lexers/__pycache__/ada.cpython-310.pyc,,
|
| 94 |
+
pygments/lexers/__pycache__/agile.cpython-310.pyc,,
|
| 95 |
+
pygments/lexers/__pycache__/algebra.cpython-310.pyc,,
|
| 96 |
+
pygments/lexers/__pycache__/ambient.cpython-310.pyc,,
|
| 97 |
+
pygments/lexers/__pycache__/amdgpu.cpython-310.pyc,,
|
| 98 |
+
pygments/lexers/__pycache__/ampl.cpython-310.pyc,,
|
| 99 |
+
pygments/lexers/__pycache__/apdlexer.cpython-310.pyc,,
|
| 100 |
+
pygments/lexers/__pycache__/apl.cpython-310.pyc,,
|
| 101 |
+
pygments/lexers/__pycache__/archetype.cpython-310.pyc,,
|
| 102 |
+
pygments/lexers/__pycache__/arrow.cpython-310.pyc,,
|
| 103 |
+
pygments/lexers/__pycache__/arturo.cpython-310.pyc,,
|
| 104 |
+
pygments/lexers/__pycache__/asc.cpython-310.pyc,,
|
| 105 |
+
pygments/lexers/__pycache__/asm.cpython-310.pyc,,
|
| 106 |
+
pygments/lexers/__pycache__/asn1.cpython-310.pyc,,
|
| 107 |
+
pygments/lexers/__pycache__/automation.cpython-310.pyc,,
|
| 108 |
+
pygments/lexers/__pycache__/bare.cpython-310.pyc,,
|
| 109 |
+
pygments/lexers/__pycache__/basic.cpython-310.pyc,,
|
| 110 |
+
pygments/lexers/__pycache__/bdd.cpython-310.pyc,,
|
| 111 |
+
pygments/lexers/__pycache__/berry.cpython-310.pyc,,
|
| 112 |
+
pygments/lexers/__pycache__/bibtex.cpython-310.pyc,,
|
| 113 |
+
pygments/lexers/__pycache__/blueprint.cpython-310.pyc,,
|
| 114 |
+
pygments/lexers/__pycache__/boa.cpython-310.pyc,,
|
| 115 |
+
pygments/lexers/__pycache__/bqn.cpython-310.pyc,,
|
| 116 |
+
pygments/lexers/__pycache__/business.cpython-310.pyc,,
|
| 117 |
+
pygments/lexers/__pycache__/c_cpp.cpython-310.pyc,,
|
| 118 |
+
pygments/lexers/__pycache__/c_like.cpython-310.pyc,,
|
| 119 |
+
pygments/lexers/__pycache__/capnproto.cpython-310.pyc,,
|
| 120 |
+
pygments/lexers/__pycache__/carbon.cpython-310.pyc,,
|
| 121 |
+
pygments/lexers/__pycache__/cddl.cpython-310.pyc,,
|
| 122 |
+
pygments/lexers/__pycache__/chapel.cpython-310.pyc,,
|
| 123 |
+
pygments/lexers/__pycache__/clean.cpython-310.pyc,,
|
| 124 |
+
pygments/lexers/__pycache__/comal.cpython-310.pyc,,
|
| 125 |
+
pygments/lexers/__pycache__/compiled.cpython-310.pyc,,
|
| 126 |
+
pygments/lexers/__pycache__/configs.cpython-310.pyc,,
|
| 127 |
+
pygments/lexers/__pycache__/console.cpython-310.pyc,,
|
| 128 |
+
pygments/lexers/__pycache__/cplint.cpython-310.pyc,,
|
| 129 |
+
pygments/lexers/__pycache__/crystal.cpython-310.pyc,,
|
| 130 |
+
pygments/lexers/__pycache__/csound.cpython-310.pyc,,
|
| 131 |
+
pygments/lexers/__pycache__/css.cpython-310.pyc,,
|
| 132 |
+
pygments/lexers/__pycache__/d.cpython-310.pyc,,
|
| 133 |
+
pygments/lexers/__pycache__/dalvik.cpython-310.pyc,,
|
| 134 |
+
pygments/lexers/__pycache__/data.cpython-310.pyc,,
|
| 135 |
+
pygments/lexers/__pycache__/dax.cpython-310.pyc,,
|
| 136 |
+
pygments/lexers/__pycache__/devicetree.cpython-310.pyc,,
|
| 137 |
+
pygments/lexers/__pycache__/diff.cpython-310.pyc,,
|
| 138 |
+
pygments/lexers/__pycache__/dns.cpython-310.pyc,,
|
| 139 |
+
pygments/lexers/__pycache__/dotnet.cpython-310.pyc,,
|
| 140 |
+
pygments/lexers/__pycache__/dsls.cpython-310.pyc,,
|
| 141 |
+
pygments/lexers/__pycache__/dylan.cpython-310.pyc,,
|
| 142 |
+
pygments/lexers/__pycache__/ecl.cpython-310.pyc,,
|
| 143 |
+
pygments/lexers/__pycache__/eiffel.cpython-310.pyc,,
|
| 144 |
+
pygments/lexers/__pycache__/elm.cpython-310.pyc,,
|
| 145 |
+
pygments/lexers/__pycache__/elpi.cpython-310.pyc,,
|
| 146 |
+
pygments/lexers/__pycache__/email.cpython-310.pyc,,
|
| 147 |
+
pygments/lexers/__pycache__/erlang.cpython-310.pyc,,
|
| 148 |
+
pygments/lexers/__pycache__/esoteric.cpython-310.pyc,,
|
| 149 |
+
pygments/lexers/__pycache__/ezhil.cpython-310.pyc,,
|
| 150 |
+
pygments/lexers/__pycache__/factor.cpython-310.pyc,,
|
| 151 |
+
pygments/lexers/__pycache__/fantom.cpython-310.pyc,,
|
| 152 |
+
pygments/lexers/__pycache__/felix.cpython-310.pyc,,
|
| 153 |
+
pygments/lexers/__pycache__/fift.cpython-310.pyc,,
|
| 154 |
+
pygments/lexers/__pycache__/floscript.cpython-310.pyc,,
|
| 155 |
+
pygments/lexers/__pycache__/forth.cpython-310.pyc,,
|
| 156 |
+
pygments/lexers/__pycache__/fortran.cpython-310.pyc,,
|
| 157 |
+
pygments/lexers/__pycache__/foxpro.cpython-310.pyc,,
|
| 158 |
+
pygments/lexers/__pycache__/freefem.cpython-310.pyc,,
|
| 159 |
+
pygments/lexers/__pycache__/func.cpython-310.pyc,,
|
| 160 |
+
pygments/lexers/__pycache__/functional.cpython-310.pyc,,
|
| 161 |
+
pygments/lexers/__pycache__/futhark.cpython-310.pyc,,
|
| 162 |
+
pygments/lexers/__pycache__/gcodelexer.cpython-310.pyc,,
|
| 163 |
+
pygments/lexers/__pycache__/gdscript.cpython-310.pyc,,
|
| 164 |
+
pygments/lexers/__pycache__/go.cpython-310.pyc,,
|
| 165 |
+
pygments/lexers/__pycache__/grammar_notation.cpython-310.pyc,,
|
| 166 |
+
pygments/lexers/__pycache__/graph.cpython-310.pyc,,
|
| 167 |
+
pygments/lexers/__pycache__/graphics.cpython-310.pyc,,
|
| 168 |
+
pygments/lexers/__pycache__/graphql.cpython-310.pyc,,
|
| 169 |
+
pygments/lexers/__pycache__/graphviz.cpython-310.pyc,,
|
| 170 |
+
pygments/lexers/__pycache__/gsql.cpython-310.pyc,,
|
| 171 |
+
pygments/lexers/__pycache__/haskell.cpython-310.pyc,,
|
| 172 |
+
pygments/lexers/__pycache__/haxe.cpython-310.pyc,,
|
| 173 |
+
pygments/lexers/__pycache__/hdl.cpython-310.pyc,,
|
| 174 |
+
pygments/lexers/__pycache__/hexdump.cpython-310.pyc,,
|
| 175 |
+
pygments/lexers/__pycache__/html.cpython-310.pyc,,
|
| 176 |
+
pygments/lexers/__pycache__/idl.cpython-310.pyc,,
|
| 177 |
+
pygments/lexers/__pycache__/igor.cpython-310.pyc,,
|
| 178 |
+
pygments/lexers/__pycache__/inferno.cpython-310.pyc,,
|
| 179 |
+
pygments/lexers/__pycache__/installers.cpython-310.pyc,,
|
| 180 |
+
pygments/lexers/__pycache__/int_fiction.cpython-310.pyc,,
|
| 181 |
+
pygments/lexers/__pycache__/iolang.cpython-310.pyc,,
|
| 182 |
+
pygments/lexers/__pycache__/j.cpython-310.pyc,,
|
| 183 |
+
pygments/lexers/__pycache__/javascript.cpython-310.pyc,,
|
| 184 |
+
pygments/lexers/__pycache__/jmespath.cpython-310.pyc,,
|
| 185 |
+
pygments/lexers/__pycache__/jslt.cpython-310.pyc,,
|
| 186 |
+
pygments/lexers/__pycache__/jsonnet.cpython-310.pyc,,
|
| 187 |
+
pygments/lexers/__pycache__/jsx.cpython-310.pyc,,
|
| 188 |
+
pygments/lexers/__pycache__/julia.cpython-310.pyc,,
|
| 189 |
+
pygments/lexers/__pycache__/jvm.cpython-310.pyc,,
|
| 190 |
+
pygments/lexers/__pycache__/kuin.cpython-310.pyc,,
|
| 191 |
+
pygments/lexers/__pycache__/kusto.cpython-310.pyc,,
|
| 192 |
+
pygments/lexers/__pycache__/ldap.cpython-310.pyc,,
|
| 193 |
+
pygments/lexers/__pycache__/lean.cpython-310.pyc,,
|
| 194 |
+
pygments/lexers/__pycache__/lilypond.cpython-310.pyc,,
|
| 195 |
+
pygments/lexers/__pycache__/lisp.cpython-310.pyc,,
|
| 196 |
+
pygments/lexers/__pycache__/macaulay2.cpython-310.pyc,,
|
| 197 |
+
pygments/lexers/__pycache__/make.cpython-310.pyc,,
|
| 198 |
+
pygments/lexers/__pycache__/markup.cpython-310.pyc,,
|
| 199 |
+
pygments/lexers/__pycache__/math.cpython-310.pyc,,
|
| 200 |
+
pygments/lexers/__pycache__/matlab.cpython-310.pyc,,
|
| 201 |
+
pygments/lexers/__pycache__/maxima.cpython-310.pyc,,
|
| 202 |
+
pygments/lexers/__pycache__/meson.cpython-310.pyc,,
|
| 203 |
+
pygments/lexers/__pycache__/mime.cpython-310.pyc,,
|
| 204 |
+
pygments/lexers/__pycache__/minecraft.cpython-310.pyc,,
|
| 205 |
+
pygments/lexers/__pycache__/mips.cpython-310.pyc,,
|
| 206 |
+
pygments/lexers/__pycache__/ml.cpython-310.pyc,,
|
| 207 |
+
pygments/lexers/__pycache__/modeling.cpython-310.pyc,,
|
| 208 |
+
pygments/lexers/__pycache__/modula2.cpython-310.pyc,,
|
| 209 |
+
pygments/lexers/__pycache__/mojo.cpython-310.pyc,,
|
| 210 |
+
pygments/lexers/__pycache__/monte.cpython-310.pyc,,
|
| 211 |
+
pygments/lexers/__pycache__/mosel.cpython-310.pyc,,
|
| 212 |
+
pygments/lexers/__pycache__/ncl.cpython-310.pyc,,
|
| 213 |
+
pygments/lexers/__pycache__/nimrod.cpython-310.pyc,,
|
| 214 |
+
pygments/lexers/__pycache__/nit.cpython-310.pyc,,
|
| 215 |
+
pygments/lexers/__pycache__/nix.cpython-310.pyc,,
|
| 216 |
+
pygments/lexers/__pycache__/oberon.cpython-310.pyc,,
|
| 217 |
+
pygments/lexers/__pycache__/objective.cpython-310.pyc,,
|
| 218 |
+
pygments/lexers/__pycache__/ooc.cpython-310.pyc,,
|
| 219 |
+
pygments/lexers/__pycache__/openscad.cpython-310.pyc,,
|
| 220 |
+
pygments/lexers/__pycache__/other.cpython-310.pyc,,
|
| 221 |
+
pygments/lexers/__pycache__/parasail.cpython-310.pyc,,
|
| 222 |
+
pygments/lexers/__pycache__/parsers.cpython-310.pyc,,
|
| 223 |
+
pygments/lexers/__pycache__/pascal.cpython-310.pyc,,
|
| 224 |
+
pygments/lexers/__pycache__/pawn.cpython-310.pyc,,
|
| 225 |
+
pygments/lexers/__pycache__/perl.cpython-310.pyc,,
|
| 226 |
+
pygments/lexers/__pycache__/phix.cpython-310.pyc,,
|
| 227 |
+
pygments/lexers/__pycache__/php.cpython-310.pyc,,
|
| 228 |
+
pygments/lexers/__pycache__/pointless.cpython-310.pyc,,
|
| 229 |
+
pygments/lexers/__pycache__/pony.cpython-310.pyc,,
|
| 230 |
+
pygments/lexers/__pycache__/praat.cpython-310.pyc,,
|
| 231 |
+
pygments/lexers/__pycache__/procfile.cpython-310.pyc,,
|
| 232 |
+
pygments/lexers/__pycache__/prolog.cpython-310.pyc,,
|
| 233 |
+
pygments/lexers/__pycache__/promql.cpython-310.pyc,,
|
| 234 |
+
pygments/lexers/__pycache__/prql.cpython-310.pyc,,
|
| 235 |
+
pygments/lexers/__pycache__/ptx.cpython-310.pyc,,
|
| 236 |
+
pygments/lexers/__pycache__/python.cpython-310.pyc,,
|
| 237 |
+
pygments/lexers/__pycache__/q.cpython-310.pyc,,
|
| 238 |
+
pygments/lexers/__pycache__/qlik.cpython-310.pyc,,
|
| 239 |
+
pygments/lexers/__pycache__/qvt.cpython-310.pyc,,
|
| 240 |
+
pygments/lexers/__pycache__/r.cpython-310.pyc,,
|
| 241 |
+
pygments/lexers/__pycache__/rdf.cpython-310.pyc,,
|
| 242 |
+
pygments/lexers/__pycache__/rebol.cpython-310.pyc,,
|
| 243 |
+
pygments/lexers/__pycache__/resource.cpython-310.pyc,,
|
| 244 |
+
pygments/lexers/__pycache__/ride.cpython-310.pyc,,
|
| 245 |
+
pygments/lexers/__pycache__/rita.cpython-310.pyc,,
|
| 246 |
+
pygments/lexers/__pycache__/rnc.cpython-310.pyc,,
|
| 247 |
+
pygments/lexers/__pycache__/roboconf.cpython-310.pyc,,
|
| 248 |
+
pygments/lexers/__pycache__/robotframework.cpython-310.pyc,,
|
| 249 |
+
pygments/lexers/__pycache__/ruby.cpython-310.pyc,,
|
| 250 |
+
pygments/lexers/__pycache__/rust.cpython-310.pyc,,
|
| 251 |
+
pygments/lexers/__pycache__/sas.cpython-310.pyc,,
|
| 252 |
+
pygments/lexers/__pycache__/savi.cpython-310.pyc,,
|
| 253 |
+
pygments/lexers/__pycache__/scdoc.cpython-310.pyc,,
|
| 254 |
+
pygments/lexers/__pycache__/scripting.cpython-310.pyc,,
|
| 255 |
+
pygments/lexers/__pycache__/sgf.cpython-310.pyc,,
|
| 256 |
+
pygments/lexers/__pycache__/shell.cpython-310.pyc,,
|
| 257 |
+
pygments/lexers/__pycache__/sieve.cpython-310.pyc,,
|
| 258 |
+
pygments/lexers/__pycache__/slash.cpython-310.pyc,,
|
| 259 |
+
pygments/lexers/__pycache__/smalltalk.cpython-310.pyc,,
|
| 260 |
+
pygments/lexers/__pycache__/smithy.cpython-310.pyc,,
|
| 261 |
+
pygments/lexers/__pycache__/smv.cpython-310.pyc,,
|
| 262 |
+
pygments/lexers/__pycache__/snobol.cpython-310.pyc,,
|
| 263 |
+
pygments/lexers/__pycache__/solidity.cpython-310.pyc,,
|
| 264 |
+
pygments/lexers/__pycache__/soong.cpython-310.pyc,,
|
| 265 |
+
pygments/lexers/__pycache__/sophia.cpython-310.pyc,,
|
| 266 |
+
pygments/lexers/__pycache__/special.cpython-310.pyc,,
|
| 267 |
+
pygments/lexers/__pycache__/spice.cpython-310.pyc,,
|
| 268 |
+
pygments/lexers/__pycache__/sql.cpython-310.pyc,,
|
| 269 |
+
pygments/lexers/__pycache__/srcinfo.cpython-310.pyc,,
|
| 270 |
+
pygments/lexers/__pycache__/stata.cpython-310.pyc,,
|
| 271 |
+
pygments/lexers/__pycache__/supercollider.cpython-310.pyc,,
|
| 272 |
+
pygments/lexers/__pycache__/tact.cpython-310.pyc,,
|
| 273 |
+
pygments/lexers/__pycache__/tal.cpython-310.pyc,,
|
| 274 |
+
pygments/lexers/__pycache__/tcl.cpython-310.pyc,,
|
| 275 |
+
pygments/lexers/__pycache__/teal.cpython-310.pyc,,
|
| 276 |
+
pygments/lexers/__pycache__/templates.cpython-310.pyc,,
|
| 277 |
+
pygments/lexers/__pycache__/teraterm.cpython-310.pyc,,
|
| 278 |
+
pygments/lexers/__pycache__/testing.cpython-310.pyc,,
|
| 279 |
+
pygments/lexers/__pycache__/text.cpython-310.pyc,,
|
| 280 |
+
pygments/lexers/__pycache__/textedit.cpython-310.pyc,,
|
| 281 |
+
pygments/lexers/__pycache__/textfmts.cpython-310.pyc,,
|
| 282 |
+
pygments/lexers/__pycache__/theorem.cpython-310.pyc,,
|
| 283 |
+
pygments/lexers/__pycache__/thingsdb.cpython-310.pyc,,
|
| 284 |
+
pygments/lexers/__pycache__/tlb.cpython-310.pyc,,
|
| 285 |
+
pygments/lexers/__pycache__/tls.cpython-310.pyc,,
|
| 286 |
+
pygments/lexers/__pycache__/tnt.cpython-310.pyc,,
|
| 287 |
+
pygments/lexers/__pycache__/trafficscript.cpython-310.pyc,,
|
| 288 |
+
pygments/lexers/__pycache__/typoscript.cpython-310.pyc,,
|
| 289 |
+
pygments/lexers/__pycache__/typst.cpython-310.pyc,,
|
| 290 |
+
pygments/lexers/__pycache__/ul4.cpython-310.pyc,,
|
| 291 |
+
pygments/lexers/__pycache__/unicon.cpython-310.pyc,,
|
| 292 |
+
pygments/lexers/__pycache__/urbi.cpython-310.pyc,,
|
| 293 |
+
pygments/lexers/__pycache__/usd.cpython-310.pyc,,
|
| 294 |
+
pygments/lexers/__pycache__/varnish.cpython-310.pyc,,
|
| 295 |
+
pygments/lexers/__pycache__/verification.cpython-310.pyc,,
|
| 296 |
+
pygments/lexers/__pycache__/verifpal.cpython-310.pyc,,
|
| 297 |
+
pygments/lexers/__pycache__/vip.cpython-310.pyc,,
|
| 298 |
+
pygments/lexers/__pycache__/vyper.cpython-310.pyc,,
|
| 299 |
+
pygments/lexers/__pycache__/web.cpython-310.pyc,,
|
| 300 |
+
pygments/lexers/__pycache__/webassembly.cpython-310.pyc,,
|
| 301 |
+
pygments/lexers/__pycache__/webidl.cpython-310.pyc,,
|
| 302 |
+
pygments/lexers/__pycache__/webmisc.cpython-310.pyc,,
|
| 303 |
+
pygments/lexers/__pycache__/wgsl.cpython-310.pyc,,
|
| 304 |
+
pygments/lexers/__pycache__/whiley.cpython-310.pyc,,
|
| 305 |
+
pygments/lexers/__pycache__/wowtoc.cpython-310.pyc,,
|
| 306 |
+
pygments/lexers/__pycache__/wren.cpython-310.pyc,,
|
| 307 |
+
pygments/lexers/__pycache__/x10.cpython-310.pyc,,
|
| 308 |
+
pygments/lexers/__pycache__/xorg.cpython-310.pyc,,
|
| 309 |
+
pygments/lexers/__pycache__/yang.cpython-310.pyc,,
|
| 310 |
+
pygments/lexers/__pycache__/yara.cpython-310.pyc,,
|
| 311 |
+
pygments/lexers/__pycache__/zig.cpython-310.pyc,,
|
| 312 |
+
pygments/lexers/_ada_builtins.py,sha256=hnh_MphEm4skkHsAGGlVSp9-IG1IvA-Kkkplc54-WPI,1543
|
| 313 |
+
pygments/lexers/_asy_builtins.py,sha256=nwBhZjbOkPUFReF22JgCde_AR6oaUBr1TEwIJDgxtec,27287
|
| 314 |
+
pygments/lexers/_cl_builtins.py,sha256=gwESyMaPG0eSdwgr9uNUuEMmjX43Z5Nav5z9pDGnhMA,13994
|
| 315 |
+
pygments/lexers/_cocoa_builtins.py,sha256=wa8Z6xRSVHiETfxU2WOuY4O9VN9ogH8PoVw4IpnTIYA,105173
|
| 316 |
+
pygments/lexers/_csound_builtins.py,sha256=vr4PKbi_2T5iSdWLfqARILPB0Z-Wc_CPGjNaHvQhySQ,18414
|
| 317 |
+
pygments/lexers/_css_builtins.py,sha256=5O6M8QQQ5518sqLorWlafT_eoN6mIbkT6OogR7gAq6M,12446
|
| 318 |
+
pygments/lexers/_julia_builtins.py,sha256=f_mAv1J9t-4kA85bUlf5iazlZrjBj8X39LMrg6bOBAc,11883
|
| 319 |
+
pygments/lexers/_lasso_builtins.py,sha256=a7MYoYE1yAgwyl7pHFpSZazz6VQLRI5pLCV8nn1xcvk,134510
|
| 320 |
+
pygments/lexers/_lilypond_builtins.py,sha256=HOmm1nxJwnJIMKJ0IcCdZeAWF1a2-g2_0_lmGbxqNmg,108094
|
| 321 |
+
pygments/lexers/_lua_builtins.py,sha256=8EUwcejkeZ4CpFxSoo2DON1UwFnJhbYRpF7Q22xiQ-8,8108
|
| 322 |
+
pygments/lexers/_luau_builtins.py,sha256=eX-OzjU9gT4gvoVwCttALJMZxk2repLtc7mqHY54xWs,955
|
| 323 |
+
pygments/lexers/_mapping.py,sha256=nPWeLyQslZd_A4ehzIefyWxmvcaTtqSCW6RMb47m9Xs,69089
|
| 324 |
+
pygments/lexers/_mql_builtins.py,sha256=_1MtbSYIUf4B1Yl9hGVm-N4jdj7lcxmx3yFoiWzO3vM,24713
|
| 325 |
+
pygments/lexers/_mysql_builtins.py,sha256=A5OlN4LrN-Ig1rfsUoLAaS80B_vYTG4Fj9bwiq_qGuc,25838
|
| 326 |
+
pygments/lexers/_openedge_builtins.py,sha256=fTAp0gK3PEBDg47Rrdygxql73_C-AdHg8EGJ_Ld8p0U,49398
|
| 327 |
+
pygments/lexers/_php_builtins.py,sha256=MikQttLCEbHulmTMT1LzJpRhDzUvuzRl0Yh-iKYyn1M,107922
|
| 328 |
+
pygments/lexers/_postgres_builtins.py,sha256=-e9B90henKpkQDqEQGkS0sIeKGlg-l5VdDPWuSw5TtY,13343
|
| 329 |
+
pygments/lexers/_qlik_builtins.py,sha256=ccaWjko7s6PNJnthww0eksfBH4kkrYy1qkWSwA7wk6o,12595
|
| 330 |
+
pygments/lexers/_scheme_builtins.py,sha256=uQVSWBP5oCjD6YoV37xY3Ep4ao8fIVd8oSBlqqBn0og,32564
|
| 331 |
+
pygments/lexers/_scilab_builtins.py,sha256=6kTzoifN-Dmbg7nlJJD1_ar3lEq6Uq5sdWCjV0A1R9A,52411
|
| 332 |
+
pygments/lexers/_sourcemod_builtins.py,sha256=GxGZ9MWhiyrNktPoc57VdwxAA6sguWyFM_v09GcW3nc,26777
|
| 333 |
+
pygments/lexers/_stan_builtins.py,sha256=aICL2mRN0xyWWknCUGVSS6s0HoK_rk91OmiYXSn2dMw,13445
|
| 334 |
+
pygments/lexers/_stata_builtins.py,sha256=1JS2rARgtMan7763FA_zZ7UFT7t8YsGNJo47kLt4e2E,27227
|
| 335 |
+
pygments/lexers/_tsql_builtins.py,sha256=ZVzbdvOcC-kbs1lXcaTAT3-XwgMJuPbYQtAc6jxb9ZY,15460
|
| 336 |
+
pygments/lexers/_usd_builtins.py,sha256=dZRmB_M4Z-aVZFPG5FU-5Su6bPZOvJmCcNxYIF80MXo,1658
|
| 337 |
+
pygments/lexers/_vbscript_builtins.py,sha256=ZVJrzBsxkQKPHVkrBNmWSHaWJcKYaPa9oiCTckzrqKA,4225
|
| 338 |
+
pygments/lexers/_vim_builtins.py,sha256=vN66emQdRrvXF--5n2XOP1_8OI5VO96rxXlKABI4eAA,57066
|
| 339 |
+
pygments/lexers/actionscript.py,sha256=o-fGli0AVanHyaN-nYkedrYgdE49umtHCduObXtfK_s,11727
|
| 340 |
+
pygments/lexers/ada.py,sha256=O0T9BR80xzRn7MAYlf3BDJsQxDIS6x14x6sjLkK4gmo,5353
|
| 341 |
+
pygments/lexers/agile.py,sha256=sWWEMQnlDt99eVuZyjd5uvnuebxCJ4ad2-UXAqti1iQ,896
|
| 342 |
+
pygments/lexers/algebra.py,sha256=CdDxjCBPoETdvmErRu5TG67jy0KQhCmOVsO6nYH2Ym4,9912
|
| 343 |
+
pygments/lexers/ambient.py,sha256=pVJWRy2554RUhQjxwF4JFMPkWbgt-bmqXPvsMu_rWb0,2605
|
| 344 |
+
pygments/lexers/amdgpu.py,sha256=_LuGGZoY9bqDYf4OaPldxEvbhKvP_xqOy3T4gAKHbdg,1723
|
| 345 |
+
pygments/lexers/ampl.py,sha256=q84rYyzGP1qmriMeZVt5Cl8ohugv1tp1hNdfWBYmU50,4176
|
| 346 |
+
pygments/lexers/apdlexer.py,sha256=o0mYCKG9to1Foq7VAQmYQzn7WS7Qt1YH6zSApqv32wM,30800
|
| 347 |
+
pygments/lexers/apl.py,sha256=YChhnBp5WIzKP2FZGDIBpqztQDS58hPz0Wi7mXcQgpA,3404
|
| 348 |
+
pygments/lexers/archetype.py,sha256=8UDdDTxLyIyNCfGnH_dU90St96Jsnl9t2XshWW7dXm4,11538
|
| 349 |
+
pygments/lexers/arrow.py,sha256=MfJlUb169MPfelyrH0jXCWkHfSLTljBCrknYQyAbpUA,3564
|
| 350 |
+
pygments/lexers/arturo.py,sha256=PsV5SMa5h2RHgj161xJJDJX-0ubZ2Eyzt8bxzYeOC8g,11414
|
| 351 |
+
pygments/lexers/asc.py,sha256=ZiLshADiwSba6srjtwibovBVX4hqXqquYK7_AAf4Ofw,1693
|
| 352 |
+
pygments/lexers/asm.py,sha256=9QSICK9VKJLfjxMkEtkkrEGzbB2q6SZd9egDMlGWW1s,41934
|
| 353 |
+
pygments/lexers/asn1.py,sha256=S7psbkDQTLFZd1jDK8KaWVkyi-wHiz7AFf2b1WN7YrQ,4262
|
| 354 |
+
pygments/lexers/automation.py,sha256=Lv4-0vpmwFOX6dbsQHfKG9m7yOyrI6F7km16Z3O9ZTw,19831
|
| 355 |
+
pygments/lexers/bare.py,sha256=s934nvvtAeG8avdi-Q04ZVQebjpo8xbZdhMH_FNnev0,3020
|
| 356 |
+
pygments/lexers/basic.py,sha256=oHx7F-C6gW7AULTU_zfA8zowbmmxcdoyniYZIiaVd6o,27989
|
| 357 |
+
pygments/lexers/bdd.py,sha256=I0tooXMevZw1eND_hrdAWtJ3n3nXgUDKDM5IoYJHRUg,1641
|
| 358 |
+
pygments/lexers/berry.py,sha256=sLmGllDvhPB51Pe_M9BFx10QkqDGejcVZQxCVwEUIeA,3209
|
| 359 |
+
pygments/lexers/bibtex.py,sha256=13JE6k9k1TUYTKit0zND246zLQ0xyL52lvMkrYopP14,4811
|
| 360 |
+
pygments/lexers/blueprint.py,sha256=FZ2WkBuqlOBu0GH0rqjnNSHG_qdAu3jGt97Oen19feA,6188
|
| 361 |
+
pygments/lexers/boa.py,sha256=T_VWCvNM-wt9IjPW4FV7u8NFngpx14dr9ZN7RdOLLVQ,3921
|
| 362 |
+
pygments/lexers/bqn.py,sha256=ze1whgy0AsdgoEc1yHgk_4f2LihqOHjCVPUojA4BehE,3337
|
| 363 |
+
pygments/lexers/business.py,sha256=j1h6DphSHRFwPB5shVQfrPsajprPItPKjHxqzQfdZTo,28345
|
| 364 |
+
pygments/lexers/c_cpp.py,sha256=8e-rRebFzZgYSd52bXFShriP8jFVlCf0L2F0vI-9Rmg,18059
|
| 365 |
+
pygments/lexers/c_like.py,sha256=JCHZRIboeGb0VE-bknxnoxd8oWQhr8T0qhS2i8vTlMI,32021
|
| 366 |
+
pygments/lexers/capnproto.py,sha256=MJmnE8MYtWPUEqXh2_zBgG9FgQAD-10so6ORwpqHZRo,2174
|
| 367 |
+
pygments/lexers/carbon.py,sha256=oYL5HLBgNOJIkLti5kjFFdwGIbtidKflCKk2tdHKgos,3211
|
| 368 |
+
pygments/lexers/cddl.py,sha256=qaeINyt59SyAbxAMZHY0zcDGgxTEFPElG6DP3aa5nZA,5076
|
| 369 |
+
pygments/lexers/chapel.py,sha256=XBFfoaOiLbfUvaNvnp_iQs_qLRpg41LUmAugtg-XGug,5156
|
| 370 |
+
pygments/lexers/clean.py,sha256=vUMnhYzD3uvSc7Wdyzo7PZllAGt0dzXqhthY82qhL9c,6418
|
| 371 |
+
pygments/lexers/comal.py,sha256=G_d_FHFU-wJFihoMPmZMLGF_Bl__HO0i1gC9InQ9rbE,3179
|
| 372 |
+
pygments/lexers/compiled.py,sha256=nufWnAfB85AiZPELv0yzijrROD2wz9SEc54e9n_dmGk,1426
|
| 373 |
+
pygments/lexers/configs.py,sha256=vNh6sytziBn-1qkqDwy_OFYTKxUH4kFsJ65Hc8Yx2Oc,50534
|
| 374 |
+
pygments/lexers/console.py,sha256=39fVAcHxYtO7cy2_epBq30ShxDJxhgc09jd5J7dOAzE,4180
|
| 375 |
+
pygments/lexers/cplint.py,sha256=0lqEPgDIFQuO7Q189JjG5whWlAw_YcTlWG9-aA6eli8,1389
|
| 376 |
+
pygments/lexers/crystal.py,sha256=5UlYigeTeFRrfhHQiSN0DxqkWo2ibnhtcCErB5bTJdg,15754
|
| 377 |
+
pygments/lexers/csound.py,sha256=ZX2bs4NykljvsrI5QXgQZXGH2IyP6ztFfT1v6gq3UnQ,16998
|
| 378 |
+
pygments/lexers/css.py,sha256=571dDjM2lWepEn3ybYec0I5b3h54azXF7xJO2ZNyRLA,25366
|
| 379 |
+
pygments/lexers/d.py,sha256=-mKlYHZMYsUV2BhW8C43AFnh97zeKJgyej7XSQbOA_I,9920
|
| 380 |
+
pygments/lexers/dalvik.py,sha256=PIEJoUcYD7lij6eCg9wZgMFhUyazyQ8oFxHrpzmQFMk,4606
|
| 381 |
+
pygments/lexers/data.py,sha256=l1fmUUlwiBccMAKX06phQ1tKr27mDpzwhnytwqxTbM0,27026
|
| 382 |
+
pygments/lexers/dax.py,sha256=blHXcUy-QL9X25rLQDDW5DUceThB3sSa_4mOtCT1bkE,8098
|
| 383 |
+
pygments/lexers/devicetree.py,sha256=K1jqVm8B-2K8rRzMElMNYWNUaUKbCIt62dVRQVjINI0,4019
|
| 384 |
+
pygments/lexers/diff.py,sha256=_dlEodnJh3Y61GgP3AvRNpbC0LD-0nuOSd7PBDaOK-I,5382
|
| 385 |
+
pygments/lexers/dns.py,sha256=do6plOeMEqasazJTfGpjYYTWLqvGKsTVNAd3SldgG_8,3891
|
| 386 |
+
pygments/lexers/dotnet.py,sha256=apkKKord2c6opVFhiaK-vRrQxsJRRnFfhijPLKiMYS4,37958
|
| 387 |
+
pygments/lexers/dsls.py,sha256=ea7bLN68qYqwx1yeRJvfn1YJP343rawhdO9NuDMRjnM,36746
|
| 388 |
+
pygments/lexers/dylan.py,sha256=kcx1IB9pqjQyy4PXpu4YeiS64I8XSkE5dJ3ulWDWzMg,10391
|
| 389 |
+
pygments/lexers/ecl.py,sha256=qmFlYH19sYDOli4aTjajecw_zRlvjmzq_VUeGfcEUk8,6371
|
| 390 |
+
pygments/lexers/eiffel.py,sha256=8fpkGHe6drmZvcKOJkS0jz87H61JsQc3_999IM4Mg1c,2690
|
| 391 |
+
pygments/lexers/elm.py,sha256=Dvh8YtUEbcnCC15pCmMGIuZFGenLsg_rS4X97GOyJiA,3152
|
| 392 |
+
pygments/lexers/elpi.py,sha256=1JMrWjlWYdle82RJSZ0rTtTOCt9kLktyVJOumeK2dMc,6535
|
| 393 |
+
pygments/lexers/email.py,sha256=vR1gKbuso2-oiJG5QXYUROYjaOGw8tYD9CmdbY68lAg,4804
|
| 394 |
+
pygments/lexers/erlang.py,sha256=3xVUmnbvgbQN4QM5WGvIoQMFS9jJptBCRM3MP4KxsZo,19147
|
| 395 |
+
pygments/lexers/esoteric.py,sha256=N9HJNvtxh5ktL_I8t_8vFJ6Ms-atOkplnE39GGe8wNg,10500
|
| 396 |
+
pygments/lexers/ezhil.py,sha256=BfutGlZYc9b9uW5Qy4BIsNKWowdikL9mWijA-D_xK1M,3272
|
| 397 |
+
pygments/lexers/factor.py,sha256=yxEbxsEFcReEaajn5S7bvaacuG2me1RRY5Fb2hYDW2M,19530
|
| 398 |
+
pygments/lexers/fantom.py,sha256=meOft3jMJ98kNGetDPVvzu9nvVFC08udETA18j51TQ4,10231
|
| 399 |
+
pygments/lexers/felix.py,sha256=o3E1v8qe9vyQjS9YK_LVPKwraosmfqqypRpOx7J-ikI,9655
|
| 400 |
+
pygments/lexers/fift.py,sha256=ZTjrY--aKiHlg24KFpKUAq-wn8ULL_8G_E7N3TPMnKg,1644
|
| 401 |
+
pygments/lexers/floscript.py,sha256=LYiIZfZ2HT05T3xR00J9_q5S6nxkEw-2v5GkcuS9kfI,2667
|
| 402 |
+
pygments/lexers/forth.py,sha256=xyeyN7nMFszsaKYDG2oxLuhfjrZcBVKdRIKL6htxvSQ,7193
|
| 403 |
+
pygments/lexers/fortran.py,sha256=llxa3itGDZfNWcmsQO55DjmWgvfvgohnEdt1T4m3AHo,10382
|
| 404 |
+
pygments/lexers/foxpro.py,sha256=KW39vSlXkigjRqx6E7s7L895l4auXk9T9iyWB3XbjY4,26295
|
| 405 |
+
pygments/lexers/freefem.py,sha256=gO8XOaVcwydnhWP8t2Cgl2YeQ4C75dbGDsQs3ko3SQo,26913
|
| 406 |
+
pygments/lexers/func.py,sha256=2yYrp5etRM94BT0nkxxlcC2xAJ4PBOP8mV4i6flNmJo,3700
|
| 407 |
+
pygments/lexers/functional.py,sha256=RJSf7XrVmsGQRYbbHKhWlJtfI5PZ6stlH8Va8WMdpLo,693
|
| 408 |
+
pygments/lexers/futhark.py,sha256=QPbnZ_MvzMTQmYMTJbdJIvW8w7hQySJCViOTDH0Pv6I,3743
|
| 409 |
+
pygments/lexers/gcodelexer.py,sha256=QE3tz5_Wu0c1GsjiVU-nbLi4ZcvGvRKjTUdT3VFbErA,874
|
| 410 |
+
pygments/lexers/gdscript.py,sha256=eBdkj-lmw2TVTf7cvJYsFHVzEZ0C7kyZyVHHuelJ18A,7566
|
| 411 |
+
pygments/lexers/go.py,sha256=dZY74RhlgVS7sZwEiqYm21AOOZ-NqrDXUHBz5aTkdDw,3783
|
| 412 |
+
pygments/lexers/grammar_notation.py,sha256=vg1BvwySit-7LB8Y_5AQkgkj0AzIGAM0mHtaq_fbuYs,8043
|
| 413 |
+
pygments/lexers/graph.py,sha256=kUfbhtuS9ysgfhUdFBnT86w_JK35I-ROzOZ3IQV7sTU,4108
|
| 414 |
+
pygments/lexers/graphics.py,sha256=PXcPVmfAOROL1ocKa6ft6J3HfKgZQ9HvZ2k7AcSPl70,39145
|
| 415 |
+
pygments/lexers/graphql.py,sha256=um1AZrLHx9qpZ6NFVBX_kRAFbM7TQV9l13ep7dCpKcY,5601
|
| 416 |
+
pygments/lexers/graphviz.py,sha256=P-Xy34waH0BYD-Ccp1XmE9832I5MzO-_mwHsRrA5n0U,1934
|
| 417 |
+
pygments/lexers/gsql.py,sha256=8y3ymaD6uFx-IrFJRy0D6i9wruaUETd_ceAsugtaxIw,3990
|
| 418 |
+
pygments/lexers/haskell.py,sha256=e7WpOTNyaIj7Sr8B-JjGeGbSlACLVEa6vOM-XfWMrK0,33239
|
| 419 |
+
pygments/lexers/haxe.py,sha256=hHGZkJt4JWpLHMTb0s6a01yQNxfnXgTPvnP-eyNp6YE,30974
|
| 420 |
+
pygments/lexers/hdl.py,sha256=Vr-kHDFjxhQSE1TW4zttZ2zZHDCuXzpNcQL3tCjjjOI,22738
|
| 421 |
+
pygments/lexers/hexdump.py,sha256=5F4mjn6fgyA1CaWFoYk4Ww-pHS02a_7zpoRr70WPm8A,3653
|
| 422 |
+
pygments/lexers/html.py,sha256=dPeQnP4Qf8z6TZjL642FHTMMHRwCUccGVAAD90ORzXk,20574
|
| 423 |
+
pygments/lexers/idl.py,sha256=UnYt9-IWBk37Kr0YvCCtU2D5TXWSIqPSmeglq7Wd-V4,15449
|
| 424 |
+
pygments/lexers/igor.py,sha256=KvamBRiJOU67TDqHGB_PixR19IR1c9Fq0oLScXFPlmw,31626
|
| 425 |
+
pygments/lexers/inferno.py,sha256=kSrMfu_3AofmBZ2uCH_YFD-0a95XRjmOKk7L_hB6CIA,3135
|
| 426 |
+
pygments/lexers/installers.py,sha256=IVyOtXNeV3AVFv-18TjGbMF2xPgiI-kVejAo_EszuNA,13297
|
| 427 |
+
pygments/lexers/int_fiction.py,sha256=LSf2BROdA4Mz_rgpsKLICJebljE3nQsh_K0hJvfVlaQ,56544
|
| 428 |
+
pygments/lexers/iolang.py,sha256=XL_JsHAZRziGN8Sk08NZmSo5irAWU27yVy7czpU9GtQ,1905
|
| 429 |
+
pygments/lexers/j.py,sha256=_fomKXuUh8FANTUl-HxE_x9WYRnKJGpAiQUMeVJUolo,4853
|
| 430 |
+
pygments/lexers/javascript.py,sha256=m6VcUOwTUcpdZoJNaAIXE_zHq8NZBkQVqYZMRPXI1VE,63101
|
| 431 |
+
pygments/lexers/jmespath.py,sha256=GBHuBxeDVdeUN03KuvFK4UIY3e79gYT0FkiDMWKgygw,2082
|
| 432 |
+
pygments/lexers/jslt.py,sha256=p_0Qba5bPcyxERl2xsmFKet7_P0R5R0ist_3GFrN2us,3700
|
| 433 |
+
pygments/lexers/jsonnet.py,sha256=bqXv1lRpxoIx8X-_Y1PIwOl-jgnEPtilCUL4wT2-2xE,5636
|
| 434 |
+
pygments/lexers/jsx.py,sha256=qlRxi85Fh0kPvD5Viff9_Xm-gjfmEEKm3AsVDWTgM5o,2233
|
| 435 |
+
pygments/lexers/julia.py,sha256=DLaYCAF4AN8nsgRzJY-MWlHEZtDuNZ4FznIFijJatGM,11679
|
| 436 |
+
pygments/lexers/jvm.py,sha256=OFw18wPlecJDJjt16N3gMQVy_xmnxjcDCXlu4_NXTYQ,72666
|
| 437 |
+
pygments/lexers/kuin.py,sha256=axJ3_tqZKu40OgkUP27qsyDEkM_TeZGQcKFXlYOrtMI,11405
|
| 438 |
+
pygments/lexers/kusto.py,sha256=Q-UC6DQB4vuxtVsQcMZEp56CxXpIlJXeuP-WDEV-Eww,3477
|
| 439 |
+
pygments/lexers/ldap.py,sha256=_ua5KpHLgDiTCKFVZ4OJkoBUh84FXGqeDX29UELMYzg,6551
|
| 440 |
+
pygments/lexers/lean.py,sha256=bk6LaVc_PsSEhspNLzRMKkG6KyFWup7nhS7Kx1XXQ34,8570
|
| 441 |
+
pygments/lexers/lilypond.py,sha256=gE6Z5xs7uocSCfaDOghkowKGblZX8zIE_v9eLZ651UQ,9752
|
| 442 |
+
pygments/lexers/lisp.py,sha256=79Upl-OrKzZ-8443lyPRJHagOSwDGkxywKqJwXF66FA,157668
|
| 443 |
+
pygments/lexers/macaulay2.py,sha256=iRTPn040k600ZQiY4IpPL-Byt027wBBhFMWn9QjgROc,32844
|
| 444 |
+
pygments/lexers/make.py,sha256=5-2Mq3muMg9Yq5yfmXNI7UH-ApYh9VePrIvKsJTpNHg,7831
|
| 445 |
+
pygments/lexers/markup.py,sha256=tgFazIqyrufzmAgd3FL-4-WMbwbc8ve5P7tJGvPWnOs,65088
|
| 446 |
+
pygments/lexers/math.py,sha256=lI6LVR-OqL5XB46Nu41YE00QA1WKssPp1TFzH-NoV50,695
|
| 447 |
+
pygments/lexers/matlab.py,sha256=gXzVaLWkjqmwqNJgxpyNbfqy8X2yDs9QyJaGQcXBu94,132962
|
| 448 |
+
pygments/lexers/maxima.py,sha256=Qfe8sTugA5UhGuRzEKjspbH-w6YVlREdAAmGyKp5HZc,2715
|
| 449 |
+
pygments/lexers/meson.py,sha256=FmmaxUNRV55nxQm6oQ2sXhhZZyNSms68V7Uxj7F9TuQ,4336
|
| 450 |
+
pygments/lexers/mime.py,sha256=nwyBkz2_gJ-jUmxWpcNvpGYftazfk4V9adzcPashGeg,7582
|
| 451 |
+
pygments/lexers/minecraft.py,sha256=GKPEYWI6HHvpOdEh4GTphfAM4mx8dOA6VFOsA0-1aAk,13696
|
| 452 |
+
pygments/lexers/mips.py,sha256=YtvYhgqSdYtfy69FK5I7EiOPC2VSKdqq5_ZgPHD1wbE,4656
|
| 453 |
+
pygments/lexers/ml.py,sha256=hZvMkwBzbgWYX4ndtUI_m9xAjjGmxyqPbwJe7FcAeoY,35390
|
| 454 |
+
pygments/lexers/modeling.py,sha256=EebFhvTaBw279EkIcRDvIsGIo33A8TTgtvn__Ypun7A,13683
|
| 455 |
+
pygments/lexers/modula2.py,sha256=2rHAQBCVnLu0mdaq8ERDPuHwF0wNh2r5SKl_vUJMea4,53072
|
| 456 |
+
pygments/lexers/mojo.py,sha256=z9CDFFnBfI9G9gHRsYOtcCtKWbXKdLgRdvoKRVAoWTA,24181
|
| 457 |
+
pygments/lexers/monte.py,sha256=DTy0svJRg7P7j7EwDPN5aG7l98Bm7STANyJSwOytv5c,6289
|
| 458 |
+
pygments/lexers/mosel.py,sha256=cErB67hM5hw1qZYPKuHmOfrJu8H8uDk8B378hnCyI6M,9297
|
| 459 |
+
pygments/lexers/ncl.py,sha256=zyV63ZeQnoV__OY6284jW8GENuzsXgY-_d6HxDz7m0w,63999
|
| 460 |
+
pygments/lexers/nimrod.py,sha256=lBDewJqj_VNdsKQJ-8qQ--_ZFNbQZETraP8u92b62xU,6413
|
| 461 |
+
pygments/lexers/nit.py,sha256=M91yluzYKftVIr2HsWdianN4gPirwBLybF287xiVb-o,2725
|
| 462 |
+
pygments/lexers/nix.py,sha256=oUlbwP0ktviAB6lMtrzVwrzsgw09BPN0OS5NM6IMK40,4421
|
| 463 |
+
pygments/lexers/oberon.py,sha256=QmaSlH1Magq2Kn6DlzZtowxTRWVTivoWPLsVj5ywHY8,4210
|
| 464 |
+
pygments/lexers/objective.py,sha256=W3ikK5MZiP2nF8gpGO0wSA5LM3TGE6Z1GHkLFQXq0aI,23240
|
| 465 |
+
pygments/lexers/ooc.py,sha256=xpKo2xGSael8ymrTyXhfyL7630zBx_Gatb4fEZ9qn14,2981
|
| 466 |
+
pygments/lexers/openscad.py,sha256=u-Ow27qJeR3l6N-8gSzJ4ZsvviBvCylCbMnc3pCgb_s,3700
|
| 467 |
+
pygments/lexers/other.py,sha256=U4tWdbxxxa-__MU48rejsw6n-OyZHf3ikk0O7dG4jxo,1763
|
| 468 |
+
pygments/lexers/parasail.py,sha256=9KkwIh9KuvGjI_cgFuzlTOTZEgyPcLnz1ikKOM48x48,2719
|
| 469 |
+
pygments/lexers/parsers.py,sha256=mZAfSp6U1KwcfnjzBUojmO_G1dlYMG8KEnaWqr4d5Ko,26596
|
| 470 |
+
pygments/lexers/pascal.py,sha256=k1vWSa2xiclhbHYK9bcEj6pUgBR-RT2oLTagK9zn5YA,30989
|
| 471 |
+
pygments/lexers/pawn.py,sha256=eYfJfp9ibWIJL0R5CGzg94vZI0nxlRI5bkbzzWuc3vM,8253
|
| 472 |
+
pygments/lexers/perl.py,sha256=uVou8njqVI4tzGM2iX6P3OPud4FipZK-Zi2uFAhcOBA,39192
|
| 473 |
+
pygments/lexers/phix.py,sha256=q-P5CcxMRvOGHuFAY5wAPUFWUY9IsGNj8gC7K5g28lE,23249
|
| 474 |
+
pygments/lexers/php.py,sha256=UkF4jrSzj7HsTwrkPF42xTm-kOF3jT-yvc7CzicfwMI,13061
|
| 475 |
+
pygments/lexers/pointless.py,sha256=pdrIHwcAcCJGfwHxFGHoJ_KkQXwPH2OKJbnlqIEugtU,1974
|
| 476 |
+
pygments/lexers/pony.py,sha256=FIrwgNLoN1shYp_JQk9TzzWy_GzWp1Fikc8niAUA8Zc,3279
|
| 477 |
+
pygments/lexers/praat.py,sha256=KZP6jV2BaJXeNEtGzCvGW7EO8tbNomTGuW3vqQ0BIoA,12676
|
| 478 |
+
pygments/lexers/procfile.py,sha256=hdl66BmCMFYjVkD2Do41OsKL9qoo5cOFty6u1il1f7s,1155
|
| 479 |
+
pygments/lexers/prolog.py,sha256=7ThE10pBk1juL1S87a_IBucCtEirJ4SdMHWv60hLw4s,12866
|
| 480 |
+
pygments/lexers/promql.py,sha256=T6Vth5trlmxvEUZFlwaNmQIMqc6gks16Xjv6VLlNiD8,4738
|
| 481 |
+
pygments/lexers/prql.py,sha256=yGEiiPGYoRS_BpWarVwu7iQV3ZY301w928fk_kIVXjo,8747
|
| 482 |
+
pygments/lexers/ptx.py,sha256=bvZwgSRXvlP5FhYEeIJR08tqrdJzG4iz_l-GfaYGmXU,4501
|
| 483 |
+
pygments/lexers/python.py,sha256=KEHRWLVhykUPwl10uBIev1RnQ5h5qGgcjVP-mtsY1rQ,53639
|
| 484 |
+
pygments/lexers/q.py,sha256=j5sI-L_KvvtNVnghW7fDVTkPUpGeS8RMVetadtZkSZQ,6936
|
| 485 |
+
pygments/lexers/qlik.py,sha256=BcEHm92Js-FoekcsuGx-Wp8B379PonGqV-CLEKqd7hA,3693
|
| 486 |
+
pygments/lexers/qvt.py,sha256=kAu8NJpjGktB9Z4wJAMoQbGc8LafMV1b6z-R9XEyqA4,6103
|
| 487 |
+
pygments/lexers/r.py,sha256=5AFY4SAj7RmscCdU4WYuI-XTnPBgryGkPKxvTkw3UDk,6342
|
| 488 |
+
pygments/lexers/rdf.py,sha256=GigtsRZmYwMjFzebvH1LV6ops_gi5OqxDCfZ_xBZ1Lc,16042
|
| 489 |
+
pygments/lexers/rebol.py,sha256=gacBQsL85pcTpjJS1UUKDrlmFanRnnPJYphjQDSmYiw,18259
|
| 490 |
+
pygments/lexers/resource.py,sha256=Mve3WMK62BrefqaJzwnHai23DIPvg-qdu0vQumJPhzA,2927
|
| 491 |
+
pygments/lexers/ride.py,sha256=qUAqA8gIMecQj3FOemfeYOtQ_iGC_mVkawdE6oUNopc,5035
|
| 492 |
+
pygments/lexers/rita.py,sha256=2fP6PGLEPZVhUOWaU00sC36r2QWnZVi07QQ2BhLY1bE,1127
|
| 493 |
+
pygments/lexers/rnc.py,sha256=E-4DyG21UILNE5UPpUSl43UmbaDA9JPEnrjFlPembuA,1972
|
| 494 |
+
pygments/lexers/roboconf.py,sha256=0J7CIBT5CE0A18sjOJV4M3J2tAvGs_WC2M2GTYSmshE,2074
|
| 495 |
+
pygments/lexers/robotframework.py,sha256=Nh5SSW_bchdqtioQo6bt3q4ULuL4dsoYau5_Pu-ah-I,18448
|
| 496 |
+
pygments/lexers/ruby.py,sha256=vTykyjbAPIG-mRBnNKNgNGeAXwsv7MB8xWOkzrhpsWg,22753
|
| 497 |
+
pygments/lexers/rust.py,sha256=B_H41x0dgnatX9zRLNAVOdPiEgFpENIXFYW7Jfsv8GQ,8260
|
| 498 |
+
pygments/lexers/sas.py,sha256=FDX4GAE1Pcr5vmwCSSUx5Ym7qOPzQzxXiDnqqQzwnQ8,9456
|
| 499 |
+
pygments/lexers/savi.py,sha256=eUiiyyRMQTXJacqlreYj8RgjZGPSTlpiJ9Ctll9cjjA,4878
|
| 500 |
+
pygments/lexers/scdoc.py,sha256=9YOBIrmKLIC30ceVtfG4Qy5HHNWfzqb9Fw1-L3poZZQ,2524
|
| 501 |
+
pygments/lexers/scripting.py,sha256=3mDK3eT62VIq-cmAo3js6wDxOYemfYtGQXccbGbMh-I,81062
|
| 502 |
+
pygments/lexers/sgf.py,sha256=wfWiRnyh8Z9fl6oN0SGrDDcdrXSozvmTEDqHK0AOIiw,1985
|
| 503 |
+
pygments/lexers/shell.py,sha256=kpL7It9fUab8bCQSKAPJiJuzWsgA6Tv5zo6TnmRdkiU,36234
|
| 504 |
+
pygments/lexers/sieve.py,sha256=ymmZ8ZubRoVtBcgOr0UW_Dg5siMpJqgpZXC6LmE7RJo,2514
|
| 505 |
+
pygments/lexers/slash.py,sha256=hc5BWEaTGxyGhIwaRaQAayXbr53Q6H3ZwHTC5o5R-WY,8484
|
| 506 |
+
pygments/lexers/smalltalk.py,sha256=GxnNoY4eYE2-reCp4OL0d4DHBtHsZJTymRZY31Fq1lM,7204
|
| 507 |
+
pygments/lexers/smithy.py,sha256=PjLWFlb-qYnEfkTOkLkpSBxkfG6gXCXsU-Pf42FKE0g,2659
|
| 508 |
+
pygments/lexers/smv.py,sha256=w-WL-lSg0NKJKTEzZH94QeHQbMQlbxoId-vUP1nAbJk,2805
|
| 509 |
+
pygments/lexers/snobol.py,sha256=vEBdaH4C9F6ctEKmMuWMmK_WKyCC40yi6q4HbXUZavo,2778
|
| 510 |
+
pygments/lexers/solidity.py,sha256=Bc3mELp5mXvZNgr3vRzAqjKRtC6SPNhfmsaD6RdJGyc,3163
|
| 511 |
+
pygments/lexers/soong.py,sha256=YpDy1HvoABJU26xq2-y0xs8BKIZyO3635VrYKOizhQ0,2339
|
| 512 |
+
pygments/lexers/sophia.py,sha256=SxKzCczr5WU19adkc-NbnPCcY5ecN5Fx97zRsFhk7Rk,3376
|
| 513 |
+
pygments/lexers/special.py,sha256=6-K1Ik28Dle15Ol_b1UANMcRIX7EcTo_Fbf3Wr-zp-k,3554
|
| 514 |
+
pygments/lexers/spice.py,sha256=QKbDr0PSRObjF3knMghglx8zFVp2Hjrq8zSAn6XVWlc,2790
|
| 515 |
+
pygments/lexers/sql.py,sha256=ESU6S2vX1jucKfOhe1tW8hKVluFTZYTvClB1AnGI9Kk,42594
|
| 516 |
+
pygments/lexers/srcinfo.py,sha256=Lv2ptfstgzOtzY2MGqCUcDadyntJrCQMTP-rOie2iuU,1746
|
| 517 |
+
pygments/lexers/stata.py,sha256=sxOLP7jb6UReJCjxUHJYAbgAJV7M_LNOGywLwkPWXTE,6415
|
| 518 |
+
pygments/lexers/supercollider.py,sha256=dYszvpnKfaLGdmIEnruntz9VTccLmUYwzbbLvZfOPC4,3697
|
| 519 |
+
pygments/lexers/tact.py,sha256=5KVnr9VxJeh_RKmbgOwR09X-5n7Th8Yb-Zk_0XTQovo,10809
|
| 520 |
+
pygments/lexers/tal.py,sha256=VF3YLB4xvQB52W4GW9ZVNkDqkyPXASlHALhOJupw2SI,2904
|
| 521 |
+
pygments/lexers/tcl.py,sha256=mepJbvCnYmZR1-dKK9Cfkt9m08bwTg7fOs96k2csfGg,5512
|
| 522 |
+
pygments/lexers/teal.py,sha256=gCEC8QyKyq3aQk6E8E98fwD3vJrVZs58XsGjJ6GTL_4,3522
|
| 523 |
+
pygments/lexers/templates.py,sha256=ASVTvkCvKfD7Ay5eETo7vs2Izc4ISP-UBUhpds_O1Yo,75719
|
| 524 |
+
pygments/lexers/teraterm.py,sha256=3gwNILGwryBxytCTzS6qVGcZBFRfOX7svC11w4EpgSY,9718
|
| 525 |
+
pygments/lexers/testing.py,sha256=5pQIJ7ZNeT02-GM0krZOXo8Pc3s6IbYbS46w0KjHaKE,10810
|
| 526 |
+
pygments/lexers/text.py,sha256=EF6zAFvul6KpvQMPJrRw1FIr5ALAP8aScZ9VRqutid0,1048
|
| 527 |
+
pygments/lexers/textedit.py,sha256=Kf8ALySo9XQ_UY6X9quOLA782ZwHvBHL17expjthZQI,7760
|
| 528 |
+
pygments/lexers/textfmts.py,sha256=0_1KYSlbmLTZmFpX5iXktRKJ8OyVO9QmAFDg__rXGUk,15524
|
| 529 |
+
pygments/lexers/theorem.py,sha256=z09aU_G5UI9DpsWf6jm-XPWcuUSKwJrAsLYqLgQuS8c,17855
|
| 530 |
+
pygments/lexers/thingsdb.py,sha256=cLu1oiOV0R1mR9oZWX2wD03G0s7MWhkvguZ70FPgkaM,6017
|
| 531 |
+
pygments/lexers/tlb.py,sha256=meibbT7CuzX9aFdel3o9uJDM9s4BsWHrHSd8SxyUl3Y,1450
|
| 532 |
+
pygments/lexers/tls.py,sha256=o6vcxSNMeQmq9jIRY5Muyt2ETwLomqWK983LnENX9vU,1540
|
| 533 |
+
pygments/lexers/tnt.py,sha256=okaq1-TxloDyopXy9pu6Ts5TbkfOqqynNzTbObH8JE0,10456
|
| 534 |
+
pygments/lexers/trafficscript.py,sha256=U7l9JD39m5YHtmO50vR-5pilR1QBMxO2RdqWe-wkUyY,1506
|
| 535 |
+
pygments/lexers/typoscript.py,sha256=dohTsxka3AAjgRfRYSyqO4WLxa_kcscDMf8SHxPWUKY,8332
|
| 536 |
+
pygments/lexers/typst.py,sha256=lou5GsSvL-FuBVyDMT8Tg8jOXl4V3RW2exEPqUbmfXE,4337
|
| 537 |
+
pygments/lexers/ul4.py,sha256=DyVoCHqF9AxIDJzFuBLcBgDbkXQzqhEmtcAkc4umnXA,10499
|
| 538 |
+
pygments/lexers/unicon.py,sha256=TH8FGcxImq4Ioq20cKHfTN3abwD-1YaImuf_MC1s7rk,18625
|
| 539 |
+
pygments/lexers/urbi.py,sha256=KreG90URYP6Xq-Cq5yLqkntpSmksjyJLrMo2ume7dM8,6082
|
| 540 |
+
pygments/lexers/usd.py,sha256=p_wZfBLvMILyTTohmkxxJPWTce89zb_TMHFZcZY5mw0,3304
|
| 541 |
+
pygments/lexers/varnish.py,sha256=YfIyQPkChcQ4VzcbDvGX7FLuemL2m9vB4w8hiyRKVfE,7473
|
| 542 |
+
pygments/lexers/verification.py,sha256=iZR-iWX_SWCAYZ3YqIWjhcTmQdmtiEomay18Gz8HdVw,3934
|
| 543 |
+
pygments/lexers/verifpal.py,sha256=dKEqh3m32VjEF_Bn0Ies8-Q0RkTfaKfJd06a9snQqmE,2661
|
| 544 |
+
pygments/lexers/vip.py,sha256=nHw6Yq4msTQaJqnDEEmnH6O4KfxCwxPEnMPfHJQr8uE,5711
|
| 545 |
+
pygments/lexers/vyper.py,sha256=I6Sr2i3ObogDbhElepwGzvnIH__htWF6cys3PqbimmM,5590
|
| 546 |
+
pygments/lexers/web.py,sha256=a5Da6fOZsK8ft4lj86ADqZnBFgV3jNw7oZCd_nepWXI,913
|
| 547 |
+
pygments/lexers/webassembly.py,sha256=sXB1c9mVobzv5F8rEmRi4re3UgTD_iGxHR3rf0zQieY,5698
|
| 548 |
+
pygments/lexers/webidl.py,sha256=9eutoEoxDJk6MEbcQIpM79EkQalU2-DDKbZQQKDYsrg,10516
|
| 549 |
+
pygments/lexers/webmisc.py,sha256=3CxiToHU2w3cHqyOizoWlP0rZAsrASowZQWZIu5XC-Q,40564
|
| 550 |
+
pygments/lexers/wgsl.py,sha256=PrBbAe9TxRiJXQhm7kiwLsiKFm3y0X4I66whtWGOIAs,11880
|
| 551 |
+
pygments/lexers/whiley.py,sha256=iH-TV5itY3LNcr1072PapDpMX2ys2RIKthe98F3BXhg,4017
|
| 552 |
+
pygments/lexers/wowtoc.py,sha256=K1-o9Ui3vr7yMUWNoipFZmQUzTrvzRkvm6Z0_S4FDk8,4076
|
| 553 |
+
pygments/lexers/wren.py,sha256=TOQXYQk-Vl0XLJ4_TZcsSaBiJMqE8KKi5Xu8A-nrrmM,3229
|
| 554 |
+
pygments/lexers/x10.py,sha256=lHNR_3TM82-nl5FynZByOZ-YlX_jFz3relStlHriDIk,1943
|
| 555 |
+
pygments/lexers/xorg.py,sha256=UriOYGMw2YiogLg7a1pVHQ_byUE1rkYybIw252-yL-I,925
|
| 556 |
+
pygments/lexers/yang.py,sha256=kzRAGm1AELRrc3NF6xN4Axc-M2DTzf90zqd0vyLIMWI,4499
|
| 557 |
+
pygments/lexers/yara.py,sha256=IhbIy7H8A__Y023fNwLgXzSW5H0SI0uHyzt2cXY75AI,2427
|
| 558 |
+
pygments/lexers/zig.py,sha256=nnm8oDDjYCLGB_JhWHobZ9hP1U_JIXJdyyetxyAJ2WQ,3976
|
| 559 |
+
pygments/modeline.py,sha256=gtRYZBS-CKOCDXHhGZqApboHBaZwGH8gznN3O6nuxj4,1005
|
| 560 |
+
pygments/plugin.py,sha256=ioeJ3QeoJ-UQhZpY9JL7vbxsTVuwwM7BCu-Jb8nN0AU,1891
|
| 561 |
+
pygments/regexopt.py,sha256=Hky4EB13rIXEHQUNkwmCrYqtIlnXDehNR3MztafZ43w,3072
|
| 562 |
+
pygments/scanner.py,sha256=NDy3ofK_fHRFK4hIDvxpamG871aewqcsIb6sgTi7Fhk,3092
|
| 563 |
+
pygments/sphinxext.py,sha256=gQAWnR6_1SM6OwUypWg1DlXrmmF9o3ZzbxsaXDsiRVg,7898
|
| 564 |
+
pygments/style.py,sha256=dCFnKszVtsMeSW20uHC4pU3yuYgVPuJggm64-2Ylmo4,6408
|
| 565 |
+
pygments/styles/__init__.py,sha256=v0aGLoaoKnp4_MGp4w-cmUvMoHYHMemiz_e_IPMLpbg,2006
|
| 566 |
+
pygments/styles/__pycache__/__init__.cpython-310.pyc,,
|
| 567 |
+
pygments/styles/__pycache__/_mapping.cpython-310.pyc,,
|
| 568 |
+
pygments/styles/__pycache__/abap.cpython-310.pyc,,
|
| 569 |
+
pygments/styles/__pycache__/algol.cpython-310.pyc,,
|
| 570 |
+
pygments/styles/__pycache__/algol_nu.cpython-310.pyc,,
|
| 571 |
+
pygments/styles/__pycache__/arduino.cpython-310.pyc,,
|
| 572 |
+
pygments/styles/__pycache__/autumn.cpython-310.pyc,,
|
| 573 |
+
pygments/styles/__pycache__/borland.cpython-310.pyc,,
|
| 574 |
+
pygments/styles/__pycache__/bw.cpython-310.pyc,,
|
| 575 |
+
pygments/styles/__pycache__/coffee.cpython-310.pyc,,
|
| 576 |
+
pygments/styles/__pycache__/colorful.cpython-310.pyc,,
|
| 577 |
+
pygments/styles/__pycache__/default.cpython-310.pyc,,
|
| 578 |
+
pygments/styles/__pycache__/dracula.cpython-310.pyc,,
|
| 579 |
+
pygments/styles/__pycache__/emacs.cpython-310.pyc,,
|
| 580 |
+
pygments/styles/__pycache__/friendly.cpython-310.pyc,,
|
| 581 |
+
pygments/styles/__pycache__/friendly_grayscale.cpython-310.pyc,,
|
| 582 |
+
pygments/styles/__pycache__/fruity.cpython-310.pyc,,
|
| 583 |
+
pygments/styles/__pycache__/gh_dark.cpython-310.pyc,,
|
| 584 |
+
pygments/styles/__pycache__/gruvbox.cpython-310.pyc,,
|
| 585 |
+
pygments/styles/__pycache__/igor.cpython-310.pyc,,
|
| 586 |
+
pygments/styles/__pycache__/inkpot.cpython-310.pyc,,
|
| 587 |
+
pygments/styles/__pycache__/lightbulb.cpython-310.pyc,,
|
| 588 |
+
pygments/styles/__pycache__/lilypond.cpython-310.pyc,,
|
| 589 |
+
pygments/styles/__pycache__/lovelace.cpython-310.pyc,,
|
| 590 |
+
pygments/styles/__pycache__/manni.cpython-310.pyc,,
|
| 591 |
+
pygments/styles/__pycache__/material.cpython-310.pyc,,
|
| 592 |
+
pygments/styles/__pycache__/monokai.cpython-310.pyc,,
|
| 593 |
+
pygments/styles/__pycache__/murphy.cpython-310.pyc,,
|
| 594 |
+
pygments/styles/__pycache__/native.cpython-310.pyc,,
|
| 595 |
+
pygments/styles/__pycache__/nord.cpython-310.pyc,,
|
| 596 |
+
pygments/styles/__pycache__/onedark.cpython-310.pyc,,
|
| 597 |
+
pygments/styles/__pycache__/paraiso_dark.cpython-310.pyc,,
|
| 598 |
+
pygments/styles/__pycache__/paraiso_light.cpython-310.pyc,,
|
| 599 |
+
pygments/styles/__pycache__/pastie.cpython-310.pyc,,
|
| 600 |
+
pygments/styles/__pycache__/perldoc.cpython-310.pyc,,
|
| 601 |
+
pygments/styles/__pycache__/rainbow_dash.cpython-310.pyc,,
|
| 602 |
+
pygments/styles/__pycache__/rrt.cpython-310.pyc,,
|
| 603 |
+
pygments/styles/__pycache__/sas.cpython-310.pyc,,
|
| 604 |
+
pygments/styles/__pycache__/solarized.cpython-310.pyc,,
|
| 605 |
+
pygments/styles/__pycache__/staroffice.cpython-310.pyc,,
|
| 606 |
+
pygments/styles/__pycache__/stata_dark.cpython-310.pyc,,
|
| 607 |
+
pygments/styles/__pycache__/stata_light.cpython-310.pyc,,
|
| 608 |
+
pygments/styles/__pycache__/tango.cpython-310.pyc,,
|
| 609 |
+
pygments/styles/__pycache__/trac.cpython-310.pyc,,
|
| 610 |
+
pygments/styles/__pycache__/vim.cpython-310.pyc,,
|
| 611 |
+
pygments/styles/__pycache__/vs.cpython-310.pyc,,
|
| 612 |
+
pygments/styles/__pycache__/xcode.cpython-310.pyc,,
|
| 613 |
+
pygments/styles/__pycache__/zenburn.cpython-310.pyc,,
|
| 614 |
+
pygments/styles/_mapping.py,sha256=6lovFUE29tz6EsV3XYY4hgozJ7q1JL7cfO3UOlgnS8w,3312
|
| 615 |
+
pygments/styles/abap.py,sha256=9rx_hWD_GptC9nhoXVVc3p8ysYJ9wQfcReqgzyic2OM,749
|
| 616 |
+
pygments/styles/algol.py,sha256=zeOLl6K37RauOH7chM5o_MC7UyTo81kk6sEOn5Joy8o,2262
|
| 617 |
+
pygments/styles/algol_nu.py,sha256=XGmfWujpWLfNJ4AL7BJX0Xyr3wp3vUEV7x7eYj4Z5y4,2283
|
| 618 |
+
pygments/styles/arduino.py,sha256=7IEZ8-xN_h8FjvuJFZiuVmZKC69gD57zgWussXjPsco,4557
|
| 619 |
+
pygments/styles/autumn.py,sha256=76KNB7kJfZIOqcH0nZ0y6kh8xWWQc_-agkIJFWbCdf4,2195
|
| 620 |
+
pygments/styles/borland.py,sha256=Gv5bXGIKuMGr0Oi4rbOrmEhZzdwHNLA20iQcuHg9yGQ,1611
|
| 621 |
+
pygments/styles/bw.py,sha256=zJUiRQGS9OIE7H82M--9A8_jiqWTUPmjf0ZfWz0eSNY,1406
|
| 622 |
+
pygments/styles/coffee.py,sha256=vdWBg6Oaj6T_TvfDSX1Ex6C8jOMggi1ZukUdMErPioE,2308
|
| 623 |
+
pygments/styles/colorful.py,sha256=x6Oka6AQiGm-PtzuF5AdHUUZk61ZZXhvjOjHdsuFsTE,2832
|
| 624 |
+
pygments/styles/default.py,sha256=YVxaKrvLvV9PD3I_7J6OE9gIYZE0O4fRWVr2qY9YWMg,2588
|
| 625 |
+
pygments/styles/dracula.py,sha256=qcgo21Tvi-_KyjXvHHWQus19kNYJbtOvDZbYDcMXBno,2182
|
| 626 |
+
pygments/styles/emacs.py,sha256=FknUJ19Pkh3iRLtVwUwxDOGwvVcJ8ZWPegGbCbTLlq4,2535
|
| 627 |
+
pygments/styles/friendly.py,sha256=wbmM_eI7io4ZiU7s04dbLu8MyBHi0nwkSCvA4PjhA10,2604
|
| 628 |
+
pygments/styles/friendly_grayscale.py,sha256=4VH0U_dWcANmzD5YvTfNGBAHt1NMa64Gxqvh2cMB5fY,2828
|
| 629 |
+
pygments/styles/fruity.py,sha256=lZKB5CspS2-7XiCS0HUR1Rw_p8bf7H5auyYXdH0RviA,1324
|
| 630 |
+
pygments/styles/gh_dark.py,sha256=q4ydCJLNNemg3FrShh3OeX3LLkI_ix6ZwAzsZ_U7bIA,3590
|
| 631 |
+
pygments/styles/gruvbox.py,sha256=kNlwHRuiNt6nFiV73HjEM9R7CpaYVzCyL19PWoKdVAA,3387
|
| 632 |
+
pygments/styles/igor.py,sha256=pxC51nXipMCi3K2lYzuTJI3VkwPTqF7xSu5co_PiU0s,737
|
| 633 |
+
pygments/styles/inkpot.py,sha256=jRs2u3nQsLhUOX3Fkl4eaNurasOtlEy3wKCMPWcPpFE,2404
|
| 634 |
+
pygments/styles/lightbulb.py,sha256=RJcQHPcC3SfqOtyWOzOXGysd5iyirTkkR6cqxch9LRQ,3172
|
| 635 |
+
pygments/styles/lilypond.py,sha256=faTOHQ9NCV-yWV3AxhnQ6wMvakoxQ8oBZw5BTDQOYGo,2066
|
| 636 |
+
pygments/styles/lovelace.py,sha256=y-5nl4TavhUCYrTHU_ysTNQDh40dUeqiGrAQuPmF1xc,3178
|
| 637 |
+
pygments/styles/manni.py,sha256=-zRQEJnjkXJpGmMMlJSOMNGho6VAsDimCvW79aqntG4,2443
|
| 638 |
+
pygments/styles/material.py,sha256=28SYTw9qOuQS42Qsy8lDUNBvJzbliNH8Crq0e5N2sIc,4201
|
| 639 |
+
pygments/styles/monokai.py,sha256=TvyWTqvy6oZxKNARKT7mgt2LgmNKLy2DL8Tphj6kjWk,5184
|
| 640 |
+
pygments/styles/murphy.py,sha256=txf8iMJaPpynJRtnZJILTAagWhZ7Vdxn9DGcn8TE_q4,2805
|
| 641 |
+
pygments/styles/native.py,sha256=0Ke0kSPZ3ARlI6ncLQzLiYGM660OpcuhNJfzHwJyVLU,2043
|
| 642 |
+
pygments/styles/nord.py,sha256=Ee-EgGAEhW8Yi2CNxx2URZm2AGFADHurN9BwFIDBcd4,5391
|
| 643 |
+
pygments/styles/onedark.py,sha256=SpfxQcze3C4s8R1DIxBn_YmuZC65X8UoShN8Dzuyuxs,1719
|
| 644 |
+
pygments/styles/paraiso_dark.py,sha256=4IBihUb0MsjbT1CXpmIYx2kxwDZRXh4wPRgxXXzbuvM,5662
|
| 645 |
+
pygments/styles/paraiso_light.py,sha256=6_LiKp3-cjlNW3q82poUZJktJQBhoUVROB0UgbgCPRs,5668
|
| 646 |
+
pygments/styles/pastie.py,sha256=Y1FWBgt6FDcZxpbiaAgj2x7_4gx-d_fSSwdhSnfcFRU,2525
|
| 647 |
+
pygments/styles/perldoc.py,sha256=n280hEgwbxNgHWnY5tvpwvBWoeKokUP3xYeSMnUDu0s,2230
|
| 648 |
+
pygments/styles/rainbow_dash.py,sha256=FHP8Es63TK_81EkyhlDTMZbZ-IqTKG3FkACbwMdJKVc,2390
|
| 649 |
+
pygments/styles/rrt.py,sha256=-PdgNtub-w6B04dL_BZxoztjMl42JdtekA_OLO-QXDw,964
|
| 650 |
+
pygments/styles/sas.py,sha256=wFRC1zzCjIaUoxp7F2F4PsQlrLgi0Lhl73EBhBa6MZU,1440
|
| 651 |
+
pygments/styles/solarized.py,sha256=sxXpKTLLJqbOZSaikQr9xK2YC39aMd7s1cVZk0WncnY,4247
|
| 652 |
+
pygments/styles/staroffice.py,sha256=Q8lR_zkR-YTqdyNNETypBdIjCGx4lQhUitdC56DKntY,831
|
| 653 |
+
pygments/styles/stata_dark.py,sha256=6HrS2rre1SjqCnhvFU_fZ0k657owP1tQO5UdY7xQk-k,1257
|
| 654 |
+
pygments/styles/stata_light.py,sha256=AleMjdPmr3OqBLdBrLyYRs_Qnnr2s27zWuh88CPtYY8,1289
|
| 655 |
+
pygments/styles/tango.py,sha256=N68BibRMXHmNTIt2-gH2krdqErgKmKaWiv3l5mO_yG4,7137
|
| 656 |
+
pygments/styles/trac.py,sha256=FVuEbNlIthXD2ajLaWBewGTvyx9tQtmleg-7Cigwnj4,1981
|
| 657 |
+
pygments/styles/vim.py,sha256=RxqkvCriR3QwCxZsSjTwFUzNBmFEcWKWbn5H9w7hwDE,2019
|
| 658 |
+
pygments/styles/vs.py,sha256=cG9_3FVzYiirGa8uesnwH4q1L6ZRb9nlwy_W1SAZ19w,1130
|
| 659 |
+
pygments/styles/xcode.py,sha256=4A2br07F5Q7X8dUkqu14oHLGT5zsuOiv-e0V0UYCCu4,1504
|
| 660 |
+
pygments/styles/zenburn.py,sha256=EyWeyFH9wRLSHc2S4v-rgFIro_b8H9Ia-gaRE2PJgCs,2203
|
| 661 |
+
pygments/token.py,sha256=qZwT7LSPy5YBY3JgDjut642CCy7JdQzAfmqD9NmT5j0,6226
|
| 662 |
+
pygments/unistring.py,sha256=p5c1i-HhoIhWemy9CUsaN9o39oomYHNxXll0Xfw6tEA,63208
|
| 663 |
+
pygments/util.py,sha256=2tj2nS1X9_OpcuSjf8dOET2bDVZhs8cEKd_uT6-Fgg8,10031
|
parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/REQUESTED
ADDED
|
File without changes
|
parrot/lib/python3.10/site-packages/pygments-2.18.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.24.2
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|