Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__init__.py +7 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/client.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/data_classes.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/documentation.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/serializing.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/utils.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/client.py +1114 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/data_classes.py +15 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/documentation.py +266 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/media_data.py +0 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/serializing.py +550 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/types.json +199 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/utils.py +561 -0
- evalkit_internvl/lib/python3.10/site-packages/gradio_client/version.txt +1 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/INSTALLER +1 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/LICENSE.txt +81 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/METADATA +125 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/RECORD +77 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/REQUESTED +0 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/WHEEL +5 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/entry_points.txt +2 -0
- evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/top_level.txt +1 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__init__.py +14 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/fork_exec.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/_posix_reduction.py +67 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/_win_reduction.py +18 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/context.py +378 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/popen_loky_win32.py +173 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/reduction.py +224 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/utils.py +181 -0
- evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/INSTALLER +1 -0
- evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt +1568 -0
- evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/METADATA +35 -0
- evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD +51 -0
.gitattributes
CHANGED
|
@@ -1655,3 +1655,4 @@ evalkit_internvl/lib/python3.10/site-packages/mpl_toolkits/mplot3d/__pycache__/a
|
|
| 1655 |
evalkit_internvl/lib/python3.10/site-packages/torchvision.libs/libcudart.60cfec8e.so.11.0 filter=lfs diff=lfs merge=lfs -text
|
| 1656 |
evalkit_tf437/lib/python3.10/site-packages/accelerate/__pycache__/accelerator.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1657 |
evalkit_internvl/lib/python3.10/site-packages/torchvision.libs/libnvjpeg.70530407.so.11 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1655 |
evalkit_internvl/lib/python3.10/site-packages/torchvision.libs/libcudart.60cfec8e.so.11.0 filter=lfs diff=lfs merge=lfs -text
|
| 1656 |
evalkit_tf437/lib/python3.10/site-packages/accelerate/__pycache__/accelerator.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1657 |
evalkit_internvl/lib/python3.10/site-packages/torchvision.libs/libnvjpeg.70530407.so.11 filter=lfs diff=lfs merge=lfs -text
|
| 1658 |
+
evalkit_internvl/lib/python3.10/site-packages/torchvision.libs/libpng16.7f72a3c5.so.16 filter=lfs diff=lfs merge=lfs -text
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from gradio_client.client import Client
|
| 2 |
+
from gradio_client.utils import __version__
|
| 3 |
+
|
| 4 |
+
__all__ = [
|
| 5 |
+
"Client",
|
| 6 |
+
"__version__",
|
| 7 |
+
]
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (314 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/client.cpython-310.pyc
ADDED
|
Binary file (39.7 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/data_classes.cpython-310.pyc
ADDED
|
Binary file (656 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/documentation.cpython-310.pyc
ADDED
|
Binary file (6.57 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/serializing.cpython-310.pyc
ADDED
|
Binary file (18.9 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/client.py
ADDED
|
@@ -0,0 +1,1114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""The main Client class for the Python client."""
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import concurrent.futures
|
| 5 |
+
import json
|
| 6 |
+
import os
|
| 7 |
+
import re
|
| 8 |
+
import tempfile
|
| 9 |
+
import threading
|
| 10 |
+
import time
|
| 11 |
+
import urllib.parse
|
| 12 |
+
import uuid
|
| 13 |
+
import warnings
|
| 14 |
+
from concurrent.futures import Future, TimeoutError
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
from pathlib import Path
|
| 17 |
+
from threading import Lock
|
| 18 |
+
from typing import Any, Callable, Literal
|
| 19 |
+
|
| 20 |
+
import huggingface_hub
|
| 21 |
+
import requests
|
| 22 |
+
import websockets
|
| 23 |
+
from huggingface_hub import SpaceHardware, SpaceStage
|
| 24 |
+
from huggingface_hub.utils import (
|
| 25 |
+
RepositoryNotFoundError,
|
| 26 |
+
build_hf_headers,
|
| 27 |
+
send_telemetry,
|
| 28 |
+
)
|
| 29 |
+
from packaging import version
|
| 30 |
+
|
| 31 |
+
from gradio_client import serializing, utils
|
| 32 |
+
from gradio_client.documentation import document, set_documentation_group
|
| 33 |
+
from gradio_client.serializing import Serializable
|
| 34 |
+
from gradio_client.utils import (
|
| 35 |
+
Communicator,
|
| 36 |
+
JobStatus,
|
| 37 |
+
Status,
|
| 38 |
+
StatusUpdate,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
set_documentation_group("py-client")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
DEFAULT_TEMP_DIR = os.environ.get("GRADIO_TEMP_DIR") or str(
|
| 45 |
+
Path(tempfile.gettempdir()) / "gradio"
|
| 46 |
+
)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@document("predict", "submit", "view_api", "duplicate")
|
| 50 |
+
class Client:
|
| 51 |
+
"""
|
| 52 |
+
The main Client class for the Python client. This class is used to connect to a remote Gradio app and call its API endpoints.
|
| 53 |
+
|
| 54 |
+
Example:
|
| 55 |
+
from gradio_client import Client
|
| 56 |
+
|
| 57 |
+
client = Client("abidlabs/whisper-large-v2") # connecting to a Hugging Face Space
|
| 58 |
+
client.predict("test.mp4", api_name="/predict")
|
| 59 |
+
>> What a nice recording! # returns the result of the remote API call
|
| 60 |
+
|
| 61 |
+
client = Client("https://bec81a83-5b5c-471e.gradio.live") # connecting to a temporary Gradio share URL
|
| 62 |
+
job = client.submit("hello", api_name="/predict") # runs the prediction in a background thread
|
| 63 |
+
job.result()
|
| 64 |
+
>> 49 # returns the result of the remote API call (blocking call)
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
def __init__(
|
| 68 |
+
self,
|
| 69 |
+
src: str,
|
| 70 |
+
hf_token: str | None = None,
|
| 71 |
+
max_workers: int = 40,
|
| 72 |
+
serialize: bool = True,
|
| 73 |
+
output_dir: str | Path | None = DEFAULT_TEMP_DIR,
|
| 74 |
+
verbose: bool = True,
|
| 75 |
+
):
|
| 76 |
+
"""
|
| 77 |
+
Parameters:
|
| 78 |
+
src: Either the name of the Hugging Face Space to load, (e.g. "abidlabs/whisper-large-v2") or the full URL (including "http" or "https") of the hosted Gradio app to load (e.g. "http://mydomain.com/app" or "https://bec81a83-5b5c-471e.gradio.live/").
|
| 79 |
+
hf_token: The Hugging Face token to use to access private Spaces. Automatically fetched if you are logged in via the Hugging Face Hub CLI. Obtain from: https://huggingface.co/settings/token
|
| 80 |
+
max_workers: The maximum number of thread workers that can be used to make requests to the remote Gradio app simultaneously.
|
| 81 |
+
serialize: Whether the client should serialize the inputs and deserialize the outputs of the remote API. If set to False, the client will pass the inputs and outputs as-is, without serializing/deserializing them. E.g. you if you set this to False, you'd submit an image in base64 format instead of a filepath, and you'd get back an image in base64 format from the remote API instead of a filepath.
|
| 82 |
+
output_dir: The directory to save files that are downloaded from the remote API. If None, reads from the GRADIO_TEMP_DIR environment variable. Defaults to a temporary directory on your machine.
|
| 83 |
+
verbose: Whether the client should print statements to the console.
|
| 84 |
+
"""
|
| 85 |
+
self.verbose = verbose
|
| 86 |
+
self.hf_token = hf_token
|
| 87 |
+
self.serialize = serialize
|
| 88 |
+
self.headers = build_hf_headers(
|
| 89 |
+
token=hf_token,
|
| 90 |
+
library_name="gradio_client",
|
| 91 |
+
library_version=utils.__version__,
|
| 92 |
+
)
|
| 93 |
+
self.space_id = None
|
| 94 |
+
self.output_dir = output_dir
|
| 95 |
+
|
| 96 |
+
if src.startswith("http://") or src.startswith("https://"):
|
| 97 |
+
_src = src if src.endswith("/") else src + "/"
|
| 98 |
+
else:
|
| 99 |
+
_src = self._space_name_to_src(src)
|
| 100 |
+
if _src is None:
|
| 101 |
+
raise ValueError(
|
| 102 |
+
f"Could not find Space: {src}. If it is a private Space, please provide an hf_token."
|
| 103 |
+
)
|
| 104 |
+
self.space_id = src
|
| 105 |
+
self.src = _src
|
| 106 |
+
state = self._get_space_state()
|
| 107 |
+
if state == SpaceStage.BUILDING:
|
| 108 |
+
if self.verbose:
|
| 109 |
+
print("Space is still building. Please wait...")
|
| 110 |
+
while self._get_space_state() == SpaceStage.BUILDING:
|
| 111 |
+
time.sleep(2) # so we don't get rate limited by the API
|
| 112 |
+
pass
|
| 113 |
+
if state in utils.INVALID_RUNTIME:
|
| 114 |
+
raise ValueError(
|
| 115 |
+
f"The current space is in the invalid state: {state}. "
|
| 116 |
+
"Please contact the owner to fix this."
|
| 117 |
+
)
|
| 118 |
+
if self.verbose:
|
| 119 |
+
print(f"Loaded as API: {self.src} ✔")
|
| 120 |
+
|
| 121 |
+
self.api_url = urllib.parse.urljoin(self.src, utils.API_URL)
|
| 122 |
+
self.ws_url = urllib.parse.urljoin(
|
| 123 |
+
self.src.replace("http", "ws", 1), utils.WS_URL
|
| 124 |
+
)
|
| 125 |
+
self.upload_url = urllib.parse.urljoin(self.src, utils.UPLOAD_URL)
|
| 126 |
+
self.reset_url = urllib.parse.urljoin(self.src, utils.RESET_URL)
|
| 127 |
+
self.config = self._get_config()
|
| 128 |
+
self.session_hash = str(uuid.uuid4())
|
| 129 |
+
|
| 130 |
+
self.endpoints = [
|
| 131 |
+
Endpoint(self, fn_index, dependency)
|
| 132 |
+
for fn_index, dependency in enumerate(self.config["dependencies"])
|
| 133 |
+
]
|
| 134 |
+
|
| 135 |
+
# Create a pool of threads to handle the requests
|
| 136 |
+
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers)
|
| 137 |
+
|
| 138 |
+
# Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1
|
| 139 |
+
threading.Thread(target=self._telemetry_thread).start()
|
| 140 |
+
|
| 141 |
+
@classmethod
|
| 142 |
+
def duplicate(
|
| 143 |
+
cls,
|
| 144 |
+
from_id: str,
|
| 145 |
+
to_id: str | None = None,
|
| 146 |
+
hf_token: str | None = None,
|
| 147 |
+
private: bool = True,
|
| 148 |
+
hardware: Literal[
|
| 149 |
+
"cpu-basic",
|
| 150 |
+
"cpu-upgrade",
|
| 151 |
+
"t4-small",
|
| 152 |
+
"t4-medium",
|
| 153 |
+
"a10g-small",
|
| 154 |
+
"a10g-large",
|
| 155 |
+
"a100-large",
|
| 156 |
+
]
|
| 157 |
+
| SpaceHardware
|
| 158 |
+
| None = None,
|
| 159 |
+
secrets: dict[str, str] | None = None,
|
| 160 |
+
sleep_timeout: int = 5,
|
| 161 |
+
max_workers: int = 40,
|
| 162 |
+
verbose: bool = True,
|
| 163 |
+
):
|
| 164 |
+
"""
|
| 165 |
+
Duplicates a Hugging Face Space under your account and returns a Client object
|
| 166 |
+
for the new Space. No duplication is created if the Space already exists in your
|
| 167 |
+
account (to override this, provide a new name for the new Space using `to_id`).
|
| 168 |
+
To use this method, you must provide an `hf_token` or be logged in via the Hugging
|
| 169 |
+
Face Hub CLI.
|
| 170 |
+
|
| 171 |
+
The new Space will be private by default and use the same hardware as the original
|
| 172 |
+
Space. This can be changed by using the `private` and `hardware` parameters. For
|
| 173 |
+
hardware upgrades (beyond the basic CPU tier), you may be required to provide
|
| 174 |
+
billing information on Hugging Face: https://huggingface.co/settings/billing
|
| 175 |
+
|
| 176 |
+
Parameters:
|
| 177 |
+
from_id: The name of the Hugging Face Space to duplicate in the format "{username}/{space_id}", e.g. "gradio/whisper".
|
| 178 |
+
to_id: The name of the new Hugging Face Space to create, e.g. "abidlabs/whisper-duplicate". If not provided, the new Space will be named "{your_HF_username}/{space_id}".
|
| 179 |
+
hf_token: The Hugging Face token to use to access private Spaces. Automatically fetched if you are logged in via the Hugging Face Hub CLI. Obtain from: https://huggingface.co/settings/token
|
| 180 |
+
private: Whether the new Space should be private (True) or public (False). Defaults to True.
|
| 181 |
+
hardware: The hardware tier to use for the new Space. Defaults to the same hardware tier as the original Space. Options include "cpu-basic", "cpu-upgrade", "t4-small", "t4-medium", "a10g-small", "a10g-large", "a100-large", subject to availability.
|
| 182 |
+
secrets: A dictionary of (secret key, secret value) to pass to the new Space. Defaults to None. Secrets are only used when the Space is duplicated for the first time, and are not updated if the duplicated Space already exists.
|
| 183 |
+
sleep_timeout: The number of minutes after which the duplicate Space will be puased if no requests are made to it (to minimize billing charges). Defaults to 5 minutes.
|
| 184 |
+
max_workers: The maximum number of thread workers that can be used to make requests to the remote Gradio app simultaneously.
|
| 185 |
+
verbose: Whether the client should print statements to the console.
|
| 186 |
+
Example:
|
| 187 |
+
import os
|
| 188 |
+
from gradio_client import Client
|
| 189 |
+
HF_TOKEN = os.environ.get("HF_TOKEN")
|
| 190 |
+
client = Client.duplicate("abidlabs/whisper", hf_token=HF_TOKEN)
|
| 191 |
+
client.predict("audio_sample.wav")
|
| 192 |
+
>> "This is a test of the whisper speech recognition model."
|
| 193 |
+
"""
|
| 194 |
+
try:
|
| 195 |
+
original_info = huggingface_hub.get_space_runtime(from_id, token=hf_token)
|
| 196 |
+
except RepositoryNotFoundError as rnfe:
|
| 197 |
+
raise ValueError(
|
| 198 |
+
f"Could not find Space: {from_id}. If it is a private Space, please provide an `hf_token`."
|
| 199 |
+
) from rnfe
|
| 200 |
+
if to_id:
|
| 201 |
+
if "/" in to_id:
|
| 202 |
+
to_id = to_id.split("/")[1]
|
| 203 |
+
space_id = huggingface_hub.get_full_repo_name(to_id, token=hf_token)
|
| 204 |
+
else:
|
| 205 |
+
space_id = huggingface_hub.get_full_repo_name(
|
| 206 |
+
from_id.split("/")[1], token=hf_token
|
| 207 |
+
)
|
| 208 |
+
try:
|
| 209 |
+
huggingface_hub.get_space_runtime(space_id, token=hf_token)
|
| 210 |
+
if verbose:
|
| 211 |
+
print(
|
| 212 |
+
f"Using your existing Space: {utils.SPACE_URL.format(space_id)} 🤗"
|
| 213 |
+
)
|
| 214 |
+
if secrets is not None:
|
| 215 |
+
warnings.warn(
|
| 216 |
+
"Secrets are only used when the Space is duplicated for the first time, and are not updated if the duplicated Space already exists."
|
| 217 |
+
)
|
| 218 |
+
except RepositoryNotFoundError:
|
| 219 |
+
if verbose:
|
| 220 |
+
print(f"Creating a duplicate of {from_id} for your own use... 🤗")
|
| 221 |
+
huggingface_hub.duplicate_space(
|
| 222 |
+
from_id=from_id,
|
| 223 |
+
to_id=space_id,
|
| 224 |
+
token=hf_token,
|
| 225 |
+
exist_ok=True,
|
| 226 |
+
private=private,
|
| 227 |
+
)
|
| 228 |
+
if secrets is not None:
|
| 229 |
+
for key, value in secrets.items():
|
| 230 |
+
huggingface_hub.add_space_secret(
|
| 231 |
+
space_id, key, value, token=hf_token
|
| 232 |
+
)
|
| 233 |
+
if verbose:
|
| 234 |
+
print(f"Created new Space: {utils.SPACE_URL.format(space_id)}")
|
| 235 |
+
current_info = huggingface_hub.get_space_runtime(space_id, token=hf_token)
|
| 236 |
+
current_hardware = (
|
| 237 |
+
current_info.hardware or huggingface_hub.SpaceHardware.CPU_BASIC
|
| 238 |
+
)
|
| 239 |
+
hardware = hardware or original_info.hardware
|
| 240 |
+
if current_hardware != hardware:
|
| 241 |
+
huggingface_hub.request_space_hardware(space_id, hardware) # type: ignore
|
| 242 |
+
print(
|
| 243 |
+
f"-------\nNOTE: this Space uses upgraded hardware: {hardware}... see billing info at https://huggingface.co/settings/billing\n-------"
|
| 244 |
+
)
|
| 245 |
+
# Setting a timeout only works if the hardware is not basic
|
| 246 |
+
# so set it here after the hardware has been requested
|
| 247 |
+
if hardware != huggingface_hub.SpaceHardware.CPU_BASIC:
|
| 248 |
+
utils.set_space_timeout(
|
| 249 |
+
space_id, hf_token=hf_token, timeout_in_seconds=sleep_timeout * 60
|
| 250 |
+
)
|
| 251 |
+
if verbose:
|
| 252 |
+
print("")
|
| 253 |
+
client = cls(
|
| 254 |
+
space_id, hf_token=hf_token, max_workers=max_workers, verbose=verbose
|
| 255 |
+
)
|
| 256 |
+
return client
|
| 257 |
+
|
| 258 |
+
def _get_space_state(self):
|
| 259 |
+
if not self.space_id:
|
| 260 |
+
return None
|
| 261 |
+
info = huggingface_hub.get_space_runtime(self.space_id, token=self.hf_token)
|
| 262 |
+
return info.stage
|
| 263 |
+
|
| 264 |
+
def predict(
|
| 265 |
+
self,
|
| 266 |
+
*args,
|
| 267 |
+
api_name: str | None = None,
|
| 268 |
+
fn_index: int | None = None,
|
| 269 |
+
) -> Any:
|
| 270 |
+
"""
|
| 271 |
+
Calls the Gradio API and returns the result (this is a blocking call).
|
| 272 |
+
|
| 273 |
+
Parameters:
|
| 274 |
+
args: The arguments to pass to the remote API. The order of the arguments must match the order of the inputs in the Gradio app.
|
| 275 |
+
api_name: The name of the API endpoint to call starting with a leading slash, e.g. "/predict". Does not need to be provided if the Gradio app has only one named API endpoint.
|
| 276 |
+
fn_index: As an alternative to api_name, this parameter takes the index of the API endpoint to call, e.g. 0. Both api_name and fn_index can be provided, but if they conflict, api_name will take precedence.
|
| 277 |
+
Returns:
|
| 278 |
+
The result of the API call. Will be a Tuple if the API has multiple outputs.
|
| 279 |
+
Example:
|
| 280 |
+
from gradio_client import Client
|
| 281 |
+
client = Client(src="gradio/calculator")
|
| 282 |
+
client.predict(5, "add", 4, api_name="/predict")
|
| 283 |
+
>> 9.0
|
| 284 |
+
"""
|
| 285 |
+
return self.submit(*args, api_name=api_name, fn_index=fn_index).result()
|
| 286 |
+
|
| 287 |
+
def submit(
|
| 288 |
+
self,
|
| 289 |
+
*args,
|
| 290 |
+
api_name: str | None = None,
|
| 291 |
+
fn_index: int | None = None,
|
| 292 |
+
result_callbacks: Callable | list[Callable] | None = None,
|
| 293 |
+
) -> Job:
|
| 294 |
+
"""
|
| 295 |
+
Creates and returns a Job object which calls the Gradio API in a background thread. The job can be used to retrieve the status and result of the remote API call.
|
| 296 |
+
|
| 297 |
+
Parameters:
|
| 298 |
+
args: The arguments to pass to the remote API. The order of the arguments must match the order of the inputs in the Gradio app.
|
| 299 |
+
api_name: The name of the API endpoint to call starting with a leading slash, e.g. "/predict". Does not need to be provided if the Gradio app has only one named API endpoint.
|
| 300 |
+
fn_index: As an alternative to api_name, this parameter takes the index of the API endpoint to call, e.g. 0. Both api_name and fn_index can be provided, but if they conflict, api_name will take precedence.
|
| 301 |
+
result_callbacks: A callback function, or list of callback functions, to be called when the result is ready. If a list of functions is provided, they will be called in order. The return values from the remote API are provided as separate parameters into the callback. If None, no callback will be called.
|
| 302 |
+
Returns:
|
| 303 |
+
A Job object that can be used to retrieve the status and result of the remote API call.
|
| 304 |
+
Example:
|
| 305 |
+
from gradio_client import Client
|
| 306 |
+
client = Client(src="gradio/calculator")
|
| 307 |
+
job = client.submit(5, "add", 4, api_name="/predict")
|
| 308 |
+
job.status()
|
| 309 |
+
>> <Status.STARTING: 'STARTING'>
|
| 310 |
+
job.result() # blocking call
|
| 311 |
+
>> 9.0
|
| 312 |
+
"""
|
| 313 |
+
inferred_fn_index = self._infer_fn_index(api_name, fn_index)
|
| 314 |
+
|
| 315 |
+
helper = None
|
| 316 |
+
if self.endpoints[inferred_fn_index].use_ws:
|
| 317 |
+
helper = Communicator(
|
| 318 |
+
Lock(),
|
| 319 |
+
JobStatus(),
|
| 320 |
+
self.endpoints[inferred_fn_index].process_predictions,
|
| 321 |
+
self.reset_url,
|
| 322 |
+
)
|
| 323 |
+
end_to_end_fn = self.endpoints[inferred_fn_index].make_end_to_end_fn(helper)
|
| 324 |
+
future = self.executor.submit(end_to_end_fn, *args)
|
| 325 |
+
|
| 326 |
+
job = Job(
|
| 327 |
+
future, communicator=helper, verbose=self.verbose, space_id=self.space_id
|
| 328 |
+
)
|
| 329 |
+
|
| 330 |
+
if result_callbacks:
|
| 331 |
+
if isinstance(result_callbacks, Callable):
|
| 332 |
+
result_callbacks = [result_callbacks]
|
| 333 |
+
|
| 334 |
+
def create_fn(callback) -> Callable:
|
| 335 |
+
def fn(future):
|
| 336 |
+
if isinstance(future.result(), tuple):
|
| 337 |
+
callback(*future.result())
|
| 338 |
+
else:
|
| 339 |
+
callback(future.result())
|
| 340 |
+
|
| 341 |
+
return fn
|
| 342 |
+
|
| 343 |
+
for callback in result_callbacks:
|
| 344 |
+
job.add_done_callback(create_fn(callback))
|
| 345 |
+
|
| 346 |
+
return job
|
| 347 |
+
|
| 348 |
+
def view_api(
|
| 349 |
+
self,
|
| 350 |
+
all_endpoints: bool | None = None,
|
| 351 |
+
print_info: bool = True,
|
| 352 |
+
return_format: Literal["dict", "str"] | None = None,
|
| 353 |
+
) -> dict | str | None:
|
| 354 |
+
"""
|
| 355 |
+
Prints the usage info for the API. If the Gradio app has multiple API endpoints, the usage info for each endpoint will be printed separately. If return_format="dict" the info is returned in dictionary format, as shown in the example below.
|
| 356 |
+
|
| 357 |
+
Parameters:
|
| 358 |
+
all_endpoints: If True, prints information for both named and unnamed endpoints in the Gradio app. If False, will only print info about named endpoints. If None (default), will print info about named endpoints, unless there aren't any -- in which it will print info about unnamed endpoints.
|
| 359 |
+
print_info: If True, prints the usage info to the console. If False, does not print the usage info.
|
| 360 |
+
return_format: If None, nothing is returned. If "str", returns the same string that would be printed to the console. If "dict", returns the usage info as a dictionary that can be programmatically parsed, and *all endpoints are returned in the dictionary* regardless of the value of `all_endpoints`. The format of the dictionary is in the docstring of this method.
|
| 361 |
+
Example:
|
| 362 |
+
from gradio_client import Client
|
| 363 |
+
client = Client(src="gradio/calculator")
|
| 364 |
+
client.view_api(return_format="dict")
|
| 365 |
+
>> {
|
| 366 |
+
'named_endpoints': {
|
| 367 |
+
'/predict': {
|
| 368 |
+
'parameters': [
|
| 369 |
+
{
|
| 370 |
+
'label': 'num1',
|
| 371 |
+
'type_python': 'int | float',
|
| 372 |
+
'type_description': 'numeric value',
|
| 373 |
+
'component': 'Number',
|
| 374 |
+
'example_input': '5'
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
'label': 'operation',
|
| 378 |
+
'type_python': 'str',
|
| 379 |
+
'type_description': 'string value',
|
| 380 |
+
'component': 'Radio',
|
| 381 |
+
'example_input': 'add'
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
'label': 'num2',
|
| 385 |
+
'type_python': 'int | float',
|
| 386 |
+
'type_description': 'numeric value',
|
| 387 |
+
'component': 'Number',
|
| 388 |
+
'example_input': '5'
|
| 389 |
+
},
|
| 390 |
+
],
|
| 391 |
+
'returns': [
|
| 392 |
+
{
|
| 393 |
+
'label': 'output',
|
| 394 |
+
'type_python': 'int | float',
|
| 395 |
+
'type_description': 'numeric value',
|
| 396 |
+
'component': 'Number',
|
| 397 |
+
},
|
| 398 |
+
]
|
| 399 |
+
},
|
| 400 |
+
'/flag': {
|
| 401 |
+
'parameters': [
|
| 402 |
+
...
|
| 403 |
+
],
|
| 404 |
+
'returns': [
|
| 405 |
+
...
|
| 406 |
+
]
|
| 407 |
+
}
|
| 408 |
+
}
|
| 409 |
+
'unnamed_endpoints': {
|
| 410 |
+
2: {
|
| 411 |
+
'parameters': [
|
| 412 |
+
...
|
| 413 |
+
],
|
| 414 |
+
'returns': [
|
| 415 |
+
...
|
| 416 |
+
]
|
| 417 |
+
}
|
| 418 |
+
}
|
| 419 |
+
}
|
| 420 |
+
}
|
| 421 |
+
|
| 422 |
+
"""
|
| 423 |
+
if self.serialize:
|
| 424 |
+
api_info_url = urllib.parse.urljoin(self.src, utils.API_INFO_URL)
|
| 425 |
+
else:
|
| 426 |
+
api_info_url = urllib.parse.urljoin(self.src, utils.RAW_API_INFO_URL)
|
| 427 |
+
|
| 428 |
+
# Versions of Gradio older than 3.29.0 returned format of the API info
|
| 429 |
+
# from the /info endpoint
|
| 430 |
+
if version.parse(self.config.get("version", "2.0")) > version.Version("3.29.0"):
|
| 431 |
+
r = requests.get(api_info_url, headers=self.headers)
|
| 432 |
+
if r.ok:
|
| 433 |
+
info = r.json()
|
| 434 |
+
else:
|
| 435 |
+
raise ValueError(f"Could not fetch api info for {self.src}")
|
| 436 |
+
else:
|
| 437 |
+
fetch = requests.post(
|
| 438 |
+
utils.SPACE_FETCHER_URL,
|
| 439 |
+
json={"config": json.dumps(self.config), "serialize": self.serialize},
|
| 440 |
+
)
|
| 441 |
+
if fetch.ok:
|
| 442 |
+
info = fetch.json()["api"]
|
| 443 |
+
else:
|
| 444 |
+
raise ValueError(f"Could not fetch api info for {self.src}")
|
| 445 |
+
num_named_endpoints = len(info["named_endpoints"])
|
| 446 |
+
num_unnamed_endpoints = len(info["unnamed_endpoints"])
|
| 447 |
+
if num_named_endpoints == 0 and all_endpoints is None:
|
| 448 |
+
all_endpoints = True
|
| 449 |
+
|
| 450 |
+
human_info = "Client.predict() Usage Info\n---------------------------\n"
|
| 451 |
+
human_info += f"Named API endpoints: {num_named_endpoints}\n"
|
| 452 |
+
|
| 453 |
+
for api_name, endpoint_info in info["named_endpoints"].items():
|
| 454 |
+
human_info += self._render_endpoints_info(api_name, endpoint_info)
|
| 455 |
+
|
| 456 |
+
if all_endpoints:
|
| 457 |
+
human_info += f"\nUnnamed API endpoints: {num_unnamed_endpoints}\n"
|
| 458 |
+
for fn_index, endpoint_info in info["unnamed_endpoints"].items():
|
| 459 |
+
# When loading from json, the fn_indices are read as strings
|
| 460 |
+
# because json keys can only be strings
|
| 461 |
+
human_info += self._render_endpoints_info(int(fn_index), endpoint_info)
|
| 462 |
+
else:
|
| 463 |
+
if num_unnamed_endpoints > 0:
|
| 464 |
+
human_info += f"\nUnnamed API endpoints: {num_unnamed_endpoints}, to view, run Client.view_api(all_endpoints=True)\n"
|
| 465 |
+
|
| 466 |
+
if print_info:
|
| 467 |
+
print(human_info)
|
| 468 |
+
if return_format == "str":
|
| 469 |
+
return human_info
|
| 470 |
+
elif return_format == "dict":
|
| 471 |
+
return info
|
| 472 |
+
|
| 473 |
+
def reset_session(self) -> None:
|
| 474 |
+
self.session_hash = str(uuid.uuid4())
|
| 475 |
+
|
| 476 |
+
def _render_endpoints_info(
|
| 477 |
+
self,
|
| 478 |
+
name_or_index: str | int,
|
| 479 |
+
endpoints_info: dict[str, list[dict[str, Any]]],
|
| 480 |
+
) -> str:
|
| 481 |
+
parameter_names = [p["label"] for p in endpoints_info["parameters"]]
|
| 482 |
+
parameter_names = [utils.sanitize_parameter_names(p) for p in parameter_names]
|
| 483 |
+
rendered_parameters = ", ".join(parameter_names)
|
| 484 |
+
if rendered_parameters:
|
| 485 |
+
rendered_parameters = rendered_parameters + ", "
|
| 486 |
+
return_values = [p["label"] for p in endpoints_info["returns"]]
|
| 487 |
+
return_values = [utils.sanitize_parameter_names(r) for r in return_values]
|
| 488 |
+
rendered_return_values = ", ".join(return_values)
|
| 489 |
+
if len(return_values) > 1:
|
| 490 |
+
rendered_return_values = f"({rendered_return_values})"
|
| 491 |
+
|
| 492 |
+
if isinstance(name_or_index, str):
|
| 493 |
+
final_param = f'api_name="{name_or_index}"'
|
| 494 |
+
elif isinstance(name_or_index, int):
|
| 495 |
+
final_param = f"fn_index={name_or_index}"
|
| 496 |
+
else:
|
| 497 |
+
raise ValueError("name_or_index must be a string or integer")
|
| 498 |
+
|
| 499 |
+
human_info = f"\n - predict({rendered_parameters}{final_param}) -> {rendered_return_values}\n"
|
| 500 |
+
human_info += " Parameters:\n"
|
| 501 |
+
if endpoints_info["parameters"]:
|
| 502 |
+
for info in endpoints_info["parameters"]:
|
| 503 |
+
desc = (
|
| 504 |
+
f" ({info['python_type']['description']})"
|
| 505 |
+
if info["python_type"].get("description")
|
| 506 |
+
else ""
|
| 507 |
+
)
|
| 508 |
+
type_ = info["python_type"]["type"]
|
| 509 |
+
human_info += f" - [{info['component']}] {utils.sanitize_parameter_names(info['label'])}: {type_}{desc} \n"
|
| 510 |
+
else:
|
| 511 |
+
human_info += " - None\n"
|
| 512 |
+
human_info += " Returns:\n"
|
| 513 |
+
if endpoints_info["returns"]:
|
| 514 |
+
for info in endpoints_info["returns"]:
|
| 515 |
+
desc = (
|
| 516 |
+
f" ({info['python_type']['description']})"
|
| 517 |
+
if info["python_type"].get("description")
|
| 518 |
+
else ""
|
| 519 |
+
)
|
| 520 |
+
type_ = info["python_type"]["type"]
|
| 521 |
+
human_info += f" - [{info['component']}] {utils.sanitize_parameter_names(info['label'])}: {type_}{desc} \n"
|
| 522 |
+
else:
|
| 523 |
+
human_info += " - None\n"
|
| 524 |
+
|
| 525 |
+
return human_info
|
| 526 |
+
|
| 527 |
+
def __repr__(self):
|
| 528 |
+
return self.view_api(print_info=False, return_format="str")
|
| 529 |
+
|
| 530 |
+
def __str__(self):
|
| 531 |
+
return self.view_api(print_info=False, return_format="str")
|
| 532 |
+
|
| 533 |
+
def _telemetry_thread(self) -> None:
|
| 534 |
+
# Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1
|
| 535 |
+
data = {
|
| 536 |
+
"src": self.src,
|
| 537 |
+
}
|
| 538 |
+
try:
|
| 539 |
+
send_telemetry(
|
| 540 |
+
topic="py_client/initiated",
|
| 541 |
+
library_name="gradio_client",
|
| 542 |
+
library_version=utils.__version__,
|
| 543 |
+
user_agent=data,
|
| 544 |
+
)
|
| 545 |
+
except Exception:
|
| 546 |
+
pass
|
| 547 |
+
|
| 548 |
+
def _infer_fn_index(self, api_name: str | None, fn_index: int | None) -> int:
|
| 549 |
+
inferred_fn_index = None
|
| 550 |
+
if api_name is not None:
|
| 551 |
+
for i, d in enumerate(self.config["dependencies"]):
|
| 552 |
+
config_api_name = d.get("api_name")
|
| 553 |
+
if config_api_name is None or config_api_name is False:
|
| 554 |
+
continue
|
| 555 |
+
if "/" + config_api_name == api_name:
|
| 556 |
+
inferred_fn_index = i
|
| 557 |
+
break
|
| 558 |
+
else:
|
| 559 |
+
error_message = f"Cannot find a function with `api_name`: {api_name}."
|
| 560 |
+
if not api_name.startswith("/"):
|
| 561 |
+
error_message += " Did you mean to use a leading slash?"
|
| 562 |
+
raise ValueError(error_message)
|
| 563 |
+
elif fn_index is not None:
|
| 564 |
+
inferred_fn_index = fn_index
|
| 565 |
+
else:
|
| 566 |
+
valid_endpoints = [
|
| 567 |
+
e for e in self.endpoints if e.is_valid and e.api_name is not None
|
| 568 |
+
]
|
| 569 |
+
if len(valid_endpoints) == 1:
|
| 570 |
+
inferred_fn_index = valid_endpoints[0].fn_index
|
| 571 |
+
else:
|
| 572 |
+
raise ValueError(
|
| 573 |
+
"This Gradio app might have multiple endpoints. Please specify an `api_name` or `fn_index`"
|
| 574 |
+
)
|
| 575 |
+
return inferred_fn_index
|
| 576 |
+
|
| 577 |
+
def __del__(self):
|
| 578 |
+
if hasattr(self, "executor"):
|
| 579 |
+
self.executor.shutdown(wait=True)
|
| 580 |
+
|
| 581 |
+
def _space_name_to_src(self, space) -> str | None:
|
| 582 |
+
return huggingface_hub.space_info(space, token=self.hf_token).host # type: ignore
|
| 583 |
+
|
| 584 |
+
def _get_config(self) -> dict:
|
| 585 |
+
r = requests.get(
|
| 586 |
+
urllib.parse.urljoin(self.src, utils.CONFIG_URL), headers=self.headers
|
| 587 |
+
)
|
| 588 |
+
if r.ok:
|
| 589 |
+
return r.json()
|
| 590 |
+
else: # to support older versions of Gradio
|
| 591 |
+
r = requests.get(self.src, headers=self.headers)
|
| 592 |
+
# some basic regex to extract the config
|
| 593 |
+
result = re.search(r"window.gradio_config = (.*?);[\s]*</script>", r.text)
|
| 594 |
+
try:
|
| 595 |
+
config = json.loads(result.group(1)) # type: ignore
|
| 596 |
+
except AttributeError as ae:
|
| 597 |
+
raise ValueError(
|
| 598 |
+
f"Could not get Gradio config from: {self.src}"
|
| 599 |
+
) from ae
|
| 600 |
+
if "allow_flagging" in config:
|
| 601 |
+
raise ValueError(
|
| 602 |
+
"Gradio 2.x is not supported by this client. Please upgrade your Gradio app to Gradio 3.x or higher."
|
| 603 |
+
)
|
| 604 |
+
return config
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
class Endpoint:
|
| 608 |
+
"""Helper class for storing all the information about a single API endpoint."""
|
| 609 |
+
|
| 610 |
+
def __init__(self, client: Client, fn_index: int, dependency: dict):
|
| 611 |
+
self.client: Client = client
|
| 612 |
+
self.fn_index = fn_index
|
| 613 |
+
self.dependency = dependency
|
| 614 |
+
api_name = dependency.get("api_name")
|
| 615 |
+
self.api_name: str | None = (
|
| 616 |
+
None if (api_name is None or api_name is False) else "/" + api_name
|
| 617 |
+
)
|
| 618 |
+
self.use_ws = self._use_websocket(self.dependency)
|
| 619 |
+
self.input_component_types = []
|
| 620 |
+
self.output_component_types = []
|
| 621 |
+
self.root_url = client.src + "/" if not client.src.endswith("/") else client.src
|
| 622 |
+
try:
|
| 623 |
+
# Only a real API endpoint if backend_fn is True (so not just a frontend function), serializers are valid,
|
| 624 |
+
# and api_name is not False (meaning that the developer has explicitly disabled the API endpoint)
|
| 625 |
+
self.serializers, self.deserializers = self._setup_serializers()
|
| 626 |
+
self.is_valid = self.dependency["backend_fn"] and self.api_name is not False
|
| 627 |
+
except AssertionError:
|
| 628 |
+
self.is_valid = False
|
| 629 |
+
|
| 630 |
+
def __repr__(self):
|
| 631 |
+
return f"Endpoint src: {self.client.src}, api_name: {self.api_name}, fn_index: {self.fn_index}"
|
| 632 |
+
|
| 633 |
+
def __str__(self):
|
| 634 |
+
return self.__repr__()
|
| 635 |
+
|
| 636 |
+
def make_end_to_end_fn(self, helper: Communicator | None = None):
|
| 637 |
+
_predict = self.make_predict(helper)
|
| 638 |
+
|
| 639 |
+
def _inner(*data):
|
| 640 |
+
if not self.is_valid:
|
| 641 |
+
raise utils.InvalidAPIEndpointError()
|
| 642 |
+
data = self.insert_state(*data)
|
| 643 |
+
if self.client.serialize:
|
| 644 |
+
data = self.serialize(*data)
|
| 645 |
+
predictions = _predict(*data)
|
| 646 |
+
predictions = self.process_predictions(*predictions)
|
| 647 |
+
# Append final output only if not already present
|
| 648 |
+
# for consistency between generators and not generators
|
| 649 |
+
if helper:
|
| 650 |
+
with helper.lock:
|
| 651 |
+
if not helper.job.outputs:
|
| 652 |
+
helper.job.outputs.append(predictions)
|
| 653 |
+
return predictions
|
| 654 |
+
|
| 655 |
+
return _inner
|
| 656 |
+
|
| 657 |
+
def make_predict(self, helper: Communicator | None = None):
|
| 658 |
+
def _predict(*data) -> tuple:
|
| 659 |
+
data = json.dumps(
|
| 660 |
+
{
|
| 661 |
+
"data": data,
|
| 662 |
+
"fn_index": self.fn_index,
|
| 663 |
+
"session_hash": self.client.session_hash,
|
| 664 |
+
}
|
| 665 |
+
)
|
| 666 |
+
hash_data = json.dumps(
|
| 667 |
+
{
|
| 668 |
+
"fn_index": self.fn_index,
|
| 669 |
+
"session_hash": self.client.session_hash,
|
| 670 |
+
}
|
| 671 |
+
)
|
| 672 |
+
|
| 673 |
+
if self.use_ws:
|
| 674 |
+
result = utils.synchronize_async(self._ws_fn, data, hash_data, helper)
|
| 675 |
+
if "error" in result:
|
| 676 |
+
raise ValueError(result["error"])
|
| 677 |
+
else:
|
| 678 |
+
response = requests.post(
|
| 679 |
+
self.client.api_url, headers=self.client.headers, data=data
|
| 680 |
+
)
|
| 681 |
+
result = json.loads(response.content.decode("utf-8"))
|
| 682 |
+
try:
|
| 683 |
+
output = result["data"]
|
| 684 |
+
except KeyError as ke:
|
| 685 |
+
is_public_space = (
|
| 686 |
+
self.client.space_id
|
| 687 |
+
and not huggingface_hub.space_info(self.client.space_id).private
|
| 688 |
+
)
|
| 689 |
+
if "error" in result and "429" in result["error"] and is_public_space:
|
| 690 |
+
raise utils.TooManyRequestsError(
|
| 691 |
+
f"Too many requests to the API, please try again later. To avoid being rate-limited, "
|
| 692 |
+
f"please duplicate the Space using Client.duplicate({self.client.space_id}) "
|
| 693 |
+
f"and pass in your Hugging Face token."
|
| 694 |
+
) from None
|
| 695 |
+
elif "error" in result:
|
| 696 |
+
raise ValueError(result["error"]) from None
|
| 697 |
+
raise KeyError(
|
| 698 |
+
f"Could not find 'data' key in response. Response received: {result}"
|
| 699 |
+
) from ke
|
| 700 |
+
return tuple(output)
|
| 701 |
+
|
| 702 |
+
return _predict
|
| 703 |
+
|
| 704 |
+
def _predict_resolve(self, *data) -> Any:
|
| 705 |
+
"""Needed for gradio.load(), which has a slightly different signature for serializing/deserializing"""
|
| 706 |
+
outputs = self.make_predict()(*data)
|
| 707 |
+
if len(self.dependency["outputs"]) == 1:
|
| 708 |
+
return outputs[0]
|
| 709 |
+
return outputs
|
| 710 |
+
|
| 711 |
+
def _upload(
|
| 712 |
+
self, file_paths: list[str | list[str]]
|
| 713 |
+
) -> list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]]:
|
| 714 |
+
if not file_paths:
|
| 715 |
+
return []
|
| 716 |
+
# Put all the filepaths in one file
|
| 717 |
+
# but then keep track of which index in the
|
| 718 |
+
# original list they came from so we can recreate
|
| 719 |
+
# the original structure
|
| 720 |
+
files = []
|
| 721 |
+
indices = []
|
| 722 |
+
for i, fs in enumerate(file_paths):
|
| 723 |
+
if not isinstance(fs, list):
|
| 724 |
+
fs = [fs]
|
| 725 |
+
for f in fs:
|
| 726 |
+
files.append(("files", (Path(f).name, open(f, "rb")))) # noqa: SIM115
|
| 727 |
+
indices.append(i)
|
| 728 |
+
r = requests.post(
|
| 729 |
+
self.client.upload_url, headers=self.client.headers, files=files
|
| 730 |
+
)
|
| 731 |
+
if r.status_code != 200:
|
| 732 |
+
uploaded = file_paths
|
| 733 |
+
else:
|
| 734 |
+
uploaded = []
|
| 735 |
+
result = r.json()
|
| 736 |
+
for i, fs in enumerate(file_paths):
|
| 737 |
+
if isinstance(fs, list):
|
| 738 |
+
output = [o for ix, o in enumerate(result) if indices[ix] == i]
|
| 739 |
+
res = [
|
| 740 |
+
{
|
| 741 |
+
"is_file": True,
|
| 742 |
+
"name": o,
|
| 743 |
+
"orig_name": Path(f).name,
|
| 744 |
+
"data": None,
|
| 745 |
+
}
|
| 746 |
+
for f, o in zip(fs, output)
|
| 747 |
+
]
|
| 748 |
+
else:
|
| 749 |
+
o = next(o for ix, o in enumerate(result) if indices[ix] == i)
|
| 750 |
+
res = {
|
| 751 |
+
"is_file": True,
|
| 752 |
+
"name": o,
|
| 753 |
+
"orig_name": Path(fs).name,
|
| 754 |
+
"data": None,
|
| 755 |
+
}
|
| 756 |
+
uploaded.append(res)
|
| 757 |
+
return uploaded
|
| 758 |
+
|
| 759 |
+
def _add_uploaded_files_to_data(
|
| 760 |
+
self,
|
| 761 |
+
files: list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]],
|
| 762 |
+
data: list[Any],
|
| 763 |
+
) -> None:
|
| 764 |
+
"""Helper function to modify the input data with the uploaded files."""
|
| 765 |
+
file_counter = 0
|
| 766 |
+
for i, t in enumerate(self.input_component_types):
|
| 767 |
+
if t in ["file", "uploadbutton"]:
|
| 768 |
+
data[i] = files[file_counter]
|
| 769 |
+
file_counter += 1
|
| 770 |
+
|
| 771 |
+
def insert_state(self, *data) -> tuple:
|
| 772 |
+
data = list(data)
|
| 773 |
+
for i, input_component_type in enumerate(self.input_component_types):
|
| 774 |
+
if input_component_type == utils.STATE_COMPONENT:
|
| 775 |
+
data.insert(i, None)
|
| 776 |
+
return tuple(data)
|
| 777 |
+
|
| 778 |
+
def remove_state(self, *data) -> tuple:
|
| 779 |
+
data = [
|
| 780 |
+
d
|
| 781 |
+
for d, oct in zip(data, self.output_component_types)
|
| 782 |
+
if oct != utils.STATE_COMPONENT
|
| 783 |
+
]
|
| 784 |
+
return tuple(data)
|
| 785 |
+
|
| 786 |
+
def reduce_singleton_output(self, *data) -> Any:
|
| 787 |
+
if (
|
| 788 |
+
len(
|
| 789 |
+
[
|
| 790 |
+
oct
|
| 791 |
+
for oct in self.output_component_types
|
| 792 |
+
if oct != utils.STATE_COMPONENT
|
| 793 |
+
]
|
| 794 |
+
)
|
| 795 |
+
== 1
|
| 796 |
+
):
|
| 797 |
+
return data[0]
|
| 798 |
+
else:
|
| 799 |
+
return data
|
| 800 |
+
|
| 801 |
+
def serialize(self, *data) -> tuple:
|
| 802 |
+
assert len(data) == len(
|
| 803 |
+
self.serializers
|
| 804 |
+
), f"Expected {len(self.serializers)} arguments, got {len(data)}"
|
| 805 |
+
|
| 806 |
+
files = [
|
| 807 |
+
f
|
| 808 |
+
for f, t in zip(data, self.input_component_types)
|
| 809 |
+
if t in ["file", "uploadbutton"]
|
| 810 |
+
]
|
| 811 |
+
uploaded_files = self._upload(files)
|
| 812 |
+
data = list(data)
|
| 813 |
+
self._add_uploaded_files_to_data(uploaded_files, data)
|
| 814 |
+
o = tuple([s.serialize(d) for s, d in zip(self.serializers, data)])
|
| 815 |
+
return o
|
| 816 |
+
|
| 817 |
+
def deserialize(self, *data) -> tuple:
|
| 818 |
+
assert len(data) == len(
|
| 819 |
+
self.deserializers
|
| 820 |
+
), f"Expected {len(self.deserializers)} outputs, got {len(data)}"
|
| 821 |
+
outputs = tuple(
|
| 822 |
+
[
|
| 823 |
+
s.deserialize(
|
| 824 |
+
d,
|
| 825 |
+
save_dir=self.client.output_dir,
|
| 826 |
+
hf_token=self.client.hf_token,
|
| 827 |
+
root_url=self.root_url,
|
| 828 |
+
)
|
| 829 |
+
for s, d in zip(self.deserializers, data)
|
| 830 |
+
]
|
| 831 |
+
)
|
| 832 |
+
return outputs
|
| 833 |
+
|
| 834 |
+
def process_predictions(self, *predictions):
|
| 835 |
+
if self.client.serialize:
|
| 836 |
+
predictions = self.deserialize(*predictions)
|
| 837 |
+
predictions = self.remove_state(*predictions)
|
| 838 |
+
predictions = self.reduce_singleton_output(*predictions)
|
| 839 |
+
return predictions
|
| 840 |
+
|
| 841 |
+
def _setup_serializers(self) -> tuple[list[Serializable], list[Serializable]]:
|
| 842 |
+
inputs = self.dependency["inputs"]
|
| 843 |
+
serializers = []
|
| 844 |
+
|
| 845 |
+
for i in inputs:
|
| 846 |
+
for component in self.client.config["components"]:
|
| 847 |
+
if component["id"] == i:
|
| 848 |
+
component_name = component["type"]
|
| 849 |
+
self.input_component_types.append(component_name)
|
| 850 |
+
if component.get("serializer"):
|
| 851 |
+
serializer_name = component["serializer"]
|
| 852 |
+
assert (
|
| 853 |
+
serializer_name in serializing.SERIALIZER_MAPPING
|
| 854 |
+
), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version."
|
| 855 |
+
serializer = serializing.SERIALIZER_MAPPING[serializer_name]
|
| 856 |
+
else:
|
| 857 |
+
assert (
|
| 858 |
+
component_name in serializing.COMPONENT_MAPPING
|
| 859 |
+
), f"Unknown component: {component_name}, you may need to update your gradio_client version."
|
| 860 |
+
serializer = serializing.COMPONENT_MAPPING[component_name]
|
| 861 |
+
serializers.append(serializer()) # type: ignore
|
| 862 |
+
|
| 863 |
+
outputs = self.dependency["outputs"]
|
| 864 |
+
deserializers = []
|
| 865 |
+
for i in outputs:
|
| 866 |
+
for component in self.client.config["components"]:
|
| 867 |
+
if component["id"] == i:
|
| 868 |
+
component_name = component["type"]
|
| 869 |
+
self.output_component_types.append(component_name)
|
| 870 |
+
if component.get("serializer"):
|
| 871 |
+
serializer_name = component["serializer"]
|
| 872 |
+
assert (
|
| 873 |
+
serializer_name in serializing.SERIALIZER_MAPPING
|
| 874 |
+
), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version."
|
| 875 |
+
deserializer = serializing.SERIALIZER_MAPPING[serializer_name]
|
| 876 |
+
else:
|
| 877 |
+
assert (
|
| 878 |
+
component_name in serializing.COMPONENT_MAPPING
|
| 879 |
+
), f"Unknown component: {component_name}, you may need to update your gradio_client version."
|
| 880 |
+
deserializer = serializing.COMPONENT_MAPPING[component_name]
|
| 881 |
+
deserializers.append(deserializer()) # type: ignore
|
| 882 |
+
|
| 883 |
+
return serializers, deserializers
|
| 884 |
+
|
| 885 |
+
def _use_websocket(self, dependency: dict) -> bool:
|
| 886 |
+
queue_enabled = self.client.config.get("enable_queue", False)
|
| 887 |
+
queue_uses_websocket = version.parse(
|
| 888 |
+
self.client.config.get("version", "2.0")
|
| 889 |
+
) >= version.Version("3.2")
|
| 890 |
+
dependency_uses_queue = dependency.get("queue", False) is not False
|
| 891 |
+
return queue_enabled and queue_uses_websocket and dependency_uses_queue
|
| 892 |
+
|
| 893 |
+
async def _ws_fn(self, data, hash_data, helper: Communicator):
|
| 894 |
+
async with websockets.connect( # type: ignore
|
| 895 |
+
self.client.ws_url,
|
| 896 |
+
open_timeout=10,
|
| 897 |
+
extra_headers=self.client.headers,
|
| 898 |
+
max_size=1024 * 1024 * 1024,
|
| 899 |
+
) as websocket:
|
| 900 |
+
return await utils.get_pred_from_ws(websocket, data, hash_data, helper)
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
@document("result", "outputs", "status")
|
| 904 |
+
class Job(Future):
|
| 905 |
+
"""
|
| 906 |
+
A Job is a wrapper over the Future class that represents a prediction call that has been
|
| 907 |
+
submitted by the Gradio client. This class is not meant to be instantiated directly, but rather
|
| 908 |
+
is created by the Client.submit() method.
|
| 909 |
+
|
| 910 |
+
A Job object includes methods to get the status of the prediction call, as well to get the outputs of
|
| 911 |
+
the prediction call. Job objects are also iterable, and can be used in a loop to get the outputs
|
| 912 |
+
of prediction calls as they become available for generator endpoints.
|
| 913 |
+
"""
|
| 914 |
+
|
| 915 |
+
def __init__(
|
| 916 |
+
self,
|
| 917 |
+
future: Future,
|
| 918 |
+
communicator: Communicator | None = None,
|
| 919 |
+
verbose: bool = True,
|
| 920 |
+
space_id: str | None = None,
|
| 921 |
+
):
|
| 922 |
+
"""
|
| 923 |
+
Parameters:
|
| 924 |
+
future: The future object that represents the prediction call, created by the Client.submit() method
|
| 925 |
+
communicator: The communicator object that is used to communicate between the client and the background thread running the job
|
| 926 |
+
verbose: Whether to print any status-related messages to the console
|
| 927 |
+
space_id: The space ID corresponding to the Client object that created this Job object
|
| 928 |
+
"""
|
| 929 |
+
self.future = future
|
| 930 |
+
self.communicator = communicator
|
| 931 |
+
self._counter = 0
|
| 932 |
+
self.verbose = verbose
|
| 933 |
+
self.space_id = space_id
|
| 934 |
+
|
| 935 |
+
def __iter__(self) -> Job:
|
| 936 |
+
return self
|
| 937 |
+
|
| 938 |
+
def __next__(self) -> tuple | Any:
|
| 939 |
+
if not self.communicator:
|
| 940 |
+
raise StopIteration()
|
| 941 |
+
|
| 942 |
+
with self.communicator.lock:
|
| 943 |
+
if self.communicator.job.latest_status.code == Status.FINISHED:
|
| 944 |
+
raise StopIteration()
|
| 945 |
+
|
| 946 |
+
while True:
|
| 947 |
+
with self.communicator.lock:
|
| 948 |
+
if len(self.communicator.job.outputs) == self._counter + 1:
|
| 949 |
+
o = self.communicator.job.outputs[self._counter]
|
| 950 |
+
self._counter += 1
|
| 951 |
+
return o
|
| 952 |
+
if self.communicator.job.latest_status.code == Status.FINISHED:
|
| 953 |
+
raise StopIteration()
|
| 954 |
+
|
| 955 |
+
def result(self, timeout: float | None = None) -> Any:
|
| 956 |
+
"""
|
| 957 |
+
Return the result of the call that the future represents. Raises CancelledError: If the future was cancelled, TimeoutError: If the future didn't finish executing before the given timeout, and Exception: If the call raised then that exception will be raised.
|
| 958 |
+
|
| 959 |
+
Parameters:
|
| 960 |
+
timeout: The number of seconds to wait for the result if the future isn't done. If None, then there is no limit on the wait time.
|
| 961 |
+
Returns:
|
| 962 |
+
The result of the call that the future represents.
|
| 963 |
+
Example:
|
| 964 |
+
from gradio_client import Client
|
| 965 |
+
calculator = Client(src="gradio/calculator")
|
| 966 |
+
job = calculator.submit("foo", "add", 4, fn_index=0)
|
| 967 |
+
job.result(timeout=5)
|
| 968 |
+
>> 9
|
| 969 |
+
"""
|
| 970 |
+
if self.communicator:
|
| 971 |
+
timeout = timeout or float("inf")
|
| 972 |
+
if self.future._exception: # type: ignore
|
| 973 |
+
raise self.future._exception # type: ignore
|
| 974 |
+
with self.communicator.lock:
|
| 975 |
+
if self.communicator.job.outputs:
|
| 976 |
+
return self.communicator.job.outputs[0]
|
| 977 |
+
start = datetime.now()
|
| 978 |
+
while True:
|
| 979 |
+
if (datetime.now() - start).seconds > timeout:
|
| 980 |
+
raise TimeoutError()
|
| 981 |
+
if self.future._exception: # type: ignore
|
| 982 |
+
raise self.future._exception # type: ignore
|
| 983 |
+
with self.communicator.lock:
|
| 984 |
+
if self.communicator.job.outputs:
|
| 985 |
+
return self.communicator.job.outputs[0]
|
| 986 |
+
time.sleep(0.01)
|
| 987 |
+
else:
|
| 988 |
+
return super().result(timeout=timeout)
|
| 989 |
+
|
| 990 |
+
def outputs(self) -> list[tuple | Any]:
|
| 991 |
+
"""
|
| 992 |
+
Returns a list containing the latest outputs from the Job.
|
| 993 |
+
|
| 994 |
+
If the endpoint has multiple output components, the list will contain
|
| 995 |
+
a tuple of results. Otherwise, it will contain the results without storing them
|
| 996 |
+
in tuples.
|
| 997 |
+
|
| 998 |
+
For endpoints that are queued, this list will contain the final job output even
|
| 999 |
+
if that endpoint does not use a generator function.
|
| 1000 |
+
|
| 1001 |
+
Example:
|
| 1002 |
+
from gradio_client import Client
|
| 1003 |
+
client = Client(src="gradio/count_generator")
|
| 1004 |
+
job = client.submit(3, api_name="/count")
|
| 1005 |
+
while not job.done():
|
| 1006 |
+
time.sleep(0.1)
|
| 1007 |
+
job.outputs()
|
| 1008 |
+
>> ['0', '1', '2']
|
| 1009 |
+
"""
|
| 1010 |
+
if not self.communicator:
|
| 1011 |
+
return []
|
| 1012 |
+
else:
|
| 1013 |
+
with self.communicator.lock:
|
| 1014 |
+
return self.communicator.job.outputs
|
| 1015 |
+
|
| 1016 |
+
def status(self) -> StatusUpdate:
|
| 1017 |
+
"""
|
| 1018 |
+
Returns the latest status update from the Job in the form of a StatusUpdate
|
| 1019 |
+
object, which contains the following fields: code, rank, queue_size, success, time, eta, and progress_data.
|
| 1020 |
+
|
| 1021 |
+
progress_data is a list of updates emitted by the gr.Progress() tracker of the event handler. Each element
|
| 1022 |
+
of the list has the following fields: index, length, unit, progress, desc. If the event handler does not have
|
| 1023 |
+
a gr.Progress() tracker, the progress_data field will be None.
|
| 1024 |
+
|
| 1025 |
+
Example:
|
| 1026 |
+
from gradio_client import Client
|
| 1027 |
+
client = Client(src="gradio/calculator")
|
| 1028 |
+
job = client.submit(5, "add", 4, api_name="/predict")
|
| 1029 |
+
job.status()
|
| 1030 |
+
>> <Status.STARTING: 'STARTING'>
|
| 1031 |
+
job.status().eta
|
| 1032 |
+
>> 43.241 # seconds
|
| 1033 |
+
"""
|
| 1034 |
+
time = datetime.now()
|
| 1035 |
+
cancelled = False
|
| 1036 |
+
if self.communicator:
|
| 1037 |
+
with self.communicator.lock:
|
| 1038 |
+
cancelled = self.communicator.should_cancel
|
| 1039 |
+
if cancelled:
|
| 1040 |
+
return StatusUpdate(
|
| 1041 |
+
code=Status.CANCELLED,
|
| 1042 |
+
rank=0,
|
| 1043 |
+
queue_size=None,
|
| 1044 |
+
success=False,
|
| 1045 |
+
time=time,
|
| 1046 |
+
eta=None,
|
| 1047 |
+
progress_data=None,
|
| 1048 |
+
)
|
| 1049 |
+
if self.done():
|
| 1050 |
+
if not self.future._exception: # type: ignore
|
| 1051 |
+
return StatusUpdate(
|
| 1052 |
+
code=Status.FINISHED,
|
| 1053 |
+
rank=0,
|
| 1054 |
+
queue_size=None,
|
| 1055 |
+
success=True,
|
| 1056 |
+
time=time,
|
| 1057 |
+
eta=None,
|
| 1058 |
+
progress_data=None,
|
| 1059 |
+
)
|
| 1060 |
+
else:
|
| 1061 |
+
return StatusUpdate(
|
| 1062 |
+
code=Status.FINISHED,
|
| 1063 |
+
rank=0,
|
| 1064 |
+
queue_size=None,
|
| 1065 |
+
success=False,
|
| 1066 |
+
time=time,
|
| 1067 |
+
eta=None,
|
| 1068 |
+
progress_data=None,
|
| 1069 |
+
)
|
| 1070 |
+
else:
|
| 1071 |
+
if not self.communicator:
|
| 1072 |
+
return StatusUpdate(
|
| 1073 |
+
code=Status.PROCESSING,
|
| 1074 |
+
rank=0,
|
| 1075 |
+
queue_size=None,
|
| 1076 |
+
success=None,
|
| 1077 |
+
time=time,
|
| 1078 |
+
eta=None,
|
| 1079 |
+
progress_data=None,
|
| 1080 |
+
)
|
| 1081 |
+
else:
|
| 1082 |
+
with self.communicator.lock:
|
| 1083 |
+
eta = self.communicator.job.latest_status.eta
|
| 1084 |
+
if self.verbose and self.space_id and eta and eta > 30:
|
| 1085 |
+
print(
|
| 1086 |
+
f"Due to heavy traffic on this app, the prediction will take approximately {int(eta)} seconds."
|
| 1087 |
+
f"For faster predictions without waiting in queue, you may duplicate the space using: Client.duplicate({self.space_id})"
|
| 1088 |
+
)
|
| 1089 |
+
return self.communicator.job.latest_status
|
| 1090 |
+
|
| 1091 |
+
def __getattr__(self, name):
|
| 1092 |
+
"""Forwards any properties to the Future class."""
|
| 1093 |
+
return getattr(self.future, name)
|
| 1094 |
+
|
| 1095 |
+
def cancel(self) -> bool:
|
| 1096 |
+
"""Cancels the job as best as possible.
|
| 1097 |
+
|
| 1098 |
+
If the app you are connecting to has the gradio queue enabled, the job
|
| 1099 |
+
will be cancelled locally as soon as possible. For apps that do not use the
|
| 1100 |
+
queue, the job cannot be cancelled if it's been sent to the local executor
|
| 1101 |
+
(for the time being).
|
| 1102 |
+
|
| 1103 |
+
Note: In general, this DOES not stop the process from running in the upstream server
|
| 1104 |
+
except for the following situations:
|
| 1105 |
+
|
| 1106 |
+
1. If the job is queued upstream, it will be removed from the queue and the server will not run the job
|
| 1107 |
+
2. If the job has iterative outputs, the job will finish as soon as the current iteration finishes running
|
| 1108 |
+
3. If the job has not been picked up by the queue yet, the queue will not pick up the job
|
| 1109 |
+
"""
|
| 1110 |
+
if self.communicator:
|
| 1111 |
+
with self.communicator.lock:
|
| 1112 |
+
self.communicator.should_cancel = True
|
| 1113 |
+
return True
|
| 1114 |
+
return self.future.cancel()
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/data_classes.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TypedDict
|
| 4 |
+
|
| 5 |
+
from typing_extensions import NotRequired
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FileData(TypedDict):
|
| 9 |
+
name: str | None # filename
|
| 10 |
+
data: str | None # base64 encoded data
|
| 11 |
+
size: NotRequired[int | None] # size in bytes
|
| 12 |
+
is_file: NotRequired[
|
| 13 |
+
bool
|
| 14 |
+
] # whether the data corresponds to a file or base64 encoded data
|
| 15 |
+
orig_name: NotRequired[str] # original filename
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/documentation.py
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Contains methods that generate documentation for Gradio functions and classes."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import inspect
|
| 6 |
+
from typing import Callable
|
| 7 |
+
|
| 8 |
+
classes_to_document = {}
|
| 9 |
+
classes_inherit_documentation = {}
|
| 10 |
+
documentation_group = None
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def set_documentation_group(m):
|
| 14 |
+
global documentation_group
|
| 15 |
+
documentation_group = m
|
| 16 |
+
if m not in classes_to_document:
|
| 17 |
+
classes_to_document[m] = []
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def extract_instance_attr_doc(cls, attr):
|
| 21 |
+
code = inspect.getsource(cls.__init__)
|
| 22 |
+
lines = [line.strip() for line in code.split("\n")]
|
| 23 |
+
i = None
|
| 24 |
+
for i, line in enumerate(lines): # noqa: B007
|
| 25 |
+
if line.startswith("self." + attr + ":") or line.startswith(
|
| 26 |
+
"self." + attr + " ="
|
| 27 |
+
):
|
| 28 |
+
break
|
| 29 |
+
assert i is not None, f"Could not find {attr} in {cls.__name__}"
|
| 30 |
+
start_line = lines.index('"""', i)
|
| 31 |
+
end_line = lines.index('"""', start_line + 1)
|
| 32 |
+
for j in range(i + 1, start_line):
|
| 33 |
+
assert not lines[j].startswith("self."), (
|
| 34 |
+
f"Found another attribute before docstring for {attr} in {cls.__name__}: "
|
| 35 |
+
+ lines[j]
|
| 36 |
+
+ "\n start:"
|
| 37 |
+
+ lines[i]
|
| 38 |
+
)
|
| 39 |
+
doc_string = " ".join(lines[start_line + 1 : end_line])
|
| 40 |
+
return doc_string
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def document(*fns, inherit=False):
|
| 44 |
+
"""
|
| 45 |
+
Defines the @document decorator which adds classes or functions to the Gradio
|
| 46 |
+
documentation at www.gradio.app/docs.
|
| 47 |
+
|
| 48 |
+
Usage examples:
|
| 49 |
+
- Put @document() above a class to document the class and its constructor.
|
| 50 |
+
- Put @document("fn1", "fn2") above a class to also document methods fn1 and fn2.
|
| 51 |
+
- Put @document("*fn3") with an asterisk above a class to document the instance attribute methods f3.
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
def inner_doc(cls):
|
| 55 |
+
global documentation_group
|
| 56 |
+
if inherit:
|
| 57 |
+
classes_inherit_documentation[cls] = None
|
| 58 |
+
classes_to_document[documentation_group].append((cls, fns))
|
| 59 |
+
return cls
|
| 60 |
+
|
| 61 |
+
return inner_doc
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def document_fn(fn: Callable, cls) -> tuple[str, list[dict], dict, str | None]:
|
| 65 |
+
"""
|
| 66 |
+
Generates documentation for any function.
|
| 67 |
+
Parameters:
|
| 68 |
+
fn: Function to document
|
| 69 |
+
Returns:
|
| 70 |
+
description: General description of fn
|
| 71 |
+
parameters: A list of dicts for each parameter, storing data for the parameter name, annotation and doc
|
| 72 |
+
return: A dict storing data for the returned annotation and doc
|
| 73 |
+
example: Code for an example use of the fn
|
| 74 |
+
"""
|
| 75 |
+
doc_str = inspect.getdoc(fn) or ""
|
| 76 |
+
doc_lines = doc_str.split("\n")
|
| 77 |
+
signature = inspect.signature(fn)
|
| 78 |
+
description, parameters, returns, examples = [], {}, [], []
|
| 79 |
+
mode = "description"
|
| 80 |
+
for line in doc_lines:
|
| 81 |
+
line = line.rstrip()
|
| 82 |
+
if line == "Parameters:":
|
| 83 |
+
mode = "parameter"
|
| 84 |
+
elif line.startswith("Example:"):
|
| 85 |
+
mode = "example"
|
| 86 |
+
if "(" in line and ")" in line:
|
| 87 |
+
c = line.split("(")[1].split(")")[0]
|
| 88 |
+
if c != cls.__name__:
|
| 89 |
+
mode = "ignore"
|
| 90 |
+
elif line == "Returns:":
|
| 91 |
+
mode = "return"
|
| 92 |
+
else:
|
| 93 |
+
if mode == "description":
|
| 94 |
+
description.append(line if line.strip() else "<br>")
|
| 95 |
+
continue
|
| 96 |
+
if not (line.startswith(" ") or line.strip() == ""):
|
| 97 |
+
print(line)
|
| 98 |
+
assert (
|
| 99 |
+
line.startswith(" ") or line.strip() == ""
|
| 100 |
+
), f"Documentation format for {fn.__name__} has format error in line: {line}"
|
| 101 |
+
line = line[4:]
|
| 102 |
+
if mode == "parameter":
|
| 103 |
+
colon_index = line.index(": ")
|
| 104 |
+
assert (
|
| 105 |
+
colon_index > -1
|
| 106 |
+
), f"Documentation format for {fn.__name__} has format error in line: {line}"
|
| 107 |
+
parameter = line[:colon_index]
|
| 108 |
+
parameter_doc = line[colon_index + 2 :]
|
| 109 |
+
parameters[parameter] = parameter_doc
|
| 110 |
+
elif mode == "return":
|
| 111 |
+
returns.append(line)
|
| 112 |
+
elif mode == "example":
|
| 113 |
+
examples.append(line)
|
| 114 |
+
description_doc = " ".join(description)
|
| 115 |
+
parameter_docs = []
|
| 116 |
+
for param_name, param in signature.parameters.items():
|
| 117 |
+
if param_name.startswith("_"):
|
| 118 |
+
continue
|
| 119 |
+
if param_name in ["kwargs", "args"] and param_name not in parameters:
|
| 120 |
+
continue
|
| 121 |
+
parameter_doc = {
|
| 122 |
+
"name": param_name,
|
| 123 |
+
"annotation": param.annotation,
|
| 124 |
+
"doc": parameters.get(param_name),
|
| 125 |
+
}
|
| 126 |
+
if param_name in parameters:
|
| 127 |
+
del parameters[param_name]
|
| 128 |
+
if param.default != inspect.Parameter.empty:
|
| 129 |
+
default = param.default
|
| 130 |
+
if type(default) == str:
|
| 131 |
+
default = '"' + default + '"'
|
| 132 |
+
if default.__class__.__module__ != "builtins":
|
| 133 |
+
default = f"{default.__class__.__name__}()"
|
| 134 |
+
parameter_doc["default"] = default
|
| 135 |
+
elif parameter_doc["doc"] is not None:
|
| 136 |
+
if "kwargs" in parameter_doc["doc"]:
|
| 137 |
+
parameter_doc["kwargs"] = True
|
| 138 |
+
if "args" in parameter_doc["doc"]:
|
| 139 |
+
parameter_doc["args"] = True
|
| 140 |
+
parameter_docs.append(parameter_doc)
|
| 141 |
+
assert (
|
| 142 |
+
len(parameters) == 0
|
| 143 |
+
), f"Documentation format for {fn.__name__} documents nonexistent parameters: {''.join(parameters.keys())}"
|
| 144 |
+
if len(returns) == 0:
|
| 145 |
+
return_docs = {}
|
| 146 |
+
elif len(returns) == 1:
|
| 147 |
+
return_docs = {"annotation": signature.return_annotation, "doc": returns[0]}
|
| 148 |
+
else:
|
| 149 |
+
return_docs = {}
|
| 150 |
+
# raise ValueError("Does not support multiple returns yet.")
|
| 151 |
+
examples_doc = "\n".join(examples) if len(examples) > 0 else None
|
| 152 |
+
return description_doc, parameter_docs, return_docs, examples_doc
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def document_cls(cls):
|
| 156 |
+
doc_str = inspect.getdoc(cls)
|
| 157 |
+
if doc_str is None:
|
| 158 |
+
return "", {}, ""
|
| 159 |
+
tags = {}
|
| 160 |
+
description_lines = []
|
| 161 |
+
mode = "description"
|
| 162 |
+
for line in doc_str.split("\n"):
|
| 163 |
+
line = line.rstrip()
|
| 164 |
+
if line.endswith(":") and " " not in line:
|
| 165 |
+
mode = line[:-1].lower()
|
| 166 |
+
tags[mode] = []
|
| 167 |
+
elif line.split(" ")[0].endswith(":") and not line.startswith(" "):
|
| 168 |
+
tag = line[: line.index(":")].lower()
|
| 169 |
+
value = line[line.index(":") + 2 :]
|
| 170 |
+
tags[tag] = value
|
| 171 |
+
else:
|
| 172 |
+
if mode == "description":
|
| 173 |
+
description_lines.append(line if line.strip() else "<br>")
|
| 174 |
+
else:
|
| 175 |
+
assert (
|
| 176 |
+
line.startswith(" ") or not line.strip()
|
| 177 |
+
), f"Documentation format for {cls.__name__} has format error in line: {line}"
|
| 178 |
+
tags[mode].append(line[4:])
|
| 179 |
+
if "example" in tags:
|
| 180 |
+
example = "\n".join(tags["example"])
|
| 181 |
+
del tags["example"]
|
| 182 |
+
else:
|
| 183 |
+
example = None
|
| 184 |
+
for key, val in tags.items():
|
| 185 |
+
if isinstance(val, list):
|
| 186 |
+
tags[key] = "<br>".join(val)
|
| 187 |
+
description = " ".join(description_lines).replace("\n", "<br>")
|
| 188 |
+
return description, tags, example
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def generate_documentation():
|
| 192 |
+
documentation = {}
|
| 193 |
+
for mode, class_list in classes_to_document.items():
|
| 194 |
+
documentation[mode] = []
|
| 195 |
+
for cls, fns in class_list:
|
| 196 |
+
fn_to_document = cls if inspect.isfunction(cls) else cls.__init__
|
| 197 |
+
_, parameter_doc, return_doc, _ = document_fn(fn_to_document, cls)
|
| 198 |
+
cls_description, cls_tags, cls_example = document_cls(cls)
|
| 199 |
+
cls_documentation = {
|
| 200 |
+
"class": cls,
|
| 201 |
+
"name": cls.__name__,
|
| 202 |
+
"description": cls_description,
|
| 203 |
+
"tags": cls_tags,
|
| 204 |
+
"parameters": parameter_doc,
|
| 205 |
+
"returns": return_doc,
|
| 206 |
+
"example": cls_example,
|
| 207 |
+
"fns": [],
|
| 208 |
+
}
|
| 209 |
+
for fn_name in fns:
|
| 210 |
+
instance_attribute_fn = fn_name.startswith("*")
|
| 211 |
+
if instance_attribute_fn:
|
| 212 |
+
fn_name = fn_name[1:]
|
| 213 |
+
# Instance attribute fns are classes
|
| 214 |
+
# whose __call__ method determines their behavior
|
| 215 |
+
fn = getattr(cls(), fn_name).__call__
|
| 216 |
+
else:
|
| 217 |
+
fn = getattr(cls, fn_name)
|
| 218 |
+
if not callable(fn):
|
| 219 |
+
description_doc = str(fn)
|
| 220 |
+
parameter_docs = {}
|
| 221 |
+
return_docs = {}
|
| 222 |
+
examples_doc = ""
|
| 223 |
+
override_signature = f"gr.{cls.__name__}.{fn_name}"
|
| 224 |
+
else:
|
| 225 |
+
(
|
| 226 |
+
description_doc,
|
| 227 |
+
parameter_docs,
|
| 228 |
+
return_docs,
|
| 229 |
+
examples_doc,
|
| 230 |
+
) = document_fn(fn, cls)
|
| 231 |
+
override_signature = None
|
| 232 |
+
if instance_attribute_fn:
|
| 233 |
+
description_doc = extract_instance_attr_doc(cls, fn_name)
|
| 234 |
+
cls_documentation["fns"].append(
|
| 235 |
+
{
|
| 236 |
+
"fn": fn,
|
| 237 |
+
"name": fn_name,
|
| 238 |
+
"description": description_doc,
|
| 239 |
+
"tags": {},
|
| 240 |
+
"parameters": parameter_docs,
|
| 241 |
+
"returns": return_docs,
|
| 242 |
+
"example": examples_doc,
|
| 243 |
+
"override_signature": override_signature,
|
| 244 |
+
}
|
| 245 |
+
)
|
| 246 |
+
documentation[mode].append(cls_documentation)
|
| 247 |
+
if cls in classes_inherit_documentation:
|
| 248 |
+
classes_inherit_documentation[cls] = cls_documentation["fns"]
|
| 249 |
+
for mode, class_list in classes_to_document.items():
|
| 250 |
+
for i, (cls, _) in enumerate(class_list):
|
| 251 |
+
for super_class in classes_inherit_documentation:
|
| 252 |
+
if (
|
| 253 |
+
inspect.isclass(cls)
|
| 254 |
+
and issubclass(cls, super_class)
|
| 255 |
+
and cls != super_class
|
| 256 |
+
):
|
| 257 |
+
for inherited_fn in classes_inherit_documentation[super_class]:
|
| 258 |
+
inherited_fn = dict(inherited_fn)
|
| 259 |
+
try:
|
| 260 |
+
inherited_fn["description"] = extract_instance_attr_doc(
|
| 261 |
+
cls, inherited_fn["name"]
|
| 262 |
+
)
|
| 263 |
+
except (ValueError, AssertionError):
|
| 264 |
+
pass
|
| 265 |
+
documentation[mode][i]["fns"].append(inherited_fn)
|
| 266 |
+
return documentation
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/media_data.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/serializing.py
ADDED
|
@@ -0,0 +1,550 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import os
|
| 5 |
+
import uuid
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import Any
|
| 8 |
+
|
| 9 |
+
from gradio_client import media_data, utils
|
| 10 |
+
from gradio_client.data_classes import FileData
|
| 11 |
+
|
| 12 |
+
with open(Path(__file__).parent / "types.json") as f:
|
| 13 |
+
serializer_types = json.load(f)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class Serializable:
|
| 17 |
+
def serialized_info(self):
|
| 18 |
+
"""
|
| 19 |
+
The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description].
|
| 20 |
+
Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output
|
| 21 |
+
"""
|
| 22 |
+
return self.api_info()
|
| 23 |
+
|
| 24 |
+
def api_info(self) -> dict[str, list[str]]:
|
| 25 |
+
"""
|
| 26 |
+
The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description].
|
| 27 |
+
Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output
|
| 28 |
+
"""
|
| 29 |
+
raise NotImplementedError()
|
| 30 |
+
|
| 31 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 32 |
+
"""
|
| 33 |
+
The example inputs for this component as a dictionary whose values are example inputs compatible with this component.
|
| 34 |
+
Keys of the dictionary are: raw, serialized
|
| 35 |
+
"""
|
| 36 |
+
raise NotImplementedError()
|
| 37 |
+
|
| 38 |
+
# For backwards compatibility
|
| 39 |
+
def input_api_info(self) -> tuple[str, str]:
|
| 40 |
+
api_info = self.api_info()
|
| 41 |
+
return (api_info["serialized_input"][0], api_info["serialized_input"][1])
|
| 42 |
+
|
| 43 |
+
# For backwards compatibility
|
| 44 |
+
def output_api_info(self) -> tuple[str, str]:
|
| 45 |
+
api_info = self.api_info()
|
| 46 |
+
return (api_info["serialized_output"][0], api_info["serialized_output"][1])
|
| 47 |
+
|
| 48 |
+
def serialize(self, x: Any, load_dir: str | Path = ""):
|
| 49 |
+
"""
|
| 50 |
+
Convert data from human-readable format to serialized format for a browser.
|
| 51 |
+
"""
|
| 52 |
+
return x
|
| 53 |
+
|
| 54 |
+
def deserialize(
|
| 55 |
+
self,
|
| 56 |
+
x: Any,
|
| 57 |
+
save_dir: str | Path | None = None,
|
| 58 |
+
root_url: str | None = None,
|
| 59 |
+
hf_token: str | None = None,
|
| 60 |
+
):
|
| 61 |
+
"""
|
| 62 |
+
Convert data from serialized format for a browser to human-readable format.
|
| 63 |
+
"""
|
| 64 |
+
return x
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class SimpleSerializable(Serializable):
|
| 68 |
+
"""General class that does not perform any serialization or deserialization."""
|
| 69 |
+
|
| 70 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 71 |
+
return {
|
| 72 |
+
"info": serializer_types["SimpleSerializable"],
|
| 73 |
+
"serialized_info": False,
|
| 74 |
+
}
|
| 75 |
+
|
| 76 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 77 |
+
return {
|
| 78 |
+
"raw": None,
|
| 79 |
+
"serialized": None,
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class StringSerializable(Serializable):
|
| 84 |
+
"""Expects a string as input/output but performs no serialization."""
|
| 85 |
+
|
| 86 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 87 |
+
return {
|
| 88 |
+
"info": serializer_types["StringSerializable"],
|
| 89 |
+
"serialized_info": False,
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 93 |
+
return {
|
| 94 |
+
"raw": "Howdy!",
|
| 95 |
+
"serialized": "Howdy!",
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class ListStringSerializable(Serializable):
|
| 100 |
+
"""Expects a list of strings as input/output but performs no serialization."""
|
| 101 |
+
|
| 102 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 103 |
+
return {
|
| 104 |
+
"info": serializer_types["ListStringSerializable"],
|
| 105 |
+
"serialized_info": False,
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 109 |
+
return {
|
| 110 |
+
"raw": ["Howdy!", "Merhaba"],
|
| 111 |
+
"serialized": ["Howdy!", "Merhaba"],
|
| 112 |
+
}
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class BooleanSerializable(Serializable):
|
| 116 |
+
"""Expects a boolean as input/output but performs no serialization."""
|
| 117 |
+
|
| 118 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 119 |
+
return {
|
| 120 |
+
"info": serializer_types["BooleanSerializable"],
|
| 121 |
+
"serialized_info": False,
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 125 |
+
return {
|
| 126 |
+
"raw": True,
|
| 127 |
+
"serialized": True,
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class NumberSerializable(Serializable):
|
| 132 |
+
"""Expects a number (int/float) as input/output but performs no serialization."""
|
| 133 |
+
|
| 134 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 135 |
+
return {
|
| 136 |
+
"info": serializer_types["NumberSerializable"],
|
| 137 |
+
"serialized_info": False,
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 141 |
+
return {
|
| 142 |
+
"raw": 5,
|
| 143 |
+
"serialized": 5,
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class ImgSerializable(Serializable):
|
| 148 |
+
"""Expects a base64 string as input/output which is serialized to a filepath."""
|
| 149 |
+
|
| 150 |
+
def serialized_info(self):
|
| 151 |
+
return {"type": "string", "description": "filepath or URL to image"}
|
| 152 |
+
|
| 153 |
+
def api_info(self) -> dict[str, bool | dict]:
|
| 154 |
+
return {"info": serializer_types["ImgSerializable"], "serialized_info": True}
|
| 155 |
+
|
| 156 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 157 |
+
return {
|
| 158 |
+
"raw": media_data.BASE64_IMAGE,
|
| 159 |
+
"serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
def serialize(
|
| 163 |
+
self,
|
| 164 |
+
x: str | None,
|
| 165 |
+
load_dir: str | Path = "",
|
| 166 |
+
) -> str | None:
|
| 167 |
+
"""
|
| 168 |
+
Convert from human-friendly version of a file (string filepath) to a serialized
|
| 169 |
+
representation (base64).
|
| 170 |
+
Parameters:
|
| 171 |
+
x: String path to file to serialize
|
| 172 |
+
load_dir: Path to directory containing x
|
| 173 |
+
"""
|
| 174 |
+
if not x:
|
| 175 |
+
return None
|
| 176 |
+
if utils.is_http_url_like(x):
|
| 177 |
+
return utils.encode_url_to_base64(x)
|
| 178 |
+
return utils.encode_file_to_base64(Path(load_dir) / x)
|
| 179 |
+
|
| 180 |
+
def deserialize(
|
| 181 |
+
self,
|
| 182 |
+
x: str | None,
|
| 183 |
+
save_dir: str | Path | None = None,
|
| 184 |
+
root_url: str | None = None,
|
| 185 |
+
hf_token: str | None = None,
|
| 186 |
+
) -> str | None:
|
| 187 |
+
"""
|
| 188 |
+
Convert from serialized representation of a file (base64) to a human-friendly
|
| 189 |
+
version (string filepath). Optionally, save the file to the directory specified by save_dir
|
| 190 |
+
Parameters:
|
| 191 |
+
x: Base64 representation of image to deserialize into a string filepath
|
| 192 |
+
save_dir: Path to directory to save the deserialized image to
|
| 193 |
+
root_url: Ignored
|
| 194 |
+
hf_token: Ignored
|
| 195 |
+
"""
|
| 196 |
+
if x is None or x == "":
|
| 197 |
+
return None
|
| 198 |
+
file = utils.decode_base64_to_file(x, dir=save_dir)
|
| 199 |
+
return file.name
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class FileSerializable(Serializable):
|
| 203 |
+
"""Expects a dict with base64 representation of object as input/output which is serialized to a filepath."""
|
| 204 |
+
|
| 205 |
+
def serialized_info(self):
|
| 206 |
+
return self._single_file_serialized_info()
|
| 207 |
+
|
| 208 |
+
def _single_file_api_info(self):
|
| 209 |
+
return {
|
| 210 |
+
"info": serializer_types["SingleFileSerializable"],
|
| 211 |
+
"serialized_info": True,
|
| 212 |
+
}
|
| 213 |
+
|
| 214 |
+
def _single_file_serialized_info(self):
|
| 215 |
+
return {"type": "string", "description": "filepath or URL to file"}
|
| 216 |
+
|
| 217 |
+
def _multiple_file_serialized_info(self):
|
| 218 |
+
return {
|
| 219 |
+
"type": "array",
|
| 220 |
+
"description": "List of filepath(s) or URL(s) to files",
|
| 221 |
+
"items": {"type": "string", "description": "filepath or URL to file"},
|
| 222 |
+
}
|
| 223 |
+
|
| 224 |
+
def _multiple_file_api_info(self):
|
| 225 |
+
return {
|
| 226 |
+
"info": serializer_types["MultipleFileSerializable"],
|
| 227 |
+
"serialized_info": True,
|
| 228 |
+
}
|
| 229 |
+
|
| 230 |
+
def api_info(self) -> dict[str, dict | bool]:
|
| 231 |
+
return self._single_file_api_info()
|
| 232 |
+
|
| 233 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 234 |
+
return self._single_file_example_inputs()
|
| 235 |
+
|
| 236 |
+
def _single_file_example_inputs(self) -> dict[str, Any]:
|
| 237 |
+
return {
|
| 238 |
+
"raw": {"is_file": False, "data": media_data.BASE64_FILE},
|
| 239 |
+
"serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf",
|
| 240 |
+
}
|
| 241 |
+
|
| 242 |
+
def _multiple_file_example_inputs(self) -> dict[str, Any]:
|
| 243 |
+
return {
|
| 244 |
+
"raw": [{"is_file": False, "data": media_data.BASE64_FILE}],
|
| 245 |
+
"serialized": [
|
| 246 |
+
"https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf"
|
| 247 |
+
],
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
def _serialize_single(
|
| 251 |
+
self, x: str | FileData | None, load_dir: str | Path = ""
|
| 252 |
+
) -> FileData | None:
|
| 253 |
+
if x is None or isinstance(x, dict):
|
| 254 |
+
return x
|
| 255 |
+
if utils.is_http_url_like(x):
|
| 256 |
+
filename = x
|
| 257 |
+
size = None
|
| 258 |
+
else:
|
| 259 |
+
filename = str(Path(load_dir) / x)
|
| 260 |
+
size = Path(filename).stat().st_size
|
| 261 |
+
return {
|
| 262 |
+
"name": filename,
|
| 263 |
+
"data": utils.encode_url_or_file_to_base64(filename),
|
| 264 |
+
"orig_name": Path(filename).name,
|
| 265 |
+
"is_file": False,
|
| 266 |
+
"size": size,
|
| 267 |
+
}
|
| 268 |
+
|
| 269 |
+
def _deserialize_single(
|
| 270 |
+
self,
|
| 271 |
+
x: str | FileData | None,
|
| 272 |
+
save_dir: str | None = None,
|
| 273 |
+
root_url: str | None = None,
|
| 274 |
+
hf_token: str | None = None,
|
| 275 |
+
) -> str | None:
|
| 276 |
+
if x is None:
|
| 277 |
+
return None
|
| 278 |
+
if isinstance(x, str):
|
| 279 |
+
file_name = utils.decode_base64_to_file(x, dir=save_dir).name
|
| 280 |
+
elif isinstance(x, dict):
|
| 281 |
+
if x.get("is_file"):
|
| 282 |
+
filepath = x.get("name")
|
| 283 |
+
assert filepath is not None, f"The 'name' field is missing in {x}"
|
| 284 |
+
if root_url is not None:
|
| 285 |
+
file_name = utils.download_tmp_copy_of_file(
|
| 286 |
+
root_url + "file=" + filepath,
|
| 287 |
+
hf_token=hf_token,
|
| 288 |
+
dir=save_dir,
|
| 289 |
+
)
|
| 290 |
+
else:
|
| 291 |
+
file_name = utils.create_tmp_copy_of_file(filepath, dir=save_dir)
|
| 292 |
+
else:
|
| 293 |
+
data = x.get("data")
|
| 294 |
+
assert data is not None, f"The 'data' field is missing in {x}"
|
| 295 |
+
file_name = utils.decode_base64_to_file(data, dir=save_dir).name
|
| 296 |
+
else:
|
| 297 |
+
raise ValueError(
|
| 298 |
+
f"A FileSerializable component can only deserialize a string or a dict, not a {type(x)}: {x}"
|
| 299 |
+
)
|
| 300 |
+
return file_name
|
| 301 |
+
|
| 302 |
+
def serialize(
|
| 303 |
+
self,
|
| 304 |
+
x: str | FileData | None | list[str | FileData | None],
|
| 305 |
+
load_dir: str | Path = "",
|
| 306 |
+
) -> FileData | None | list[FileData | None]:
|
| 307 |
+
"""
|
| 308 |
+
Convert from human-friendly version of a file (string filepath) to a
|
| 309 |
+
serialized representation (base64)
|
| 310 |
+
Parameters:
|
| 311 |
+
x: String path to file to serialize
|
| 312 |
+
load_dir: Path to directory containing x
|
| 313 |
+
"""
|
| 314 |
+
if x is None or x == "":
|
| 315 |
+
return None
|
| 316 |
+
if isinstance(x, list):
|
| 317 |
+
return [self._serialize_single(f, load_dir=load_dir) for f in x]
|
| 318 |
+
else:
|
| 319 |
+
return self._serialize_single(x, load_dir=load_dir)
|
| 320 |
+
|
| 321 |
+
def deserialize(
|
| 322 |
+
self,
|
| 323 |
+
x: str | FileData | None | list[str | FileData | None],
|
| 324 |
+
save_dir: Path | str | None = None,
|
| 325 |
+
root_url: str | None = None,
|
| 326 |
+
hf_token: str | None = None,
|
| 327 |
+
) -> str | None | list[str | None]:
|
| 328 |
+
"""
|
| 329 |
+
Convert from serialized representation of a file (base64) to a human-friendly
|
| 330 |
+
version (string filepath). Optionally, save the file to the directory specified by `save_dir`
|
| 331 |
+
Parameters:
|
| 332 |
+
x: Base64 representation of file to deserialize into a string filepath
|
| 333 |
+
save_dir: Path to directory to save the deserialized file to
|
| 334 |
+
root_url: If this component is loaded from an external Space, this is the URL of the Space.
|
| 335 |
+
hf_token: If this component is loaded from an external private Space, this is the access token for the Space
|
| 336 |
+
"""
|
| 337 |
+
if x is None:
|
| 338 |
+
return None
|
| 339 |
+
if isinstance(save_dir, Path):
|
| 340 |
+
save_dir = str(save_dir)
|
| 341 |
+
if isinstance(x, list):
|
| 342 |
+
return [
|
| 343 |
+
self._deserialize_single(
|
| 344 |
+
f, save_dir=save_dir, root_url=root_url, hf_token=hf_token
|
| 345 |
+
)
|
| 346 |
+
for f in x
|
| 347 |
+
]
|
| 348 |
+
else:
|
| 349 |
+
return self._deserialize_single(
|
| 350 |
+
x, save_dir=save_dir, root_url=root_url, hf_token=hf_token
|
| 351 |
+
)
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
class VideoSerializable(FileSerializable):
|
| 355 |
+
def serialized_info(self):
|
| 356 |
+
return {"type": "string", "description": "filepath or URL to video file"}
|
| 357 |
+
|
| 358 |
+
def api_info(self) -> dict[str, dict | bool]:
|
| 359 |
+
return {"info": serializer_types["FileSerializable"], "serialized_info": True}
|
| 360 |
+
|
| 361 |
+
def example_inputs(self) -> dict[str, Any]:
|
| 362 |
+
return {
|
| 363 |
+
"raw": {"is_file": False, "data": media_data.BASE64_VIDEO},
|
| 364 |
+
"serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/video_sample.mp4",
|
| 365 |
+
}
|
| 366 |
+
|
| 367 |
+
def serialize(
|
| 368 |
+
self, x: str | None, load_dir: str | Path = ""
|
| 369 |
+
) -> tuple[FileData | None, None]:
|
| 370 |
+
return (super().serialize(x, load_dir), None) # type: ignore
|
| 371 |
+
|
| 372 |
+
def deserialize(
|
| 373 |
+
self,
|
| 374 |
+
x: tuple[FileData | None, FileData | None] | None,
|
| 375 |
+
save_dir: Path | str | None = None,
|
| 376 |
+
root_url: str | None = None,
|
| 377 |
+
hf_token: str | None = None,
|
| 378 |
+
) -> str | tuple[str | None, str | None] | None:
|
| 379 |
+
"""
|
| 380 |
+
Convert from serialized representation of a file (base64) to a human-friendly
|
| 381 |
+
version (string filepath). Optionally, save the file to the directory specified by `save_dir`
|
| 382 |
+
"""
|
| 383 |
+
if isinstance(x, (tuple, list)):
|
| 384 |
+
assert len(x) == 2, f"Expected tuple of length 2. Received: {x}"
|
| 385 |
+
x_as_list = [x[0], x[1]]
|
| 386 |
+
else:
|
| 387 |
+
raise ValueError(f"Expected tuple of length 2. Received: {x}")
|
| 388 |
+
deserialized_file = super().deserialize(x_as_list, save_dir, root_url, hf_token) # type: ignore
|
| 389 |
+
if isinstance(deserialized_file, list):
|
| 390 |
+
return deserialized_file[0] # ignore subtitles
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
class JSONSerializable(Serializable):
    """Serializes JSON-compatible values by round-tripping through a .json file."""

    def serialized_info(self):
        # Human-readable description of the serialized form for API docs.
        return {"type": "string", "description": "filepath to JSON file"}

    def api_info(self) -> dict[str, dict | bool]:
        # Schema advertised for this component in the generated API info.
        return {"info": serializer_types["JSONSerializable"], "serialized_info": True}

    def example_inputs(self) -> dict[str, Any]:
        # Sample raw/serialized payloads used in generated documentation.
        return {
            "raw": {"a": 1, "b": 2},
            "serialized": None,
        }

    def serialize(
        self,
        x: str | None,
        load_dir: str | Path = "",
    ) -> dict | list | None:
        """
        Convert from a human-friendly version (string path to json file) to a
        serialized representation (json string)
        Parameters:
            x: String path to json file to read to get json string
            load_dir: Path to directory containing x
        """
        if x is None or x == "":
            return None
        return utils.file_to_json(Path(load_dir) / x)

    def deserialize(
        self,
        x: str | dict | list | None,
        save_dir: str | Path | None = None,
        root_url: str | None = None,
        hf_token: str | None = None,
    ) -> str | None:
        """
        Convert from serialized representation (json string) to a human-friendly
        version (string path to json file). Optionally, save the file to the directory specified by `save_dir`
        Parameters:
            x: Json string
            save_dir: Path to save the deserialized json file to
            root_url: Ignored
            hf_token: Ignored
        """
        if x is None:
            return None
        return utils.dict_or_str_to_json_file(x, dir=save_dir).name
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
class GallerySerializable(Serializable):
    """Serializes image galleries as a directory of images plus a captions.json index."""

    def serialized_info(self):
        return {
            "type": "string",
            "description": "path to directory with images and a file associating images with captions called captions.json",
        }

    def api_info(self) -> dict[str, dict | bool]:
        return {
            "info": serializer_types["GallerySerializable"],
            "serialized_info": True,
        }

    def example_inputs(self) -> dict[str, Any]:
        return {
            "raw": [media_data.BASE64_IMAGE] * 2,
            "serialized": [
                "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
            ]
            * 2,
        }

    def serialize(
        self, x: str | None, load_dir: str | Path = ""
    ) -> list[list[str | None]] | None:
        """Convert a gallery directory (with captions.json) into [image, caption] pairs."""
        if x is None or x == "":
            return None
        files = []
        captions_file = Path(x) / "captions.json"
        with captions_file.open("r") as captions_json:
            captions = json.load(captions_json)
        # Each captions.json entry maps an image file name to its caption (or None).
        for file_name, caption in captions.items():
            img = FileSerializable().serialize(file_name)
            files.append([img, caption])
        return files

    def deserialize(
        self,
        x: list[list[str | None]] | None,
        save_dir: str = "",
        root_url: str | None = None,
        hf_token: str | None = None,
    ) -> None | str:
        """Write [image, caption] pairs into a fresh uuid-named directory; return its absolute path."""
        if x is None:
            return None
        gallery_path = Path(save_dir) / str(uuid.uuid4())
        gallery_path.mkdir(exist_ok=True, parents=True)
        captions = {}
        for img_data in x:
            # Entries may be bare images or [image, caption] pairs.
            if isinstance(img_data, (list, tuple)):
                img_data, caption = img_data
            else:
                caption = None
            name = FileSerializable().deserialize(
                img_data, gallery_path, root_url=root_url, hf_token=hf_token
            )
            captions[name] = caption
        captions_file = gallery_path / "captions.json"
        with captions_file.open("w") as captions_json:
            json.dump(captions, captions_json)
        return os.path.abspath(gallery_path)
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
# Registry mapping serializer class names -> classes.  Built by walking two
# levels of the Serializable subclass tree (direct subclasses and their
# subclasses); deeper descendants would not be picked up automatically.
SERIALIZER_MAPPING = {}
for cls in Serializable.__subclasses__():
    SERIALIZER_MAPPING[cls.__name__] = cls
    for subcls in cls.__subclasses__():
        SERIALIZER_MAPPING[subcls.__name__] = subcls

# Aliases so older / alternative names in a server config still resolve.
SERIALIZER_MAPPING["Serializable"] = SimpleSerializable
SERIALIZER_MAPPING["File"] = FileSerializable
SERIALIZER_MAPPING["UploadButton"] = FileSerializable

# Maps gradio component names (as they appear in an app's config) to the
# serializer used for that component's values.
COMPONENT_MAPPING: dict[str, type] = {
    "textbox": StringSerializable,
    "number": NumberSerializable,
    "slider": NumberSerializable,
    "checkbox": BooleanSerializable,
    "checkboxgroup": ListStringSerializable,
    "radio": StringSerializable,
    "dropdown": SimpleSerializable,
    "image": ImgSerializable,
    "video": FileSerializable,
    "audio": FileSerializable,
    "file": FileSerializable,
    "dataframe": JSONSerializable,
    "timeseries": JSONSerializable,
    "state": SimpleSerializable,
    "button": StringSerializable,
    "uploadbutton": FileSerializable,
    "colorpicker": StringSerializable,
    "label": JSONSerializable,
    "highlightedtext": JSONSerializable,
    "json": JSONSerializable,
    "html": StringSerializable,
    "gallery": GallerySerializable,
    "chatbot": JSONSerializable,
    "model3d": FileSerializable,
    "plot": JSONSerializable,
    "barplot": JSONSerializable,
    "lineplot": JSONSerializable,
    "scatterplot": JSONSerializable,
    "markdown": StringSerializable,
    "dataset": StringSerializable,
    "code": StringSerializable,
    "interpretation": SimpleSerializable,
    "annotatedimage": JSONSerializable,
}
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/types.json
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"SimpleSerializable": {
|
| 3 |
+
"type": {},
|
| 4 |
+
"description": "any valid value"
|
| 5 |
+
},
|
| 6 |
+
"StringSerializable": {
|
| 7 |
+
"type": "string"
|
| 8 |
+
},
|
| 9 |
+
"ListStringSerializable": {
|
| 10 |
+
"type": "array",
|
| 11 |
+
"items": {
|
| 12 |
+
"type": "string"
|
| 13 |
+
}
|
| 14 |
+
},
|
| 15 |
+
"BooleanSerializable": {
|
| 16 |
+
"type": "boolean"
|
| 17 |
+
},
|
| 18 |
+
"NumberSerializable": {
|
| 19 |
+
"type": "number"
|
| 20 |
+
},
|
| 21 |
+
"ImgSerializable": {
|
| 22 |
+
"type": "string",
|
| 23 |
+
"description": "base64 representation of an image"
|
| 24 |
+
},
|
| 25 |
+
"FileSerializable": {
|
| 26 |
+
"oneOf": [
|
| 27 |
+
{
|
| 28 |
+
"type": "string",
|
| 29 |
+
"description": "filepath or URL to file"
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"type": "object",
|
| 33 |
+
"properties": {
|
| 34 |
+
"name": { "type": "string", "description": "name of file" },
|
| 35 |
+
"data": {
|
| 36 |
+
"type": "string",
|
| 37 |
+
"description": "base64 representation of file"
|
| 38 |
+
},
|
| 39 |
+
"size": {
|
| 40 |
+
"type": "integer",
|
| 41 |
+
"description": "size of image in bytes"
|
| 42 |
+
},
|
| 43 |
+
"is_file": {
|
| 44 |
+
"type": "boolean",
|
| 45 |
+
"description": "true if the file has been uploaded to the server"
|
| 46 |
+
},
|
| 47 |
+
"orig_name": {
|
| 48 |
+
"type": "string",
|
| 49 |
+
"description": "original name of the file"
|
| 50 |
+
}
|
| 51 |
+
},
|
| 52 |
+
"required": ["name", "data"]
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"type": "array",
|
| 56 |
+
"items": {
|
| 57 |
+
"anyOf": [
|
| 58 |
+
{
|
| 59 |
+
"type": "string",
|
| 60 |
+
"description": "filepath or URL to file"
|
| 61 |
+
},
|
| 62 |
+
{
|
| 63 |
+
"type": "object",
|
| 64 |
+
"properties": {
|
| 65 |
+
"name": { "type": "string", "description": "name of file" },
|
| 66 |
+
"data": {
|
| 67 |
+
"type": "string",
|
| 68 |
+
"description": "base64 representation of file"
|
| 69 |
+
},
|
| 70 |
+
"size": {
|
| 71 |
+
"type": "integer",
|
| 72 |
+
"description": "size of image in bytes"
|
| 73 |
+
},
|
| 74 |
+
"is_file": {
|
| 75 |
+
"type": "boolean",
|
| 76 |
+
"description": "true if the file has been uploaded to the server"
|
| 77 |
+
},
|
| 78 |
+
"orig_name": {
|
| 79 |
+
"type": "string",
|
| 80 |
+
"description": "original name of the file"
|
| 81 |
+
}
|
| 82 |
+
},
|
| 83 |
+
"required": ["name", "data"]
|
| 84 |
+
}
|
| 85 |
+
]
|
| 86 |
+
}
|
| 87 |
+
}
|
| 88 |
+
]
|
| 89 |
+
},
|
| 90 |
+
"SingleFileSerializable": {
|
| 91 |
+
"oneOf": [
|
| 92 |
+
{
|
| 93 |
+
"type": "string",
|
| 94 |
+
"description": "filepath or URL to file"
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"type": "object",
|
| 98 |
+
"properties": {
|
| 99 |
+
"name": { "type": "string", "description": "name of file" },
|
| 100 |
+
"data": {
|
| 101 |
+
"type": "string",
|
| 102 |
+
"description": "base64 representation of file"
|
| 103 |
+
},
|
| 104 |
+
"size": {
|
| 105 |
+
"type": "integer",
|
| 106 |
+
"description": "size of image in bytes"
|
| 107 |
+
},
|
| 108 |
+
"is_file": {
|
| 109 |
+
"type": "boolean",
|
| 110 |
+
"description": "true if the file has been uploaded to the server"
|
| 111 |
+
},
|
| 112 |
+
"orig_name": {
|
| 113 |
+
"type": "string",
|
| 114 |
+
"description": "original name of the file"
|
| 115 |
+
}
|
| 116 |
+
},
|
| 117 |
+
"required": ["name", "data"]
|
| 118 |
+
}
|
| 119 |
+
]
|
| 120 |
+
},
|
| 121 |
+
"MultipleFileSerializable": {
|
| 122 |
+
"type": "array",
|
| 123 |
+
"items": {
|
| 124 |
+
"anyOf": [
|
| 125 |
+
{
|
| 126 |
+
"type": "string",
|
| 127 |
+
"description": "filepath or URL to file"
|
| 128 |
+
},
|
| 129 |
+
{
|
| 130 |
+
"type": "object",
|
| 131 |
+
"properties": {
|
| 132 |
+
"name": { "type": "string", "description": "name of file" },
|
| 133 |
+
"data": {
|
| 134 |
+
"type": "string",
|
| 135 |
+
"description": "base64 representation of file"
|
| 136 |
+
},
|
| 137 |
+
"size": {
|
| 138 |
+
"type": "integer",
|
| 139 |
+
"description": "size of image in bytes"
|
| 140 |
+
},
|
| 141 |
+
"is_file": {
|
| 142 |
+
"type": "boolean",
|
| 143 |
+
"description": "true if the file has been uploaded to the server"
|
| 144 |
+
},
|
| 145 |
+
"orig_name": {
|
| 146 |
+
"type": "string",
|
| 147 |
+
"description": "original name of the file"
|
| 148 |
+
}
|
| 149 |
+
},
|
| 150 |
+
"required": ["name", "data"]
|
| 151 |
+
}
|
| 152 |
+
]
|
| 153 |
+
}
|
| 154 |
+
},
|
| 155 |
+
"JSONSerializable": {
|
| 156 |
+
"type": {},
|
| 157 |
+
"description": "any valid json"
|
| 158 |
+
},
|
| 159 |
+
"GallerySerializable": {
|
| 160 |
+
"type": "array",
|
| 161 |
+
"items": {
|
| 162 |
+
"type": "array",
|
| 163 |
+
"items": false,
|
| 164 |
+
"maxSize": 2,
|
| 165 |
+
"minSize": 2,
|
| 166 |
+
"prefixItems": [
|
| 167 |
+
{
|
| 168 |
+
"type": "object",
|
| 169 |
+
"properties": {
|
| 170 |
+
"name": { "type": "string", "description": "name of file" },
|
| 171 |
+
"data": {
|
| 172 |
+
"type": "string",
|
| 173 |
+
"description": "base64 representation of file"
|
| 174 |
+
},
|
| 175 |
+
"size": {
|
| 176 |
+
"type": "integer",
|
| 177 |
+
"description": "size of image in bytes"
|
| 178 |
+
},
|
| 179 |
+
"is_file": {
|
| 180 |
+
"type": "boolean",
|
| 181 |
+
"description": "true if the file has been uploaded to the server"
|
| 182 |
+
},
|
| 183 |
+
"orig_name": {
|
| 184 |
+
"type": "string",
|
| 185 |
+
"description": "original name of the file"
|
| 186 |
+
}
|
| 187 |
+
},
|
| 188 |
+
"required": ["name", "data"]
|
| 189 |
+
},
|
| 190 |
+
{
|
| 191 |
+
"oneOf": [
|
| 192 |
+
{ "type": "string", "description": "caption of image" },
|
| 193 |
+
{ "type": "null" }
|
| 194 |
+
]
|
| 195 |
+
}
|
| 196 |
+
]
|
| 197 |
+
}
|
| 198 |
+
}
|
| 199 |
+
}
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/utils.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import json
|
| 6 |
+
import mimetypes
|
| 7 |
+
import os
|
| 8 |
+
import pkgutil
|
| 9 |
+
import secrets
|
| 10 |
+
import shutil
|
| 11 |
+
import tempfile
|
| 12 |
+
import warnings
|
| 13 |
+
from concurrent.futures import CancelledError
|
| 14 |
+
from dataclasses import dataclass, field
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
from enum import Enum
|
| 17 |
+
from pathlib import Path
|
| 18 |
+
from threading import Lock
|
| 19 |
+
from typing import Any, Callable, Optional
|
| 20 |
+
|
| 21 |
+
import fsspec.asyn
|
| 22 |
+
import httpx
|
| 23 |
+
import huggingface_hub
|
| 24 |
+
import requests
|
| 25 |
+
from huggingface_hub import SpaceStage
|
| 26 |
+
from websockets.legacy.protocol import WebSocketCommonProtocol
|
| 27 |
+
|
| 28 |
+
# Server route suffixes appended to a gradio app's base URL.
API_URL = "api/predict/"
WS_URL = "queue/join"
UPLOAD_URL = "upload"
CONFIG_URL = "config"
API_INFO_URL = "info"
RAW_API_INFO_URL = "info?serialize=False"
# External service that computes API info for Spaces.
SPACE_FETCHER_URL = "https://gradio-space-api-fetcher-v2.hf.space/api"
RESET_URL = "reset"
# Template for a Space's public URL; format with the space id.
SPACE_URL = "https://hf.space/{}"

# Component type excluded from API payloads (server-side state).
STATE_COMPONENT = "state"
# Space runtime stages in which a Space cannot serve predictions.
INVALID_RUNTIME = [
    SpaceStage.NO_APP_FILE,
    SpaceStage.CONFIG_ERROR,
    SpaceStage.BUILD_ERROR,
    SpaceStage.RUNTIME_ERROR,
    SpaceStage.PAUSED,
]

# Package version, read from the bundled version.txt resource.
__version__ = (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip()
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class TooManyRequestsError(Exception):
    """Raised when the API returns a 429 status code."""
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class QueueError(Exception):
    """Raised when the queue is full or there is an issue adding a job to the queue."""
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class InvalidAPIEndpointError(Exception):
    """Raised when the API endpoint is invalid."""
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class SpaceDuplicationError(Exception):
    """Raised when something goes wrong with a Space Duplication."""
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class Status(Enum):
    """Status codes presented to client users."""

    STARTING = "STARTING"
    JOINING_QUEUE = "JOINING_QUEUE"
    QUEUE_FULL = "QUEUE_FULL"
    IN_QUEUE = "IN_QUEUE"
    SENDING_DATA = "SENDING_DATA"
    PROCESSING = "PROCESSING"
    ITERATING = "ITERATING"
    PROGRESS = "PROGRESS"
    FINISHED = "FINISHED"
    CANCELLED = "CANCELLED"

    @staticmethod
    def ordering(status: Status) -> int:
        """Order of messages. Helpful for testing."""
        # NOTE: this lifecycle order intentionally differs from declaration
        # order (PROGRESS sorts before ITERATING).
        sequence = (
            Status.STARTING,
            Status.JOINING_QUEUE,
            Status.QUEUE_FULL,
            Status.IN_QUEUE,
            Status.SENDING_DATA,
            Status.PROCESSING,
            Status.PROGRESS,
            Status.ITERATING,
            Status.FINISHED,
            Status.CANCELLED,
        )
        return sequence.index(status)

    def __lt__(self, other: Status):
        # Compare by lifecycle position, not declaration order.
        return Status.ordering(self) < Status.ordering(other)

    @staticmethod
    def msg_to_status(msg: str) -> Status:
        """Map the raw message from the backend to the status code presented to users."""
        lookup = {
            "send_hash": Status.JOINING_QUEUE,
            "queue_full": Status.QUEUE_FULL,
            "estimation": Status.IN_QUEUE,
            "send_data": Status.SENDING_DATA,
            "process_starts": Status.PROCESSING,
            "process_generating": Status.ITERATING,
            "process_completed": Status.FINISHED,
            "progress": Status.PROGRESS,
        }
        return lookup[msg]
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
@dataclass
class ProgressUnit:
    """One unit of fine-grained progress reported by the server."""

    index: Optional[int]
    length: Optional[int]
    unit: Optional[str]
    progress: Optional[float]
    desc: Optional[str]

    @classmethod
    def from_ws_msg(cls, data: list[dict]) -> list[ProgressUnit]:
        """Build ProgressUnit records from the raw websocket progress payload."""
        units = []
        for entry in data:
            units.append(
                cls(
                    index=entry.get("index"),
                    length=entry.get("length"),
                    unit=entry.get("unit"),
                    progress=entry.get("progress"),
                    desc=entry.get("desc"),
                )
            )
        return units
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
@dataclass
class StatusUpdate:
    """Update message sent from the worker thread to the Job on the main thread."""

    code: Status  # current lifecycle stage (see the Status enum)
    rank: int | None  # position in the queue, when known
    queue_size: int | None  # total queue length, when known
    eta: float | None  # estimated seconds until processing, when known
    success: bool | None  # whether the job succeeded (set on completion)
    time: datetime | None  # timestamp of this update
    progress_data: list[ProgressUnit] | None  # fine-grained progress, if reported
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def create_initial_status_update():
    """Return a StatusUpdate representing a freshly started job."""
    initial = dict(
        code=Status.STARTING,
        rank=None,
        queue_size=None,
        eta=None,
        success=None,
        time=datetime.now(),
        progress_data=None,
    )
    return StatusUpdate(**initial)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@dataclass
class JobStatus:
    """The job status.

    Keeps track of the latest status update and intermediate outputs (not yet implemented).
    """

    # Most recent StatusUpdate received for the job; starts as STARTING.
    latest_status: StatusUpdate = field(default_factory=create_initial_status_update)
    # Intermediate outputs collected while the job runs (e.g. generator yields).
    outputs: list[Any] = field(default_factory=list)
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
@dataclass
class Communicator:
    """Helper class to help communicate between the worker thread and main thread."""

    lock: Lock  # guards job and should_cancel across threads
    job: JobStatus  # shared status/outputs record updated by the worker
    prediction_processor: Callable[..., tuple]  # post-processes raw output data
    reset_url: str  # endpoint used to reset server session state on cancel
    should_cancel: bool = False  # set (under lock) to request cancellation
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
########################
|
| 193 |
+
# Network utils
|
| 194 |
+
########################
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def is_http_url_like(possible_url: str) -> bool:
    """
    Check if the given string looks like an HTTP(S) URL.
    """
    return possible_url.startswith("http://") or possible_url.startswith("https://")
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def probe_url(possible_url: str) -> bool:
    """
    Probe the given URL to see if it responds with a 200 status code (to HEAD, then to GET).
    """
    headers = {"User-Agent": "gradio (https://gradio.app/; team@gradio.app)"}
    try:
        with requests.session() as sess:
            response = sess.head(possible_url, headers=headers)
            # Some servers reject HEAD with 405; fall back to GET before giving up.
            if response.status_code == 405:
                response = sess.get(possible_url, headers=headers)
            return response.ok
    except Exception:
        # Any network / parsing failure counts as "not reachable".
        return False
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def is_valid_url(possible_url: str) -> bool:
    """
    Check if the given string is a valid URL.
    """
    # Deprecated: kept only for backwards compatibility.
    warnings.warn(
        "is_valid_url should not be used. "
        "Use is_http_url_like() and probe_url(), as suitable, instead.",
    )
    if not is_http_url_like(possible_url):
        return False
    return probe_url(possible_url)
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
async def get_pred_from_ws(
    websocket: WebSocketCommonProtocol,
    data: str,
    hash_data: str,
    helper: Communicator | None = None,
) -> dict[str, Any]:
    """Drive the queue websocket protocol until a prediction completes.

    Parameters:
        websocket: open connection to the server's queue endpoint
        data: JSON-encoded payload sent when the server requests data
        hash_data: JSON-encoded session hash sent when the server requests it
        helper: optional Communicator used for cancellation and status reporting
    Returns:
        The "output" field of the final process_completed message.
    Raises:
        QueueError: if the server reports the queue is full.
        CancelledError: if helper.should_cancel is set while waiting.
    """
    completed = False
    resp = {}
    while not completed:
        # Receive message in the background so that we can
        # cancel even while running a long pred
        task = asyncio.create_task(websocket.recv())
        while not task.done():
            if helper:
                with helper.lock:
                    if helper.should_cancel:
                        # Need to reset the iterator state since the client
                        # will not reset the session
                        async with httpx.AsyncClient() as http:
                            reset = http.post(
                                helper.reset_url, json=json.loads(hash_data)
                            )
                            # Retrieve cancel exception from task
                            # otherwise will get nasty warning in console
                            task.cancel()
                            await asyncio.gather(task, reset, return_exceptions=True)
                        raise CancelledError()
            # Need to suspend this coroutine so that task actually runs
            await asyncio.sleep(0.01)
        msg = task.result()
        resp = json.loads(msg)
        if helper:
            with helper.lock:
                # Translate the raw server message into a StatusUpdate for the Job.
                has_progress = "progress_data" in resp
                status_update = StatusUpdate(
                    code=Status.msg_to_status(resp["msg"]),
                    queue_size=resp.get("queue_size"),
                    rank=resp.get("rank", None),
                    success=resp.get("success"),
                    time=datetime.now(),
                    eta=resp.get("rank_eta"),
                    progress_data=ProgressUnit.from_ws_msg(resp["progress_data"])
                    if has_progress
                    else None,
                )
                # Intermediate (generator) outputs arrive before FINISHED;
                # collect them on the shared job record.
                output = resp.get("output", {}).get("data", [])
                if output and status_update.code != Status.FINISHED:
                    try:
                        result = helper.prediction_processor(*output)
                    except Exception as e:
                        # Surface processing failures as the "output" itself.
                        result = [e]
                    helper.job.outputs.append(result)
                helper.job.latest_status = status_update
        if resp["msg"] == "queue_full":
            raise QueueError("Queue is full! Please try again.")
        if resp["msg"] == "send_hash":
            await websocket.send(hash_data)
        elif resp["msg"] == "send_data":
            await websocket.send(data)
        completed = resp["msg"] == "process_completed"
    return resp["output"]
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
########################
|
| 294 |
+
# Data processing utils
|
| 295 |
+
########################
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def download_tmp_copy_of_file(
    url_path: str, hf_token: str | None = None, dir: str | None = None
) -> str:
    """Download url_path into a unique temp subdirectory and return the local path."""
    if dir is not None:
        os.makedirs(dir, exist_ok=True)
    headers = {"Authorization": "Bearer " + hf_token} if hf_token else {}
    # Random subdirectory avoids collisions between files with the same name.
    target_dir = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20)
    target_dir.mkdir(exist_ok=True, parents=True)
    destination = target_dir / Path(url_path).name

    with requests.get(url_path, headers=headers, stream=True) as response:
        response.raise_for_status()
        with open(destination, "wb") as out_file:
            # Stream to disk to avoid holding the whole payload in memory.
            shutil.copyfileobj(response.raw, out_file)
    return str(destination.resolve())
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def create_tmp_copy_of_file(file_path: str, dir: str | None = None) -> str:
    """Copy file_path (with metadata) into a fresh random temp subdirectory."""
    target_dir = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20)
    target_dir.mkdir(exist_ok=True, parents=True)
    destination = target_dir / Path(file_path).name
    shutil.copy2(file_path, destination)
    return str(destination.resolve())
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
def get_mimetype(filename: str) -> str | None:
    """Guess a MIME type from a filename, normalizing x-wav/x-flac audio types."""
    if filename.endswith(".vtt"):
        # mimetypes does not know WebVTT subtitle files.
        return "text/vtt"
    guessed = mimetypes.guess_type(filename)[0]
    if guessed is None:
        return None
    return guessed.replace("x-wav", "wav").replace("x-flac", "flac")
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def get_extension(encoding: str) -> str | None:
    """Derive a file extension (without the dot) from a data-URL/MIME encoding string."""
    encoding = encoding.replace("audio/wav", "audio/x-wav")
    mime = mimetypes.guess_type(encoding)[0]
    if mime == "audio/flac":  # flac is not supported by mimetypes
        return "flac"
    if mime is None:
        return None
    ext = mimetypes.guess_extension(mime)
    if ext is None:
        return None
    return ext.removeprefix(".")
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
def encode_file_to_base64(f: str | Path):
    """Read a file and return its contents as a data URL (MIME prefix + base64)."""
    with open(f, "rb") as file:
        payload = base64.b64encode(file.read()).decode("utf-8")
    mimetype = get_mimetype(str(f))
    prefix = mimetype if mimetype is not None else ""
    return "data:" + prefix + ";base64," + payload
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def encode_url_to_base64(url: str):
    """Fetch a URL and return its contents as a data URL (MIME prefix + base64)."""
    resp = requests.get(url)
    resp.raise_for_status()
    payload = base64.b64encode(resp.content).decode("utf-8")
    mimetype = get_mimetype(url)
    prefix = mimetype if mimetype is not None else ""
    return "data:" + prefix + ";base64," + payload
|
| 367 |
+
|
| 368 |
+
|
| 369 |
+
def encode_url_or_file_to_base64(path: str | Path):
    """Dispatch to URL or local-file base64 encoding based on the path scheme."""
    location = str(path)
    if not is_http_url_like(location):
        return encode_file_to_base64(location)
    return encode_url_to_base64(location)
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
def decode_base64_to_binary(encoding: str) -> tuple[bytes, str | None]:
    """Split a data URL into (raw bytes, guessed extension)."""
    extension = get_extension(encoding)
    # The base64 payload follows the last comma of the data URL header.
    payload = encoding.rsplit(",", 1)[-1]
    return base64.b64decode(payload), extension
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def strip_invalid_filename_characters(filename: str, max_bytes: int = 200) -> str:
    """Strips invalid characters from a filename and ensures that the file_length is less than `max_bytes` bytes."""
    filename = "".join(char for char in filename if char.isalnum() or char in "._- ")
    # Trim one character at a time: multi-byte characters mean we cannot
    # slice by byte count directly.
    while filename and len(filename.encode()) > max_bytes:
        filename = filename[:-1]
    return filename
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
def sanitize_parameter_names(original_name: str) -> str:
    """Cleans up a Python parameter name to make the API info more readable."""
    kept = [ch for ch in original_name if ch.isalnum() or ch in " _"]
    return "".join(kept).replace(" ", "_").lower()
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
def decode_base64_to_file(
    encoding: str,
    file_path: str | None = None,
    dir: str | Path | None = None,
    prefix: str | None = None,
):
    """Decode a base64 data URL into a named temporary file.

    Parameters:
        encoding: data URL (base64) to decode
        file_path: optional original filename; its stem/suffix seed the temp name
        dir: optional parent directory for the random subdirectory holding the file
        prefix: optional filename prefix; takes precedence over file_path's stem
    Returns:
        The open NamedTemporaryFile object (delete=False, already flushed).
    """
    # Random subdirectory avoids collisions between identically named files.
    directory = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20)
    directory.mkdir(exist_ok=True, parents=True)
    data, extension = decode_base64_to_binary(encoding)
    if file_path is not None and prefix is None:
        filename = Path(file_path).name
        prefix = filename
        if "." in filename:
            # The original name's extension overrides the one guessed from the MIME type.
            prefix = filename[0 : filename.index(".")]
            extension = filename[filename.index(".") + 1 :]

    if prefix is not None:
        prefix = strip_invalid_filename_characters(prefix)

    if extension is None:
        file_obj = tempfile.NamedTemporaryFile(
            delete=False, prefix=prefix, dir=directory
        )
    else:
        file_obj = tempfile.NamedTemporaryFile(
            delete=False,
            prefix=prefix,
            suffix="." + extension,
            dir=directory,
        )
    file_obj.write(data)
    file_obj.flush()
    return file_obj
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def dict_or_str_to_json_file(jsn: str | dict | list, dir: str | Path | None = None):
|
| 440 |
+
if dir is not None:
|
| 441 |
+
os.makedirs(dir, exist_ok=True)
|
| 442 |
+
|
| 443 |
+
file_obj = tempfile.NamedTemporaryFile(
|
| 444 |
+
delete=False, suffix=".json", dir=dir, mode="w+"
|
| 445 |
+
)
|
| 446 |
+
if isinstance(jsn, str):
|
| 447 |
+
jsn = json.loads(jsn)
|
| 448 |
+
json.dump(jsn, file_obj)
|
| 449 |
+
file_obj.flush()
|
| 450 |
+
return file_obj
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
def file_to_json(file_path: str | Path) -> dict | list:
|
| 454 |
+
with open(file_path) as f:
|
| 455 |
+
return json.load(f)
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
###########################
|
| 459 |
+
# HuggingFace Hub API Utils
|
| 460 |
+
###########################
|
| 461 |
+
def set_space_timeout(
    space_id: str,
    hf_token: str | None = None,
    timeout_in_seconds: int = 300,
):
    """Set the sleep timeout of a Hugging Face Space via the Hub API.

    Args:
        space_id: the ``user/space`` identifier to configure.
        hf_token: optional HF token used for authentication headers.
        timeout_in_seconds: idle time before the Space goes to sleep.

    Raises:
        SpaceDuplicationError: if the Hub rejects the sleeptime request.
    """
    request_headers = huggingface_hub.utils.build_hf_headers(
        token=hf_token,
        library_name="gradio_client",
        library_version=__version__,
    )
    response = requests.post(
        f"https://huggingface.co/api/spaces/{space_id}/sleeptime",
        json={"seconds": timeout_in_seconds},
        headers=request_headers,
    )
    try:
        huggingface_hub.utils.hf_raise_for_status(response)
    except huggingface_hub.utils.HfHubHTTPError as err:
        # Surface a domain-specific error, keeping the HTTP failure chained.
        raise SpaceDuplicationError(
            f"Could not set sleep timeout on duplicated Space. Please visit {SPACE_URL.format(space_id)} "
            "to set a timeout manually to reduce billing charges."
        ) from err
|
| 483 |
+
|
| 484 |
+
|
| 485 |
+
########################
|
| 486 |
+
# Misc utils
|
| 487 |
+
########################
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
def synchronize_async(func: Callable, *args, **kwargs) -> Any:
    """
    Runs async functions in sync scopes. Can be used in any scope.

    Example:
        if inspect.iscoroutinefunction(block_fn.fn):
            predictions = utils.synchronize_async(block_fn.fn, *processed_input)

    Args:
        func: the coroutine function to execute
        *args: positional arguments forwarded to ``func``
        **kwargs: keyword arguments forwarded to ``func``
    """
    # Delegates to fsspec's shared background event loop, so this is safe to
    # call even when no asyncio loop is running in the current thread.
    return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs)  # type: ignore
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
class APIInfoParseError(ValueError):
    """Raised when a JSON-schema fragment from the API info cannot be parsed."""

    pass
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def get_type(schema: dict):
    """Return the top-level type of a JSON-schema fragment.

    Returns the explicit ``"type"`` value when present, otherwise the
    combinator keyword ``"oneOf"`` or ``"anyOf"`` when that key holds a
    non-empty value.

    Raises:
        APIInfoParseError: if no type information can be found.
    """
    if "type" in schema:
        return schema["type"]
    for combinator in ("oneOf", "anyOf"):
        # Truthiness check mirrors the original: an empty list is treated
        # the same as an absent key.
        if schema.get(combinator):
            return combinator
    raise APIInfoParseError(f"Cannot parse type for {schema}")
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
def json_schema_to_python_type(schema: Any) -> str:
    """Convert the json schema into a python type hint.

    Args:
        schema: a JSON-schema fragment (dict) describing a single value.

    Returns:
        A human-readable Python type string, e.g. ``"List[str]"`` or
        ``"int | float"``.

    Raises:
        APIInfoParseError: if the schema's type cannot be determined.
    """
    type_ = get_type(schema)
    if type_ == {}:
        # Defensive branch retained from the original implementation;
        # get_type() currently returns a string or raises, so this should be
        # unreachable. Use .get() so a missing "description" cannot KeyError.
        if "json" in schema.get("description", ""):
            return "Dict[Any, Any]"
        else:
            return "Any"
    elif type_ == "null":
        return "None"
    elif type_ == "integer":
        return "int"
    elif type_ == "string":
        return "str"
    elif type_ == "boolean":
        return "bool"
    elif type_ == "number":
        return "int | float"
    elif type_ == "array":
        items = schema.get("items")
        if items is None:
            # JSON schema permits omitting "items" (any element type). The
            # previous code raised TypeError on `"prefixItems" in None` here.
            return "List[Any]"
        if "prefixItems" in items:
            # Fixed-length tuple schema: one type per positional element.
            elements = ", ".join(
                [json_schema_to_python_type(i) for i in items["prefixItems"]]
            )
            return f"Tuple[{elements}]"
        else:
            elements = json_schema_to_python_type(items)
            return f"List[{elements}]"
    elif type_ == "object":
        # .get() keeps bare object schemas (no "properties") from KeyError-ing;
        # they render as "Dict()".
        des = ", ".join(
            [
                f"{n}: {json_schema_to_python_type(v)} ({v.get('description')})"
                for n, v in schema.get("properties", {}).items()
            ]
        )
        return f"Dict({des})"
    elif type_ in ["oneOf", "anyOf"]:
        desc = " | ".join([json_schema_to_python_type(i) for i in schema[type_]])
        return desc
    else:
        raise APIInfoParseError(f"Cannot parse schema {schema}")
|
evalkit_internvl/lib/python3.10/site-packages/gradio_client/version.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
0.2.9
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/LICENSE.txt
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
hjson (based on simplejson) is dual-licensed software. It is available under the terms
|
| 2 |
+
of the MIT license, or the Academic Free License version 2.1. The full
|
| 3 |
+
text of each license agreement is included below. This code is also
|
| 4 |
+
licensed to the Python Software Foundation (PSF) under a Contributor
|
| 5 |
+
Agreement.
|
| 6 |
+
|
| 7 |
+
MIT License
|
| 8 |
+
===========
|
| 9 |
+
|
| 10 |
+
Copyright (c) 2006 Bob Ippolito
|
| 11 |
+
Copyright (c) 2015 Christian Zangl
|
| 12 |
+
|
| 13 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 14 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 15 |
+
the Software without restriction, including without limitation the rights to
|
| 16 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
| 17 |
+
of the Software, and to permit persons to whom the Software is furnished to do
|
| 18 |
+
so, subject to the following conditions:
|
| 19 |
+
|
| 20 |
+
The above copyright notice and this permission notice shall be included in all
|
| 21 |
+
copies or substantial portions of the Software.
|
| 22 |
+
|
| 23 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 24 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 25 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 26 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 27 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 28 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 29 |
+
SOFTWARE.
|
| 30 |
+
|
| 31 |
+
Academic Free License v. 2.1
|
| 32 |
+
============================
|
| 33 |
+
|
| 34 |
+
Copyright (c) 2006 Bob Ippolito. All rights reserved.
|
| 35 |
+
Copyright (c) 2015 Christian Zangl
|
| 36 |
+
|
| 37 |
+
This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following notice immediately following the copyright notice for the Original Work:
|
| 38 |
+
|
| 39 |
+
Licensed under the Academic Free License version 2.1
|
| 40 |
+
|
| 41 |
+
1) Grant of Copyright License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license to do the following:
|
| 42 |
+
|
| 43 |
+
a) to reproduce the Original Work in copies;
|
| 44 |
+
|
| 45 |
+
b) to prepare derivative works ("Derivative Works") based upon the Original Work;
|
| 46 |
+
|
| 47 |
+
c) to distribute copies of the Original Work and Derivative Works to the public;
|
| 48 |
+
|
| 49 |
+
d) to perform the Original Work publicly; and
|
| 50 |
+
|
| 51 |
+
e) to display the Original Work publicly.
|
| 52 |
+
|
| 53 |
+
2) Grant of Patent License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, to make, use, sell and offer for sale the Original Work and Derivative Works.
|
| 54 |
+
|
| 55 |
+
3) Grant of Source Code License. The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor hereby agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work, and by publishing the address of that information repository in a notice immediately following the copyright notice that applies to the Original Work.
|
| 56 |
+
|
| 57 |
+
4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior written permission of the Licensor. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor except as expressly stated herein. No patent license is granted to make, use, sell or offer to sell embodiments of any patent claims other than the licensed claims defined in Section 2. No right is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any Original Work that Licensor otherwise would have a right to license.
|
| 58 |
+
|
| 59 |
+
5) This section intentionally omitted.
|
| 60 |
+
|
| 61 |
+
6) Attribution Rights. You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work.
|
| 62 |
+
|
| 63 |
+
7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately proceeding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to Original Work is granted hereunder except under this disclaimer.
|
| 64 |
+
|
| 65 |
+
8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to any person for any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. This limitation of liability shall not apply to liability for death or personal injury resulting from Licensor's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You.
|
| 66 |
+
|
| 67 |
+
9) Acceptance and Termination. If You distribute copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. Nothing else but this License (or another written agreement between Licensor and You) grants You permission to create Derivative Works based upon the Original Work or to exercise any of the rights granted in Section 1 herein, and any attempt to do so except under the terms of this License (or another written agreement between Licensor and You) is expressly prohibited by U.S. copyright law, the equivalent laws of other countries, and by international treaty. Therefore, by exercising any of the rights granted to You in Section 1 herein, You indicate Your acceptance of this License and all of its terms and conditions.
|
| 68 |
+
|
| 69 |
+
10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware.
|
| 70 |
+
|
| 71 |
+
11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et seq., the equivalent laws of other countries, and international treaty. This section shall survive the termination of this License.
|
| 72 |
+
|
| 73 |
+
12) Attorneys Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License.
|
| 74 |
+
|
| 75 |
+
13) Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable.
|
| 76 |
+
|
| 77 |
+
14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
| 78 |
+
|
| 79 |
+
15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You.
|
| 80 |
+
|
| 81 |
+
This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved. Permission is hereby granted to copy and distribute this license without modification. This license may not be modified without the express written permission of its copyright owner.
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: hjson
|
| 3 |
+
Version: 3.1.0
|
| 4 |
+
Summary: Hjson, a user interface for JSON.
|
| 5 |
+
Home-page: http://github.com/hjson/hjson-py
|
| 6 |
+
Author: Christian Zangl
|
| 7 |
+
Author-email: laktak@cdak.net
|
| 8 |
+
License: MIT License
|
| 9 |
+
Keywords: json comments configuration
|
| 10 |
+
Platform: any
|
| 11 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 14 |
+
Classifier: License :: OSI Approved :: Academic Free License (AFL)
|
| 15 |
+
Classifier: Programming Language :: Python
|
| 16 |
+
Classifier: Programming Language :: Python :: 2
|
| 17 |
+
Classifier: Programming Language :: Python :: 2.6
|
| 18 |
+
Classifier: Programming Language :: Python :: 2.7
|
| 19 |
+
Classifier: Programming Language :: Python :: 3
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.3
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.4
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.5
|
| 23 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 24 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 25 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 26 |
+
License-File: LICENSE.txt
|
| 27 |
+
|
| 28 |
+
hjson-py
|
| 29 |
+
========
|
| 30 |
+
|
| 31 |
+
`Hjson`_, a user interface for JSON
|
| 32 |
+
|
| 33 |
+
Hjson works with Python 2.5+ and Python 3.3+ (based on `simplejson`_)
|
| 34 |
+
|
| 35 |
+
Installation
|
| 36 |
+
============
|
| 37 |
+
|
| 38 |
+
- ``pip install hjson``
|
| 39 |
+
|
| 40 |
+
- or download from https://pypi.python.org/pypi/hjson
|
| 41 |
+
|
| 42 |
+
Commandline
|
| 43 |
+
-----------
|
| 44 |
+
|
| 45 |
+
::
|
| 46 |
+
|
| 47 |
+
Usage:
|
| 48 |
+
hjson [options]
|
| 49 |
+
hjson [options] <input>
|
| 50 |
+
hjson (-h | --help)
|
| 51 |
+
hjson (-V | --version)
|
| 52 |
+
|
| 53 |
+
Options:
|
| 54 |
+
-h --help Show this screen.
|
| 55 |
+
-j Output as formatted JSON.
|
| 56 |
+
-c Output as JSON.
|
| 57 |
+
-V --version Show version.
|
| 58 |
+
|
| 59 |
+
E.g. ``echo '{"json":"obj"}' | hjson``
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
Usage
|
| 63 |
+
=====
|
| 64 |
+
|
| 65 |
+
.. code-block:: python
|
| 66 |
+
|
| 67 |
+
import hjson
|
| 68 |
+
|
| 69 |
+
Decoding Hjson
|
| 70 |
+
--------------
|
| 71 |
+
|
| 72 |
+
.. code-block:: python
|
| 73 |
+
|
| 74 |
+
text = """{
|
| 75 |
+
foo: a
|
| 76 |
+
bar: 1
|
| 77 |
+
}"""
|
| 78 |
+
|
| 79 |
+
hjson.loads(text)
|
| 80 |
+
|
| 81 |
+
Result:
|
| 82 |
+
|
| 83 |
+
.. code-block:: python
|
| 84 |
+
|
| 85 |
+
OrderedDict([('foo', 'a'), ('bar', 1)])
|
| 86 |
+
|
| 87 |
+
Encoding Python object hierarchies
|
| 88 |
+
----------------------------------
|
| 89 |
+
|
| 90 |
+
.. code-block:: python
|
| 91 |
+
|
| 92 |
+
hjson.dumps({'foo': 'text', 'bar': (1, 2)})
|
| 93 |
+
|
| 94 |
+
Result:
|
| 95 |
+
|
| 96 |
+
::
|
| 97 |
+
|
| 98 |
+
{
|
| 99 |
+
foo: text
|
| 100 |
+
bar:
|
| 101 |
+
[
|
| 102 |
+
1
|
| 103 |
+
2
|
| 104 |
+
]
|
| 105 |
+
}
|
| 106 |
+
|
| 107 |
+
Encoding as JSON
|
| 108 |
+
----------------
|
| 109 |
+
|
| 110 |
+
Note that this is probably not as performant as the simplejson version.
|
| 111 |
+
|
| 112 |
+
.. code-block:: python
|
| 113 |
+
|
| 114 |
+
hjson.dumpsJSON(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
| 115 |
+
|
| 116 |
+
Result: ``'["foo", {"bar": ["baz", null, 1.0, 2]}]'``
|
| 117 |
+
|
| 118 |
+
API
|
| 119 |
+
===
|
| 120 |
+
|
| 121 |
+
`hjson-py`_
|
| 122 |
+
|
| 123 |
+
.. _Hjson: https://hjson.github.io
|
| 124 |
+
.. _simplejson: https://github.com/simplejson/simplejson
|
| 125 |
+
.. _hjson-py: http://hjson.github.io/hjson-py/
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/hjson,sha256=e-TUdFh5Fd6Pvk89iYfdjr_1h9RmZmbQcL9FzJUXgHk,231
|
| 2 |
+
hjson-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
hjson-3.1.0.dist-info/LICENSE.txt,sha256=WRxR9JxoM7zhpdU-Y3haBGet1nRaWogOTRD05kzETEw,10462
|
| 4 |
+
hjson-3.1.0.dist-info/METADATA,sha256=ws3nEqUXFpWNgmH-FYScGumUSgt2dkiPHNm7RZh-gcU,2610
|
| 5 |
+
hjson-3.1.0.dist-info/RECORD,,
|
| 6 |
+
hjson-3.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 7 |
+
hjson-3.1.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
| 8 |
+
hjson-3.1.0.dist-info/entry_points.txt,sha256=sghBTcTrqdK0kKuznOZXeEBtjoBnLweHF83S9NK-r0E,42
|
| 9 |
+
hjson-3.1.0.dist-info/top_level.txt,sha256=earxK7niVoW6ADlKRFHCs3JpUL4FBSnJn9qmrYgnEUE,6
|
| 10 |
+
hjson/__init__.py,sha256=Py1drAVt-Xpi2xJAHxVXDdX-NniC4llxZVPjY7MyPCY,26580
|
| 11 |
+
hjson/__pycache__/__init__.cpython-310.pyc,,
|
| 12 |
+
hjson/__pycache__/compat.cpython-310.pyc,,
|
| 13 |
+
hjson/__pycache__/decoder.cpython-310.pyc,,
|
| 14 |
+
hjson/__pycache__/encoder.cpython-310.pyc,,
|
| 15 |
+
hjson/__pycache__/encoderH.cpython-310.pyc,,
|
| 16 |
+
hjson/__pycache__/ordered_dict.cpython-310.pyc,,
|
| 17 |
+
hjson/__pycache__/scanner.cpython-310.pyc,,
|
| 18 |
+
hjson/__pycache__/tool.cpython-310.pyc,,
|
| 19 |
+
hjson/compat.py,sha256=uvmTYe1Oa18tT_6tFRtYnzTdAkyd73B9zwMF7ZafI04,1036
|
| 20 |
+
hjson/decoder.py,sha256=oHz7g2sQd4S-AJbSzur9VJWqAHDWL25FVQ-G35XlGCA,19563
|
| 21 |
+
hjson/encoder.py,sha256=NhR3YSMVzL3UP8OLAtP2Dr6dW9UbJzjVS-SXp1DzvcY,19168
|
| 22 |
+
hjson/encoderH.py,sha256=wJ8D0gAyR3n6e3MahCIU3OahI5_xuHPWV_ZlI839xCs,20481
|
| 23 |
+
hjson/ordered_dict.py,sha256=DXtgiqkkaNWXDLZ0DGXIjF_CPzGV5qpC-PSeS1zcps8,3370
|
| 24 |
+
hjson/scanner.py,sha256=IL8poQGvCsb82y7qY5jrlSrZ5xcENpPUti3tNKhprYw,1779
|
| 25 |
+
hjson/tests/__init__.py,sha256=_A-1Tn7q7ccNPro_QfbKiXo_bTL9ED5RUX9AeSLG4TA,2011
|
| 26 |
+
hjson/tests/__pycache__/__init__.cpython-310.pyc,,
|
| 27 |
+
hjson/tests/__pycache__/test_bigint_as_string.cpython-310.pyc,,
|
| 28 |
+
hjson/tests/__pycache__/test_bitsize_int_as_string.cpython-310.pyc,,
|
| 29 |
+
hjson/tests/__pycache__/test_check_circular.cpython-310.pyc,,
|
| 30 |
+
hjson/tests/__pycache__/test_decimal.cpython-310.pyc,,
|
| 31 |
+
hjson/tests/__pycache__/test_decode.cpython-310.pyc,,
|
| 32 |
+
hjson/tests/__pycache__/test_default.cpython-310.pyc,,
|
| 33 |
+
hjson/tests/__pycache__/test_dump.cpython-310.pyc,,
|
| 34 |
+
hjson/tests/__pycache__/test_encode_basestring_ascii.cpython-310.pyc,,
|
| 35 |
+
hjson/tests/__pycache__/test_errors.cpython-310.pyc,,
|
| 36 |
+
hjson/tests/__pycache__/test_fail.cpython-310.pyc,,
|
| 37 |
+
hjson/tests/__pycache__/test_float.cpython-310.pyc,,
|
| 38 |
+
hjson/tests/__pycache__/test_for_json.cpython-310.pyc,,
|
| 39 |
+
hjson/tests/__pycache__/test_hjson.cpython-310.pyc,,
|
| 40 |
+
hjson/tests/__pycache__/test_indent.cpython-310.pyc,,
|
| 41 |
+
hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc,,
|
| 42 |
+
hjson/tests/__pycache__/test_namedtuple.cpython-310.pyc,,
|
| 43 |
+
hjson/tests/__pycache__/test_pass1.cpython-310.pyc,,
|
| 44 |
+
hjson/tests/__pycache__/test_pass2.cpython-310.pyc,,
|
| 45 |
+
hjson/tests/__pycache__/test_pass3.cpython-310.pyc,,
|
| 46 |
+
hjson/tests/__pycache__/test_recursion.cpython-310.pyc,,
|
| 47 |
+
hjson/tests/__pycache__/test_scanstring.cpython-310.pyc,,
|
| 48 |
+
hjson/tests/__pycache__/test_separators.cpython-310.pyc,,
|
| 49 |
+
hjson/tests/__pycache__/test_tool.cpython-310.pyc,,
|
| 50 |
+
hjson/tests/__pycache__/test_tuple.cpython-310.pyc,,
|
| 51 |
+
hjson/tests/__pycache__/test_unicode.cpython-310.pyc,,
|
| 52 |
+
hjson/tests/test_bigint_as_string.py,sha256=bhEtYEXWUhxyi25iLm4sPWFrt5RZ9PfFEknX1cdzP-Y,2265
|
| 53 |
+
hjson/tests/test_bitsize_int_as_string.py,sha256=-73xJ8CE2hDjGOWTERRrYbDHaY0kgBNQXC0g8nIgd4k,2332
|
| 54 |
+
hjson/tests/test_check_circular.py,sha256=64kZhsab6OcwYmJNLRqNW-19dp1UdgYbZiGzopKyR9s,940
|
| 55 |
+
hjson/tests/test_decimal.py,sha256=Qw0IBPSPYoGZXwvXkkM1cz6lpqjkPRzNDBSul-RdR_4,2556
|
| 56 |
+
hjson/tests/test_decode.py,sha256=Sm4052xVjv7ZtZFdRVMsnvQeh2eCNoXv24YOUJJLMdg,4437
|
| 57 |
+
hjson/tests/test_default.py,sha256=WWDLhDVfih4PrenmiEcvshhUOl_bNsm3jML96-AtGmo,224
|
| 58 |
+
hjson/tests/test_dump.py,sha256=5WU4Rd6vsHOwXGpGqQKIw1ZBNgRWUqMY8w3DnJVWfxo,5061
|
| 59 |
+
hjson/tests/test_encode_basestring_ascii.py,sha256=up4y9JMdGXdBXkEjfqwiG-sudSdcKw0RQfO_76za-To,2102
|
| 60 |
+
hjson/tests/test_errors.py,sha256=vg3-z36T9O-UeDHG4ZtW-nQBNAvraWKBrDA70yG989c,1549
|
| 61 |
+
hjson/tests/test_fail.py,sha256=Giinb944NX0bPwBHYUjVZ4ZlNB611Wg0wxVWxv4bDaU,5688
|
| 62 |
+
hjson/tests/test_float.py,sha256=LCUL-2xT8PYq99jQi6-Ddk9pMuC1mLrcJboTfvR08HM,1011
|
| 63 |
+
hjson/tests/test_for_json.py,sha256=ZLtypdX0ALctxMB8c3fQvx3k9OHY5t71gBxGNOXemrc,2778
|
| 64 |
+
hjson/tests/test_hjson.py,sha256=CdvXR05nu8bF_jZ-Hhj3bh8LRi8tdSJTruayj69HoDk,2327
|
| 65 |
+
hjson/tests/test_indent.py,sha256=8oUK5E8DTz1c3RkUU-nOELmr9wOKoaHHOAsxDai66iE,2589
|
| 66 |
+
hjson/tests/test_item_sort_key.py,sha256=piYy-ntwdcb_qS-y8jPFI6rVZlHCNqtTFGnaZSEvWH8,1134
|
| 67 |
+
hjson/tests/test_namedtuple.py,sha256=iK7B95JH4f2L3_MB3rY9NagEVZ1X62JHpjlm1J4t5uM,4066
|
| 68 |
+
hjson/tests/test_pass1.py,sha256=wdnBz55dY4ou8IIdZFypJ_72J6HCtLZw0YesoNOTopQ,1745
|
| 69 |
+
hjson/tests/test_pass2.py,sha256=O389C8IeJ9ysqWKUftXOeHvBi5_47VSdBM_2bCee1SQ,385
|
| 70 |
+
hjson/tests/test_pass3.py,sha256=wwemRholrfv4lwgC0ArnXiS_tfA-NnbRrZqo71YZXaA,481
|
| 71 |
+
hjson/tests/test_recursion.py,sha256=APQAtnchO6KZdduYqvEJDrBOmAKqyORwAX4ldbzAW7A,1694
|
| 72 |
+
hjson/tests/test_scanstring.py,sha256=U-5gUsaO7jEai6zfKMKADfBi-xFg8IHC1dBeElvTsn4,5953
|
| 73 |
+
hjson/tests/test_separators.py,sha256=krudtyZbx0k7OrZZ-jGCZH6yrmsj1seRkUblQLRq4Rw,945
|
| 74 |
+
hjson/tests/test_tool.py,sha256=PyDhrmMTpJSvBXvsWG8P-yrw1BT5kSp6OcSrMLvzmIw,2826
|
| 75 |
+
hjson/tests/test_tuple.py,sha256=FkOyw4s7WZ1HUGxtHYiwaBwhW2thlDB3JFSpUZCbI_I,1976
|
| 76 |
+
hjson/tests/test_unicode.py,sha256=5npz25rj4T3ZhXF_OsSmVveBUtTuz92DQ_cHDe-Jfrw,7099
|
| 77 |
+
hjson/tool.py,sha256=PVlr-lSDWfrGdkh5exWQo_21HGgHLR4OpZ1S41Dqjgo,1897
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.37.1)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
hjson = hjson.tool:main
|
evalkit_internvl/lib/python3.10/site-packages/hjson-3.1.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
hjson
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (178 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.16 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-310.pyc
ADDED
|
Binary file (33 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
from multiprocessing import synchronize

from .context import get_context


def _make_name():
    # Build a unique, per-process semaphore name. The trailing counter comes
    # from multiprocessing's private ``SemLock._rand`` sequence.
    # NOTE(review): relies on a private CPython attribute — verify against the
    # supported interpreter versions.
    return f"/loky-{os.getpid()}-{next(synchronize.SemLock._rand)}"


# monkey patch the name creation for multiprocessing
synchronize.SemLock._make_name = staticmethod(_make_name)

__all__ = ["get_context"]
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (536 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-310.pyc
ADDED
|
Binary file (1.81 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-310.pyc
ADDED
|
Binary file (493 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-310.pyc
ADDED
|
Binary file (9.59 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/fork_exec.cpython-310.pyc
ADDED
|
Binary file (1.11 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-310.pyc
ADDED
|
Binary file (5.04 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-310.pyc
ADDED
|
Binary file (2.1 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-310.pyc
ADDED
|
Binary file (4.82 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-310.pyc
ADDED
|
Binary file (4.94 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-310.pyc
ADDED
|
Binary file (7.82 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-310.pyc
ADDED
|
Binary file (5.05 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (4.33 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/_posix_reduction.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
###############################################################################
|
| 2 |
+
# Extra reducers for Unix based system and connections objects
|
| 3 |
+
#
|
| 4 |
+
# author: Thomas Moreau and Olivier Grisel
|
| 5 |
+
#
|
| 6 |
+
# adapted from multiprocessing/reduction.py (17/02/2017)
|
| 7 |
+
# * Add adapted reduction for LokyProcesses and socket/Connection
|
| 8 |
+
#
|
| 9 |
+
import os
|
| 10 |
+
import socket
|
| 11 |
+
import _socket
|
| 12 |
+
from multiprocessing.connection import Connection
|
| 13 |
+
from multiprocessing.context import get_spawning_popen
|
| 14 |
+
|
| 15 |
+
from .reduction import register
|
| 16 |
+
|
| 17 |
+
HAVE_SEND_HANDLE = (
|
| 18 |
+
hasattr(socket, "CMSG_LEN")
|
| 19 |
+
and hasattr(socket, "SCM_RIGHTS")
|
| 20 |
+
and hasattr(socket.socket, "sendmsg")
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _mk_inheritable(fd):
|
| 25 |
+
os.set_inheritable(fd, True)
|
| 26 |
+
return fd
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def DupFd(fd):
    """Return a wrapper for an fd.

    When called while a child process is being spawned, delegate the
    duplication to the spawning Popen object.  Otherwise fall back to the
    fd-passing machinery of ``multiprocessing.resource_sharer``, which
    needs SCM_RIGHTS support on the platform.
    """
    popen = get_spawning_popen()
    if popen is not None:
        return popen.DupFd(popen.duplicate_for_child(fd))
    if HAVE_SEND_HANDLE:
        from multiprocessing import resource_sharer

        return resource_sharer.DupFd(fd)
    raise TypeError(
        "Cannot pickle connection object. This object can only be "
        "passed when spawning a new process"
    )
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _reduce_socket(s):
    """Reducer for socket objects: ship the duplicated fd to the child."""
    handle = DupFd(s.fileno())
    return _rebuild_socket, (handle, s.family, s.type, s.proto)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _rebuild_socket(df, family, type, proto):
|
| 51 |
+
fd = df.detach()
|
| 52 |
+
return socket.fromfd(fd, family, type, proto)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def rebuild_connection(df, readable, writable):
    """Rebuild a Connection in the child process from a DupFd-style wrapper."""
    return Connection(df.detach(), readable, writable)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def reduce_connection(conn):
    """Reducer for Connection objects: ship the duplicated fd to the child."""
    handle = DupFd(conn.fileno())
    return rebuild_connection, (handle, conn.readable, conn.writable)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# Register the extra reducers with loky's pickler so socket and Connection
# objects are transparently shipped to child processes on Unix.
register(socket.socket, _reduce_socket)
register(_socket.socket, _reduce_socket)
register(Connection, reduce_connection)
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/_win_reduction.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
###############################################################################
|
| 2 |
+
# Extra reducers for Windows system and connections objects
|
| 3 |
+
#
|
| 4 |
+
# author: Thomas Moreau and Olivier Grisel
|
| 5 |
+
#
|
| 6 |
+
# adapted from multiprocessing/reduction.py (17/02/2017)
|
| 7 |
+
# * Add adapted reduction for LokyProcesses and socket/PipeConnection
|
| 8 |
+
#
|
| 9 |
+
import socket
|
| 10 |
+
from multiprocessing import connection
|
| 11 |
+
from multiprocessing.reduction import _reduce_socket
|
| 12 |
+
|
| 13 |
+
from .reduction import register
|
| 14 |
+
|
| 15 |
+
# register reduction for win32 communication objects
register(socket.socket, _reduce_socket)
register(connection.Connection, connection.reduce_connection)
# `PipeConnection` was merged into `Connection` and removed in Python 3.8
# (bpo-33081).  Guard the registration so importing this module on a modern
# interpreter does not raise AttributeError on Windows.
if hasattr(connection, "PipeConnection"):
    register(connection.PipeConnection, connection.reduce_pipe_connection)
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/context.py
ADDED
|
@@ -0,0 +1,378 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
###############################################################################
|
| 2 |
+
# Basic context management with LokyContext
|
| 3 |
+
#
|
| 4 |
+
# author: Thomas Moreau and Olivier Grisel
|
| 5 |
+
#
|
| 6 |
+
# adapted from multiprocessing/context.py
|
| 7 |
+
# * Create a context ensuring loky uses only objects that are compatible
|
| 8 |
+
# * Add LokyContext to the list of context of multiprocessing so loky can be
|
| 9 |
+
# used with multiprocessing.set_start_method
|
| 10 |
+
# * Implement a CFS-aware amd physical-core aware cpu_count function.
|
| 11 |
+
#
|
| 12 |
+
import os
|
| 13 |
+
import sys
|
| 14 |
+
import math
|
| 15 |
+
import subprocess
|
| 16 |
+
import traceback
|
| 17 |
+
import warnings
|
| 18 |
+
import multiprocessing as mp
|
| 19 |
+
from multiprocessing import get_context as mp_get_context
|
| 20 |
+
from multiprocessing.context import BaseContext
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
from .process import LokyProcess, LokyInitMainProcess
|
| 24 |
+
|
| 25 |
+
# Apparently, on older Python versions, loky cannot work 61 workers on Windows
# but instead 60: ¯\_(ツ)_/¯
if sys.version_info >= (3, 8):
    from concurrent.futures.process import _MAX_WINDOWS_WORKERS

    if sys.version_info < (3, 10):
        # Python < 3.10 reserves one extra slot, hence 60 instead of 61.
        _MAX_WINDOWS_WORKERS = _MAX_WINDOWS_WORKERS - 1
else:
    # compat for versions before 3.8 which do not define this.
    _MAX_WINDOWS_WORKERS = 60

# Start methods accepted by get_context/set_start_method below.
START_METHODS = ["loky", "loky_init_main", "spawn"]
if sys.platform != "win32":
    # fork-based start methods only exist on POSIX platforms.
    START_METHODS += ["fork", "forkserver"]

_DEFAULT_START_METHOD = None

# Cache for the number of physical cores to avoid repeating subprocess calls.
# It should not change during the lifetime of the program.
physical_cores_cache = None
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def get_context(method=None):
    """Return the multiprocessing context for *method*.

    Falls back to the module default start method, then to "loky",
    when *method* is None.
    """
    # Try to overload the default context
    resolved = method or _DEFAULT_START_METHOD or "loky"

    if resolved == "fork":
        # If 'fork' is explicitly requested, warn user about potential issues.
        warnings.warn(
            "`fork` start method should not be used with "
            "`loky` as it does not respect POSIX. Try using "
            "`spawn` or `loky` instead.",
            UserWarning,
        )

    try:
        return mp_get_context(resolved)
    except ValueError:
        raise ValueError(
            f"Unknown context '{resolved}'. Value should be in "
            f"{START_METHODS}."
        )
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def set_start_method(method, force=False):
    """Set the module-level default start method used by get_context."""
    global _DEFAULT_START_METHOD
    if not force and _DEFAULT_START_METHOD is not None:
        raise RuntimeError("context has already been set")
    assert method is None or method in START_METHODS, (
        f"'{method}' is not a valid start_method. It should be in "
        f"{START_METHODS}"
    )

    _DEFAULT_START_METHOD = method
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def get_start_method():
    """Return the current default start method (None if never set)."""
    return _DEFAULT_START_METHOD
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def cpu_count(only_physical_cores=False):
    """Return the number of CPUs the current process can use.

    The returned number of CPUs accounts for:
     * the number of CPUs in the system, as given by
       ``multiprocessing.cpu_count``;
     * the CPU affinity settings of the current process
       (available on some Unix systems);
     * Cgroup CPU bandwidth limit (available on Linux only, typically
       set by docker and similar container orchestration systems);
     * the value of the LOKY_MAX_CPU_COUNT environment variable if defined.
    and is given as the minimum of these constraints.

    If ``only_physical_cores`` is True, return the number of physical cores
    instead of the number of logical cores (hyperthreading / SMT). Note that
    this option is not enforced if the number of usable cores is controlled in
    any other way such as: process affinity, Cgroup restricted CPU bandwidth
    or the LOKY_MAX_CPU_COUNT environment variable. If the number of physical
    cores is not found, return the number of logical cores.

    Note that on Windows, the returned number of CPUs cannot exceed 61 (or 60 for
    Python < 3.10), see:
    https://bugs.python.org/issue26903.

    It is also always larger or equal to 1.
    """
    # Note: os.cpu_count() is allowed to return None in its docstring
    os_cpu_count = os.cpu_count() or 1
    if sys.platform == "win32":
        # On Windows, attempting to use more than 61 CPUs would result in a
        # OS-level error. See https://bugs.python.org/issue26903. According to
        # https://learn.microsoft.com/en-us/windows/win32/procthread/processor-groups
        # it might be possible to go beyond with a lot of extra work but this
        # does not look easy.
        os_cpu_count = min(os_cpu_count, _MAX_WINDOWS_WORKERS)

    cpu_count_user = _cpu_count_user(os_cpu_count)
    aggregate_cpu_count = max(min(os_cpu_count, cpu_count_user), 1)

    if not only_physical_cores:
        return aggregate_cpu_count

    if cpu_count_user < os_cpu_count:
        # Respect user setting
        return max(cpu_count_user, 1)

    # _count_physical_cores caches its result, so the subprocess call and the
    # warning below happen at most once per process.
    cpu_count_physical, exception = _count_physical_cores()
    if cpu_count_physical != "not found":
        return cpu_count_physical

    # Fallback to default behavior
    if exception is not None:
        # warns only the first time
        warnings.warn(
            "Could not find the number of physical cores for the "
            f"following reason:\n{exception}\n"
            "Returning the number of logical cores instead. You can "
            "silence this warning by setting LOKY_MAX_CPU_COUNT to "
            "the number of cores you want to use."
        )
        traceback.print_tb(exception.__traceback__)

    return aggregate_cpu_count
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _cpu_count_cgroup(os_cpu_count):
    """Apply any Linux Cgroup CPU-bandwidth quota to *os_cpu_count*."""
    # Cgroup CPU bandwidth limit available in Linux since 2.6 kernel
    cgroup_v2_file = "/sys/fs/cgroup/cpu.max"
    cgroup_v1_quota_file = "/sys/fs/cgroup/cpu/cpu.cfs_quota_us"
    cgroup_v1_period_file = "/sys/fs/cgroup/cpu/cpu.cfs_period_us"

    if os.path.exists(cgroup_v2_file):
        # cgroup v2
        # https://www.kernel.org/doc/html/latest/admin-guide/cgroup-v2.html
        with open(cgroup_v2_file) as fh:
            cpu_quota_us, cpu_period_us = fh.read().strip().split()
    elif os.path.exists(cgroup_v1_quota_file) and os.path.exists(
        cgroup_v1_period_file
    ):
        # cgroup v1
        # https://www.kernel.org/doc/html/latest/scheduler/sched-bwc.html#management
        with open(cgroup_v1_quota_file) as fh:
            cpu_quota_us = fh.read().strip()
        with open(cgroup_v1_period_file) as fh:
            cpu_period_us = fh.read().strip()
    else:
        # No Cgroup CPU bandwidth limit (e.g. non-Linux platform)
        cpu_quota_us = "max"
        cpu_period_us = 100_000  # unused, for consistency with default values

    if cpu_quota_us == "max":
        # No active Cgroup quota on a Cgroup-capable platform
        return os_cpu_count

    quota = int(cpu_quota_us)
    period = int(cpu_period_us)
    if quota <= 0 or period <= 0:  # pragma: no cover
        # Setting a negative cpu_quota_us value is a valid way to disable
        # cgroup CPU bandwith limits
        return os_cpu_count
    return math.ceil(quota / period)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def _cpu_count_affinity(os_cpu_count):
|
| 185 |
+
# Number of available CPUs given affinity settings
|
| 186 |
+
if hasattr(os, "sched_getaffinity"):
|
| 187 |
+
try:
|
| 188 |
+
return len(os.sched_getaffinity(0))
|
| 189 |
+
except NotImplementedError:
|
| 190 |
+
pass
|
| 191 |
+
|
| 192 |
+
# On PyPy and possibly other platforms, os.sched_getaffinity does not exist
|
| 193 |
+
# or raises NotImplementedError, let's try with the psutil if installed.
|
| 194 |
+
try:
|
| 195 |
+
import psutil
|
| 196 |
+
|
| 197 |
+
p = psutil.Process()
|
| 198 |
+
if hasattr(p, "cpu_affinity"):
|
| 199 |
+
return len(p.cpu_affinity())
|
| 200 |
+
|
| 201 |
+
except ImportError: # pragma: no cover
|
| 202 |
+
if (
|
| 203 |
+
sys.platform == "linux"
|
| 204 |
+
and os.environ.get("LOKY_MAX_CPU_COUNT") is None
|
| 205 |
+
):
|
| 206 |
+
# PyPy does not implement os.sched_getaffinity on Linux which
|
| 207 |
+
# can cause severe oversubscription problems. Better warn the
|
| 208 |
+
# user in this particularly pathological case which can wreck
|
| 209 |
+
# havoc, typically on CI workers.
|
| 210 |
+
warnings.warn(
|
| 211 |
+
"Failed to inspect CPU affinity constraints on this system. "
|
| 212 |
+
"Please install psutil or explictly set LOKY_MAX_CPU_COUNT."
|
| 213 |
+
)
|
| 214 |
+
|
| 215 |
+
# This can happen for platforms that do not implement any kind of CPU
|
| 216 |
+
# infinity such as macOS-based platforms.
|
| 217 |
+
return os_cpu_count
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def _cpu_count_user(os_cpu_count):
    """Number of user defined available CPUs"""
    # Take the most restrictive of the affinity mask, the cgroup quota and
    # the loky-specific LOKY_MAX_CPU_COUNT environment variable.
    limits = (
        _cpu_count_affinity(os_cpu_count),
        _cpu_count_cgroup(os_cpu_count),
        # User defined soft-limit passed as a loky specific environment variable.
        int(os.environ.get("LOKY_MAX_CPU_COUNT", os_cpu_count)),
    )
    return min(limits)
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def _count_physical_cores():
    """Return a tuple (number of physical cores, exception)

    If the number of physical cores is found, exception is set to None.
    If it has not been found, return ("not found", exception).

    The number of physical cores is cached to avoid repeating subprocess calls.
    """
    exception = None

    # First check if the value is cached
    global physical_cores_cache
    if physical_cores_cache is not None:
        return physical_cores_cache, exception

    # Not cached yet, find it
    try:
        if sys.platform == "linux":
            cpu_info = subprocess.run(
                "lscpu --parse=core".split(), capture_output=True, text=True
            )
            cpu_info = cpu_info.stdout.splitlines()
            cpu_info = {line for line in cpu_info if not line.startswith("#")}
            # Distinct core ids count once, whatever their SMT siblings.
            cpu_count_physical = len(cpu_info)
        elif sys.platform == "win32":
            # NOTE(review): `wmic` is deprecated on recent Windows builds —
            # confirm it is still available on the supported platforms.
            cpu_info = subprocess.run(
                "wmic CPU Get NumberOfCores /Format:csv".split(),
                capture_output=True,
                text=True,
            )
            cpu_info = cpu_info.stdout.splitlines()
            cpu_info = [
                l.split(",")[1]
                for l in cpu_info
                if (l and l != "Node,NumberOfCores")
            ]
            # Sum the per-socket core counts.
            cpu_count_physical = sum(map(int, cpu_info))
        elif sys.platform == "darwin":
            cpu_info = subprocess.run(
                "sysctl -n hw.physicalcpu".split(),
                capture_output=True,
                text=True,
            )
            cpu_info = cpu_info.stdout
            cpu_count_physical = int(cpu_info)
        else:
            raise NotImplementedError(f"unsupported platform: {sys.platform}")

        # if cpu_count_physical < 1, we did not find a valid value
        if cpu_count_physical < 1:
            raise ValueError(f"found {cpu_count_physical} physical cores < 1")

    except Exception as e:
        exception = e
        cpu_count_physical = "not found"

    # Put the result in cache
    physical_cores_cache = cpu_count_physical

    return cpu_count_physical, exception
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
class LokyContext(BaseContext):
    """Context relying on the LokyProcess."""

    _name = "loky"
    Process = LokyProcess
    cpu_count = staticmethod(cpu_count)

    def Queue(self, maxsize=0, reducers=None):
        """Returns a queue object"""
        from .queues import Queue

        return Queue(maxsize, reducers=reducers, ctx=self.get_context())

    def SimpleQueue(self, reducers=None):
        """Returns a queue object"""
        from .queues import SimpleQueue

        return SimpleQueue(reducers=reducers, ctx=self.get_context())

    # On Windows the BaseContext defaults are used; on Unix the methods
    # below override them with loky's resource_tracker-aware primitives.
    if sys.platform != "win32":
        """For Unix platform, use our custom implementation of synchronize
        ensuring that we use the loky.backend.resource_tracker to clean-up
        the semaphores in case of a worker crash.
        """

        def Semaphore(self, value=1):
            """Returns a semaphore object"""
            from .synchronize import Semaphore

            return Semaphore(value=value)

        def BoundedSemaphore(self, value):
            """Returns a bounded semaphore object"""
            from .synchronize import BoundedSemaphore

            return BoundedSemaphore(value)

        def Lock(self):
            """Returns a lock object"""
            from .synchronize import Lock

            return Lock()

        def RLock(self):
            """Returns a recurrent lock object"""
            from .synchronize import RLock

            return RLock()

        def Condition(self, lock=None):
            """Returns a condition object"""
            from .synchronize import Condition

            return Condition(lock)

        def Event(self):
            """Returns an event object"""
            from .synchronize import Event

            return Event()
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
class LokyInitMainContext(LokyContext):
    """Extra context with LokyProcess, which does load the main module

    This context is used for compatibility in the case ``cloudpickle`` is not
    present on the running system. This permits to load functions defined in
    the ``main`` module, using proper safeguards. The declaration of the
    ``executor`` should be protected by ``if __name__ == "__main__":`` and the
    functions and variable used from main should be out of this block.

    This mimics the default behavior of multiprocessing under Windows and the
    behavior of the ``spawn`` start method on a posix system.
    For more details, see the end of the following section of python doc
    https://docs.python.org/3/library/multiprocessing.html#multiprocessing-programming
    """

    _name = "loky_init_main"
    # Same as "loky" except the child process re-imports __main__.
    Process = LokyInitMainProcess
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
# Register loky context so it works with multiprocessing.get_context
# NOTE: this relies on the private multiprocessing.context._concrete_contexts
# registry, which is not a documented public API.
ctx_loky = LokyContext()
mp.context._concrete_contexts["loky"] = ctx_loky
mp.context._concrete_contexts["loky_init_main"] = LokyInitMainContext()
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/popen_loky_win32.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
import msvcrt
|
| 4 |
+
import _winapi
|
| 5 |
+
from pickle import load
|
| 6 |
+
from multiprocessing import process, util
|
| 7 |
+
from multiprocessing.context import set_spawning_popen
|
| 8 |
+
from multiprocessing.popen_spawn_win32 import Popen as _Popen
|
| 9 |
+
|
| 10 |
+
from . import reduction, spawn
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
__all__ = ["Popen"]
|
| 14 |
+
|
| 15 |
+
#
|
| 16 |
+
#
|
| 17 |
+
#
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _path_eq(p1, p2):
|
| 21 |
+
return p1 == p2 or os.path.normcase(p1) == os.path.normcase(p2)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
WINENV = hasattr(sys, "_base_executable") and not _path_eq(
|
| 25 |
+
sys.executable, sys._base_executable
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _close_handles(*handles):
    """Close every Windows handle in *handles* via the Win32 API."""
    for h in handles:
        _winapi.CloseHandle(h)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
#
|
| 35 |
+
# We define a Popen class similar to the one from subprocess, but
|
| 36 |
+
# whose constructor takes a process object as its argument.
|
| 37 |
+
#
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class Popen(_Popen):
    """
    Start a subprocess to run the code of a process object.

    We differ from cpython implementation with the way we handle environment
    variables, in order to be able to modify them in the child processes before
    importing any library, in order to control the number of threads in C-level
    threadpools.

    We also use the loky preparation data, in particular to handle main_module
    inits and the loky resource tracker.
    """

    method = "loky"

    def __init__(self, process_obj):
        # Gather the data (main module info, paths, tracker fd...) that the
        # child needs before unpickling the process object.
        prep_data = spawn.get_preparation_data(
            process_obj._name, getattr(process_obj, "init_main_module", True)
        )

        # read end of pipe will be duplicated by the child process
        # -- see spawn_main() in spawn.py.
        #
        # bpo-33929: Previously, the read end of pipe was "stolen" by the child
        # process, but it leaked a handle if the child process had been
        # terminated before it could steal the handle from the parent process.
        rhandle, whandle = _winapi.CreatePipe(None, 0)
        wfd = msvcrt.open_osfhandle(whandle, 0)
        cmd = get_command_line(parent_pid=os.getpid(), pipe_handle=rhandle)

        python_exe = spawn.get_executable()

        # copy the environment variables to set in the child process
        child_env = {**os.environ, **process_obj.env}

        # bpo-35797: When running in a venv, we bypass the redirect
        # executor and launch our base Python.
        if WINENV and _path_eq(python_exe, sys.executable):
            cmd[0] = python_exe = sys._base_executable
            child_env["__PYVENV_LAUNCHER__"] = sys.executable

        # Quote each argument; CreateProcess takes a single command string.
        cmd = " ".join(f'"{x}"' for x in cmd)

        with open(wfd, "wb") as to_child:
            # start process
            try:
                hp, ht, pid, _ = _winapi.CreateProcess(
                    python_exe,
                    cmd,
                    None,
                    None,
                    False,
                    0,
                    child_env,
                    None,
                    None,
                )
                # The thread handle is not needed; keep only the process handle.
                _winapi.CloseHandle(ht)
            except BaseException:
                _winapi.CloseHandle(rhandle)
                raise

            # set attributes of self
            self.pid = pid
            self.returncode = None
            self._handle = hp
            self.sentinel = int(hp)
            self.finalizer = util.Finalize(
                self, _close_handles, (self.sentinel, int(rhandle))
            )

            # send information to child
            set_spawning_popen(self)
            try:
                reduction.dump(prep_data, to_child)
                reduction.dump(process_obj, to_child)
            finally:
                set_spawning_popen(None)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def get_command_line(pipe_handle, parent_pid, **kwds):
    """Returns prefix of command line used for spawning a child process."""
    if getattr(sys, "frozen", False):
        # Frozen executables re-exec themselves directly.
        return [sys.executable, "--multiprocessing-fork", pipe_handle]
    prog = (
        "from joblib.externals.loky.backend.popen_loky_win32 import main; "
        f"main(pipe_handle={pipe_handle}, parent_pid={parent_pid})"
    )
    return [
        spawn.get_executable(),
        *util._args_from_interpreter_flags(),
        "-c",
        prog,
        "--multiprocessing-fork",
    ]
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def is_forking(argv):
    """Return whether commandline indicates we are forking."""
    # Return the boolean expression directly instead of an if/else ladder.
    return len(argv) >= 2 and argv[1] == "--multiprocessing-fork"
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def main(pipe_handle, parent_pid=None):
    """Run code specified by data received over pipe."""
    assert is_forking(sys.argv), "Not forking"

    # Executed in the child: duplicate the pipe handle from the parent,
    # unpickle the preparation data and the process object, then bootstrap it.
    if parent_pid is not None:
        source_process = _winapi.OpenProcess(
            _winapi.SYNCHRONIZE | _winapi.PROCESS_DUP_HANDLE, False, parent_pid
        )
    else:
        source_process = None
    new_handle = reduction.duplicate(
        pipe_handle, source_process=source_process
    )
    fd = msvcrt.open_osfhandle(new_handle, os.O_RDONLY)
    # The parent process handle doubles as the liveness sentinel.
    parent_sentinel = source_process

    with os.fdopen(fd, "rb", closefd=True) as from_parent:
        # _inheriting tells prepare() that we are in a freshly spawned child.
        process.current_process()._inheriting = True
        try:
            preparation_data = load(from_parent)
            spawn.prepare(preparation_data, parent_sentinel)
            self = load(from_parent)
        finally:
            del process.current_process()._inheriting

    exitcode = self._bootstrap(parent_sentinel)
    sys.exit(exitcode)
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/reduction.py
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
###############################################################################
|
| 2 |
+
# Customizable Pickler with some basic reducers
|
| 3 |
+
#
|
| 4 |
+
# author: Thomas Moreau
|
| 5 |
+
#
|
| 6 |
+
# adapted from multiprocessing/reduction.py (17/02/2017)
|
| 7 |
+
# * Replace the ForkingPickler with a similar _LokyPickler,
|
| 8 |
+
# * Add CustomizableLokyPickler to allow customizing pickling process
|
| 9 |
+
# on the fly.
|
| 10 |
+
#
|
| 11 |
+
import copyreg
|
| 12 |
+
import io
|
| 13 |
+
import functools
|
| 14 |
+
import types
|
| 15 |
+
import sys
|
| 16 |
+
import os
|
| 17 |
+
|
| 18 |
+
from multiprocessing import util
|
| 19 |
+
from pickle import loads, HIGHEST_PROTOCOL
|
| 20 |
+
|
| 21 |
+
###############################################################################
|
| 22 |
+
# Enable custom pickling in Loky.
|
| 23 |
+
|
| 24 |
+
_dispatch_table = {}
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def register(type_, reduce_function):
|
| 28 |
+
_dispatch_table[type_] = reduce_function
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
###############################################################################
|
| 32 |
+
# Registers extra pickling routines to improve picklization for loky
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
# make methods picklable
|
| 36 |
+
def _reduce_method(m):
|
| 37 |
+
if m.__self__ is None:
|
| 38 |
+
return getattr, (m.__class__, m.__func__.__name__)
|
| 39 |
+
else:
|
| 40 |
+
return getattr, (m.__self__, m.__func__.__name__)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class _C:
|
| 44 |
+
def f(self):
|
| 45 |
+
pass
|
| 46 |
+
|
| 47 |
+
@classmethod
|
| 48 |
+
def h(cls):
|
| 49 |
+
pass
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
register(type(_C().f), _reduce_method)
|
| 53 |
+
register(type(_C.h), _reduce_method)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
if not hasattr(sys, "pypy_version_info"):
|
| 57 |
+
# PyPy uses functions instead of method_descriptors and wrapper_descriptors
|
| 58 |
+
def _reduce_method_descriptor(m):
|
| 59 |
+
return getattr, (m.__objclass__, m.__name__)
|
| 60 |
+
|
| 61 |
+
register(type(list.append), _reduce_method_descriptor)
|
| 62 |
+
register(type(int.__add__), _reduce_method_descriptor)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# Make partial func pickable
|
| 66 |
+
def _reduce_partial(p):
|
| 67 |
+
return _rebuild_partial, (p.func, p.args, p.keywords or {})
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def _rebuild_partial(func, args, keywords):
|
| 71 |
+
return functools.partial(func, *args, **keywords)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
register(functools.partial, _reduce_partial)
|
| 75 |
+
|
| 76 |
+
if sys.platform != "win32":
|
| 77 |
+
from ._posix_reduction import _mk_inheritable # noqa: F401
|
| 78 |
+
else:
|
| 79 |
+
from . import _win_reduction # noqa: F401
|
| 80 |
+
|
| 81 |
+
# global variable to change the pickler behavior
|
| 82 |
+
try:
|
| 83 |
+
from joblib.externals import cloudpickle # noqa: F401
|
| 84 |
+
|
| 85 |
+
DEFAULT_ENV = "cloudpickle"
|
| 86 |
+
except ImportError:
|
| 87 |
+
# If cloudpickle is not present, fallback to pickle
|
| 88 |
+
DEFAULT_ENV = "pickle"
|
| 89 |
+
|
| 90 |
+
ENV_LOKY_PICKLER = os.environ.get("LOKY_PICKLER", DEFAULT_ENV)
|
| 91 |
+
_LokyPickler = None
|
| 92 |
+
_loky_pickler_name = None
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def set_loky_pickler(loky_pickler=None):
|
| 96 |
+
global _LokyPickler, _loky_pickler_name
|
| 97 |
+
|
| 98 |
+
if loky_pickler is None:
|
| 99 |
+
loky_pickler = ENV_LOKY_PICKLER
|
| 100 |
+
|
| 101 |
+
loky_pickler_cls = None
|
| 102 |
+
|
| 103 |
+
# The default loky_pickler is cloudpickle
|
| 104 |
+
if loky_pickler in ["", None]:
|
| 105 |
+
loky_pickler = "cloudpickle"
|
| 106 |
+
|
| 107 |
+
if loky_pickler == _loky_pickler_name:
|
| 108 |
+
return
|
| 109 |
+
|
| 110 |
+
if loky_pickler == "cloudpickle":
|
| 111 |
+
from joblib.externals.cloudpickle import CloudPickler as loky_pickler_cls
|
| 112 |
+
else:
|
| 113 |
+
try:
|
| 114 |
+
from importlib import import_module
|
| 115 |
+
|
| 116 |
+
module_pickle = import_module(loky_pickler)
|
| 117 |
+
loky_pickler_cls = module_pickle.Pickler
|
| 118 |
+
except (ImportError, AttributeError) as e:
|
| 119 |
+
extra_info = (
|
| 120 |
+
"\nThis error occurred while setting loky_pickler to"
|
| 121 |
+
f" '{loky_pickler}', as required by the env variable "
|
| 122 |
+
"LOKY_PICKLER or the function set_loky_pickler."
|
| 123 |
+
)
|
| 124 |
+
e.args = (e.args[0] + extra_info,) + e.args[1:]
|
| 125 |
+
e.msg = e.args[0]
|
| 126 |
+
raise e
|
| 127 |
+
|
| 128 |
+
util.debug(
|
| 129 |
+
f"Using '{loky_pickler if loky_pickler else 'cloudpickle'}' for "
|
| 130 |
+
"serialization."
|
| 131 |
+
)
|
| 132 |
+
|
| 133 |
+
class CustomizablePickler(loky_pickler_cls):
|
| 134 |
+
_loky_pickler_cls = loky_pickler_cls
|
| 135 |
+
|
| 136 |
+
def _set_dispatch_table(self, dispatch_table):
|
| 137 |
+
for ancestor_class in self._loky_pickler_cls.mro():
|
| 138 |
+
dt_attribute = getattr(ancestor_class, "dispatch_table", None)
|
| 139 |
+
if isinstance(dt_attribute, types.MemberDescriptorType):
|
| 140 |
+
# Ancestor class (typically _pickle.Pickler) has a
|
| 141 |
+
# member_descriptor for its "dispatch_table" attribute. Use
|
| 142 |
+
# it to set the dispatch_table as a member instead of a
|
| 143 |
+
# dynamic attribute in the __dict__ of the instance,
|
| 144 |
+
# otherwise it will not be taken into account by the C
|
| 145 |
+
# implementation of the dump method if a subclass defines a
|
| 146 |
+
# class-level dispatch_table attribute as was done in
|
| 147 |
+
# cloudpickle 1.6.0:
|
| 148 |
+
# https://github.com/joblib/loky/pull/260
|
| 149 |
+
dt_attribute.__set__(self, dispatch_table)
|
| 150 |
+
break
|
| 151 |
+
|
| 152 |
+
# On top of member descriptor set, also use setattr such that code
|
| 153 |
+
# that directly access self.dispatch_table gets a consistent view
|
| 154 |
+
# of the same table.
|
| 155 |
+
self.dispatch_table = dispatch_table
|
| 156 |
+
|
| 157 |
+
def __init__(self, writer, reducers=None, protocol=HIGHEST_PROTOCOL):
|
| 158 |
+
loky_pickler_cls.__init__(self, writer, protocol=protocol)
|
| 159 |
+
if reducers is None:
|
| 160 |
+
reducers = {}
|
| 161 |
+
|
| 162 |
+
if hasattr(self, "dispatch_table"):
|
| 163 |
+
# Force a copy that we will update without mutating the
|
| 164 |
+
# any class level defined dispatch_table.
|
| 165 |
+
loky_dt = dict(self.dispatch_table)
|
| 166 |
+
else:
|
| 167 |
+
# Use standard reducers as bases
|
| 168 |
+
loky_dt = copyreg.dispatch_table.copy()
|
| 169 |
+
|
| 170 |
+
# Register loky specific reducers
|
| 171 |
+
loky_dt.update(_dispatch_table)
|
| 172 |
+
|
| 173 |
+
# Set the new dispatch table, taking care of the fact that we
|
| 174 |
+
# need to use the member_descriptor when we inherit from a
|
| 175 |
+
# subclass of the C implementation of the Pickler base class
|
| 176 |
+
# with an class level dispatch_table attribute.
|
| 177 |
+
self._set_dispatch_table(loky_dt)
|
| 178 |
+
|
| 179 |
+
# Register the reducers
|
| 180 |
+
for type, reduce_func in reducers.items():
|
| 181 |
+
self.register(type, reduce_func)
|
| 182 |
+
|
| 183 |
+
def register(self, type, reduce_func):
|
| 184 |
+
"""Attach a reducer function to a given type in the dispatch table."""
|
| 185 |
+
self.dispatch_table[type] = reduce_func
|
| 186 |
+
|
| 187 |
+
_LokyPickler = CustomizablePickler
|
| 188 |
+
_loky_pickler_name = loky_pickler
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def get_loky_pickler_name():
|
| 192 |
+
global _loky_pickler_name
|
| 193 |
+
return _loky_pickler_name
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def get_loky_pickler():
|
| 197 |
+
global _LokyPickler
|
| 198 |
+
return _LokyPickler
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
# Set it to its default value
|
| 202 |
+
set_loky_pickler()
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def dump(obj, file, reducers=None, protocol=None):
|
| 206 |
+
"""Replacement for pickle.dump() using _LokyPickler."""
|
| 207 |
+
global _LokyPickler
|
| 208 |
+
_LokyPickler(file, reducers=reducers, protocol=protocol).dump(obj)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def dumps(obj, reducers=None, protocol=None):
|
| 212 |
+
global _LokyPickler
|
| 213 |
+
|
| 214 |
+
buf = io.BytesIO()
|
| 215 |
+
dump(obj, buf, reducers=reducers, protocol=protocol)
|
| 216 |
+
return buf.getbuffer()
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
__all__ = ["dump", "dumps", "loads", "register", "set_loky_pickler"]
|
| 220 |
+
|
| 221 |
+
if sys.platform == "win32":
|
| 222 |
+
from multiprocessing.reduction import duplicate
|
| 223 |
+
|
| 224 |
+
__all__ += ["duplicate"]
|
evalkit_internvl/lib/python3.10/site-packages/joblib/externals/loky/backend/utils.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
import time
|
| 4 |
+
import errno
|
| 5 |
+
import signal
|
| 6 |
+
import warnings
|
| 7 |
+
import subprocess
|
| 8 |
+
import traceback
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import psutil
|
| 12 |
+
except ImportError:
|
| 13 |
+
psutil = None
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def kill_process_tree(process, use_psutil=True):
|
| 17 |
+
"""Terminate process and its descendants with SIGKILL"""
|
| 18 |
+
if use_psutil and psutil is not None:
|
| 19 |
+
_kill_process_tree_with_psutil(process)
|
| 20 |
+
else:
|
| 21 |
+
_kill_process_tree_without_psutil(process)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def recursive_terminate(process, use_psutil=True):
|
| 25 |
+
warnings.warn(
|
| 26 |
+
"recursive_terminate is deprecated in loky 3.2, use kill_process_tree"
|
| 27 |
+
"instead",
|
| 28 |
+
DeprecationWarning,
|
| 29 |
+
)
|
| 30 |
+
kill_process_tree(process, use_psutil=use_psutil)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _kill_process_tree_with_psutil(process):
|
| 34 |
+
try:
|
| 35 |
+
descendants = psutil.Process(process.pid).children(recursive=True)
|
| 36 |
+
except psutil.NoSuchProcess:
|
| 37 |
+
return
|
| 38 |
+
|
| 39 |
+
# Kill the descendants in reverse order to avoid killing the parents before
|
| 40 |
+
# the descendant in cases where there are more processes nested.
|
| 41 |
+
for descendant in descendants[::-1]:
|
| 42 |
+
try:
|
| 43 |
+
descendant.kill()
|
| 44 |
+
except psutil.NoSuchProcess:
|
| 45 |
+
pass
|
| 46 |
+
|
| 47 |
+
try:
|
| 48 |
+
psutil.Process(process.pid).kill()
|
| 49 |
+
except psutil.NoSuchProcess:
|
| 50 |
+
pass
|
| 51 |
+
process.join()
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _kill_process_tree_without_psutil(process):
|
| 55 |
+
"""Terminate a process and its descendants."""
|
| 56 |
+
try:
|
| 57 |
+
if sys.platform == "win32":
|
| 58 |
+
_windows_taskkill_process_tree(process.pid)
|
| 59 |
+
else:
|
| 60 |
+
_posix_recursive_kill(process.pid)
|
| 61 |
+
except Exception: # pragma: no cover
|
| 62 |
+
details = traceback.format_exc()
|
| 63 |
+
warnings.warn(
|
| 64 |
+
"Failed to kill subprocesses on this platform. Please install"
|
| 65 |
+
"psutil: https://github.com/giampaolo/psutil\n"
|
| 66 |
+
f"Details:\n{details}"
|
| 67 |
+
)
|
| 68 |
+
# In case we cannot introspect or kill the descendants, we fall back to
|
| 69 |
+
# only killing the main process.
|
| 70 |
+
#
|
| 71 |
+
# Note: on Windows, process.kill() is an alias for process.terminate()
|
| 72 |
+
# which in turns calls the Win32 API function TerminateProcess().
|
| 73 |
+
process.kill()
|
| 74 |
+
process.join()
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _windows_taskkill_process_tree(pid):
|
| 78 |
+
# On windows, the taskkill function with option `/T` terminate a given
|
| 79 |
+
# process pid and its children.
|
| 80 |
+
try:
|
| 81 |
+
subprocess.check_output(
|
| 82 |
+
["taskkill", "/F", "/T", "/PID", str(pid)], stderr=None
|
| 83 |
+
)
|
| 84 |
+
except subprocess.CalledProcessError as e:
|
| 85 |
+
# In Windows, taskkill returns 128, 255 for no process found.
|
| 86 |
+
if e.returncode not in [128, 255]:
|
| 87 |
+
# Let's raise to let the caller log the error details in a
|
| 88 |
+
# warning and only kill the root process.
|
| 89 |
+
raise # pragma: no cover
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def _kill(pid):
|
| 93 |
+
# Not all systems (e.g. Windows) have a SIGKILL, but the C specification
|
| 94 |
+
# mandates a SIGTERM signal. While Windows is handled specifically above,
|
| 95 |
+
# let's try to be safe for other hypothetic platforms that only have
|
| 96 |
+
# SIGTERM without SIGKILL.
|
| 97 |
+
kill_signal = getattr(signal, "SIGKILL", signal.SIGTERM)
|
| 98 |
+
try:
|
| 99 |
+
os.kill(pid, kill_signal)
|
| 100 |
+
except OSError as e:
|
| 101 |
+
# if OSError is raised with [Errno 3] no such process, the process
|
| 102 |
+
# is already terminated, else, raise the error and let the top
|
| 103 |
+
# level function raise a warning and retry to kill the process.
|
| 104 |
+
if e.errno != errno.ESRCH:
|
| 105 |
+
raise # pragma: no cover
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def _posix_recursive_kill(pid):
|
| 109 |
+
"""Recursively kill the descendants of a process before killing it."""
|
| 110 |
+
try:
|
| 111 |
+
children_pids = subprocess.check_output(
|
| 112 |
+
["pgrep", "-P", str(pid)], stderr=None, text=True
|
| 113 |
+
)
|
| 114 |
+
except subprocess.CalledProcessError as e:
|
| 115 |
+
# `ps` returns 1 when no child process has been found
|
| 116 |
+
if e.returncode == 1:
|
| 117 |
+
children_pids = ""
|
| 118 |
+
else:
|
| 119 |
+
raise # pragma: no cover
|
| 120 |
+
|
| 121 |
+
# Decode the result, split the cpid and remove the trailing line
|
| 122 |
+
for cpid in children_pids.splitlines():
|
| 123 |
+
cpid = int(cpid)
|
| 124 |
+
_posix_recursive_kill(cpid)
|
| 125 |
+
|
| 126 |
+
_kill(pid)
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def get_exitcodes_terminated_worker(processes):
|
| 130 |
+
"""Return a formatted string with the exitcodes of terminated workers.
|
| 131 |
+
|
| 132 |
+
If necessary, wait (up to .25s) for the system to correctly set the
|
| 133 |
+
exitcode of one terminated worker.
|
| 134 |
+
"""
|
| 135 |
+
patience = 5
|
| 136 |
+
|
| 137 |
+
# Catch the exitcode of the terminated workers. There should at least be
|
| 138 |
+
# one. If not, wait a bit for the system to correctly set the exitcode of
|
| 139 |
+
# the terminated worker.
|
| 140 |
+
exitcodes = [
|
| 141 |
+
p.exitcode for p in list(processes.values()) if p.exitcode is not None
|
| 142 |
+
]
|
| 143 |
+
while not exitcodes and patience > 0:
|
| 144 |
+
patience -= 1
|
| 145 |
+
exitcodes = [
|
| 146 |
+
p.exitcode
|
| 147 |
+
for p in list(processes.values())
|
| 148 |
+
if p.exitcode is not None
|
| 149 |
+
]
|
| 150 |
+
time.sleep(0.05)
|
| 151 |
+
|
| 152 |
+
return _format_exitcodes(exitcodes)
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def _format_exitcodes(exitcodes):
|
| 156 |
+
"""Format a list of exit code with names of the signals if possible"""
|
| 157 |
+
str_exitcodes = [
|
| 158 |
+
f"{_get_exitcode_name(e)}({e})" for e in exitcodes if e is not None
|
| 159 |
+
]
|
| 160 |
+
return "{" + ", ".join(str_exitcodes) + "}"
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def _get_exitcode_name(exitcode):
|
| 164 |
+
if sys.platform == "win32":
|
| 165 |
+
# The exitcode are unreliable on windows (see bpo-31863).
|
| 166 |
+
# For this case, return UNKNOWN
|
| 167 |
+
return "UNKNOWN"
|
| 168 |
+
|
| 169 |
+
if exitcode < 0:
|
| 170 |
+
try:
|
| 171 |
+
import signal
|
| 172 |
+
|
| 173 |
+
return signal.Signals(-exitcode).name
|
| 174 |
+
except ValueError:
|
| 175 |
+
return "UNKNOWN"
|
| 176 |
+
elif exitcode != 255:
|
| 177 |
+
# The exitcode are unreliable on forkserver were 255 is always returned
|
| 178 |
+
# (see bpo-30589). For this case, return UNKNOWN
|
| 179 |
+
return "EXIT"
|
| 180 |
+
|
| 181 |
+
return "UNKNOWN"
|
evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt
ADDED
|
@@ -0,0 +1,1568 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
End User License Agreement
|
| 2 |
+
--------------------------
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
Preface
|
| 6 |
+
-------
|
| 7 |
+
|
| 8 |
+
The Software License Agreement in Chapter 1 and the Supplement
|
| 9 |
+
in Chapter 2 contain license terms and conditions that govern
|
| 10 |
+
the use of NVIDIA software. By accepting this agreement, you
|
| 11 |
+
agree to comply with all the terms and conditions applicable
|
| 12 |
+
to the product(s) included herein.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
NVIDIA Driver
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
Description
|
| 19 |
+
|
| 20 |
+
This package contains the operating system driver and
|
| 21 |
+
fundamental system software components for NVIDIA GPUs.
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
NVIDIA CUDA Toolkit
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
Description
|
| 28 |
+
|
| 29 |
+
The NVIDIA CUDA Toolkit provides command-line and graphical
|
| 30 |
+
tools for building, debugging and optimizing the performance
|
| 31 |
+
of applications accelerated by NVIDIA GPUs, runtime and math
|
| 32 |
+
libraries, and documentation including programming guides,
|
| 33 |
+
user manuals, and API references.
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
Default Install Location of CUDA Toolkit
|
| 37 |
+
|
| 38 |
+
Windows platform:
|
| 39 |
+
|
| 40 |
+
%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
|
| 41 |
+
|
| 42 |
+
Linux platform:
|
| 43 |
+
|
| 44 |
+
/usr/local/cuda-#.#
|
| 45 |
+
|
| 46 |
+
Mac platform:
|
| 47 |
+
|
| 48 |
+
/Developer/NVIDIA/CUDA-#.#
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
NVIDIA CUDA Samples
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
Description
|
| 55 |
+
|
| 56 |
+
This package includes over 100+ CUDA examples that demonstrate
|
| 57 |
+
various CUDA programming principles, and efficient CUDA
|
| 58 |
+
implementation of algorithms in specific application domains.
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
Default Install Location of CUDA Samples
|
| 62 |
+
|
| 63 |
+
Windows platform:
|
| 64 |
+
|
| 65 |
+
%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
|
| 66 |
+
|
| 67 |
+
Linux platform:
|
| 68 |
+
|
| 69 |
+
/usr/local/cuda-#.#/samples
|
| 70 |
+
|
| 71 |
+
and
|
| 72 |
+
|
| 73 |
+
$HOME/NVIDIA_CUDA-#.#_Samples
|
| 74 |
+
|
| 75 |
+
Mac platform:
|
| 76 |
+
|
| 77 |
+
/Developer/NVIDIA/CUDA-#.#/samples
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
NVIDIA Nsight Visual Studio Edition (Windows only)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
Description
|
| 84 |
+
|
| 85 |
+
NVIDIA Nsight Development Platform, Visual Studio Edition is a
|
| 86 |
+
development environment integrated into Microsoft Visual
|
| 87 |
+
Studio that provides tools for debugging, profiling, analyzing
|
| 88 |
+
and optimizing your GPU computing and graphics applications.
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
Default Install Location of Nsight Visual Studio Edition
|
| 92 |
+
|
| 93 |
+
Windows platform:
|
| 94 |
+
|
| 95 |
+
%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
1. License Agreement for NVIDIA Software Development Kits
|
| 99 |
+
---------------------------------------------------------
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
Release Date: July 26, 2018
|
| 103 |
+
---------------------------
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
Important NoticeRead before downloading, installing,
|
| 107 |
+
copying or using the licensed software:
|
| 108 |
+
-------------------------------------------------------
|
| 109 |
+
|
| 110 |
+
This license agreement, including exhibits attached
|
| 111 |
+
("Agreement”) is a legal agreement between you and NVIDIA
|
| 112 |
+
Corporation ("NVIDIA") and governs your use of a NVIDIA
|
| 113 |
+
software development kit (“SDK”).
|
| 114 |
+
|
| 115 |
+
Each SDK has its own set of software and materials, but here
|
| 116 |
+
is a description of the types of items that may be included in
|
| 117 |
+
a SDK: source code, header files, APIs, data sets and assets
|
| 118 |
+
(examples include images, textures, models, scenes, videos,
|
| 119 |
+
native API input/output files), binary software, sample code,
|
| 120 |
+
libraries, utility programs, programming code and
|
| 121 |
+
documentation.
|
| 122 |
+
|
| 123 |
+
This Agreement can be accepted only by an adult of legal age
|
| 124 |
+
of majority in the country in which the SDK is used.
|
| 125 |
+
|
| 126 |
+
If you are entering into this Agreement on behalf of a company
|
| 127 |
+
or other legal entity, you represent that you have the legal
|
| 128 |
+
authority to bind the entity to this Agreement, in which case
|
| 129 |
+
“you” will mean the entity you represent.
|
| 130 |
+
|
| 131 |
+
If you don’t have the required age or authority to accept
|
| 132 |
+
this Agreement, or if you don’t accept all the terms and
|
| 133 |
+
conditions of this Agreement, do not download, install or use
|
| 134 |
+
the SDK.
|
| 135 |
+
|
| 136 |
+
You agree to use the SDK only for purposes that are permitted
|
| 137 |
+
by (a) this Agreement, and (b) any applicable law, regulation
|
| 138 |
+
or generally accepted practices or guidelines in the relevant
|
| 139 |
+
jurisdictions.
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
1.1. License
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
1.1.1. License Grant
|
| 146 |
+
|
| 147 |
+
Subject to the terms of this Agreement, NVIDIA hereby grants
|
| 148 |
+
you a non-exclusive, non-transferable license, without the
|
| 149 |
+
right to sublicense (except as expressly provided in this
|
| 150 |
+
Agreement) to:
|
| 151 |
+
|
| 152 |
+
1. Install and use the SDK,
|
| 153 |
+
|
| 154 |
+
2. Modify and create derivative works of sample source code
|
| 155 |
+
delivered in the SDK, and
|
| 156 |
+
|
| 157 |
+
3. Distribute those portions of the SDK that are identified
|
| 158 |
+
in this Agreement as distributable, as incorporated in
|
| 159 |
+
object code format into a software application that meets
|
| 160 |
+
the distribution requirements indicated in this Agreement.
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
1.1.2. Distribution Requirements
|
| 164 |
+
|
| 165 |
+
These are the distribution requirements for you to exercise
|
| 166 |
+
the distribution grant:
|
| 167 |
+
|
| 168 |
+
1. Your application must have material additional
|
| 169 |
+
functionality, beyond the included portions of the SDK.
|
| 170 |
+
|
| 171 |
+
2. The distributable portions of the SDK shall only be
|
| 172 |
+
accessed by your application.
|
| 173 |
+
|
| 174 |
+
3. The following notice shall be included in modifications
|
| 175 |
+
and derivative works of sample source code distributed:
|
| 176 |
+
“This software contains source code provided by NVIDIA
|
| 177 |
+
Corporation.”
|
| 178 |
+
|
| 179 |
+
4. Unless a developer tool is identified in this Agreement
|
| 180 |
+
as distributable, it is delivered for your internal use
|
| 181 |
+
only.
|
| 182 |
+
|
| 183 |
+
5. The terms under which you distribute your application
|
| 184 |
+
must be consistent with the terms of this Agreement,
|
| 185 |
+
including (without limitation) terms relating to the
|
| 186 |
+
license grant and license restrictions and protection of
|
| 187 |
+
NVIDIA’s intellectual property rights. Additionally, you
|
| 188 |
+
agree that you will protect the privacy, security and
|
| 189 |
+
legal rights of your application users.
|
| 190 |
+
|
| 191 |
+
6. You agree to notify NVIDIA in writing of any known or
|
| 192 |
+
suspected distribution or use of the SDK not in compliance
|
| 193 |
+
with the requirements of this Agreement, and to enforce
|
| 194 |
+
the terms of your agreements with respect to distributed
|
| 195 |
+
SDK.
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
1.1.3. Authorized Users
|
| 199 |
+
|
| 200 |
+
You may allow employees and contractors of your entity or of
|
| 201 |
+
your subsidiary(ies) to access and use the SDK from your
|
| 202 |
+
secure network to perform work on your behalf.
|
| 203 |
+
|
| 204 |
+
If you are an academic institution you may allow users
|
| 205 |
+
enrolled or employed by the academic institution to access and
|
| 206 |
+
use the SDK from your secure network.
|
| 207 |
+
|
| 208 |
+
You are responsible for the compliance with the terms of this
|
| 209 |
+
Agreement by your authorized users. If you become aware that
|
| 210 |
+
your authorized users didn’t follow the terms of this
|
| 211 |
+
Agreement, you agree to take reasonable steps to resolve the
|
| 212 |
+
non-compliance and prevent new occurrences.
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
1.1.4. Pre-Release SDK
|
| 216 |
+
|
| 217 |
+
The SDK versions identified as alpha, beta, preview or
|
| 218 |
+
otherwise as pre-release, may not be fully functional, may
|
| 219 |
+
contain errors or design flaws, and may have reduced or
|
| 220 |
+
different security, privacy, accessibility, availability, and
|
| 221 |
+
reliability standards relative to commercial versions of
|
| 222 |
+
NVIDIA software and materials. Use of a pre-release SDK may
|
| 223 |
+
result in unexpected results, loss of data, project delays or
|
| 224 |
+
other unpredictable damage or loss.
|
| 225 |
+
|
| 226 |
+
You may use a pre-release SDK at your own risk, understanding
|
| 227 |
+
that pre-release SDKs are not intended for use in production
|
| 228 |
+
or business-critical systems.
|
| 229 |
+
|
| 230 |
+
NVIDIA may choose not to make available a commercial version
|
| 231 |
+
of any pre-release SDK. NVIDIA may also choose to abandon
|
| 232 |
+
development and terminate the availability of a pre-release
|
| 233 |
+
SDK at any time without liability.
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
1.1.5. Updates
|
| 237 |
+
|
| 238 |
+
NVIDIA may, at its option, make available patches, workarounds
|
| 239 |
+
or other updates to this SDK. Unless the updates are provided
|
| 240 |
+
with their separate governing terms, they are deemed part of
|
| 241 |
+
the SDK licensed to you as provided in this Agreement. You
|
| 242 |
+
agree that the form and content of the SDK that NVIDIA
|
| 243 |
+
provides may change without prior notice to you. While NVIDIA
|
| 244 |
+
generally maintains compatibility between versions, NVIDIA may
|
| 245 |
+
in some cases make changes that introduce incompatibilities in
|
| 246 |
+
future versions of the SDK.
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
1.1.6. Third Party Licenses
|
| 250 |
+
|
| 251 |
+
The SDK may come bundled with, or otherwise include or be
|
| 252 |
+
distributed with, third party software licensed by a NVIDIA
|
| 253 |
+
supplier and/or open source software provided under an open
|
| 254 |
+
source license. Use of third party software is subject to the
|
| 255 |
+
third-party license terms, or in the absence of third party
|
| 256 |
+
terms, the terms of this Agreement. Copyright to third party
|
| 257 |
+
software is held by the copyright holders indicated in the
|
| 258 |
+
third-party software or license.
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
1.1.7. Reservation of Rights
|
| 262 |
+
|
| 263 |
+
NVIDIA reserves all rights, title, and interest in and to the
|
| 264 |
+
SDK, not expressly granted to you under this Agreement.
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
1.2. Limitations
|
| 268 |
+
|
| 269 |
+
The following license limitations apply to your use of the
|
| 270 |
+
SDK:
|
| 271 |
+
|
| 272 |
+
1. You may not reverse engineer, decompile or disassemble,
|
| 273 |
+
or remove copyright or other proprietary notices from any
|
| 274 |
+
portion of the SDK or copies of the SDK.
|
| 275 |
+
|
| 276 |
+
2. Except as expressly provided in this Agreement, you may
|
| 277 |
+
not copy, sell, rent, sublicense, transfer, distribute,
|
| 278 |
+
modify, or create derivative works of any portion of the
|
| 279 |
+
SDK. For clarity, you may not distribute or sublicense the
|
| 280 |
+
SDK as a stand-alone product.
|
| 281 |
+
|
| 282 |
+
3. Unless you have an agreement with NVIDIA for this
|
| 283 |
+
purpose, you may not indicate that an application created
|
| 284 |
+
with the SDK is sponsored or endorsed by NVIDIA.
|
| 285 |
+
|
| 286 |
+
4. You may not bypass, disable, or circumvent any
|
| 287 |
+
encryption, security, digital rights management or
|
| 288 |
+
authentication mechanism in the SDK.
|
| 289 |
+
|
| 290 |
+
5. You may not use the SDK in any manner that would cause it
|
| 291 |
+
to become subject to an open source software license. As
|
| 292 |
+
examples, licenses that require as a condition of use,
|
| 293 |
+
modification, and/or distribution that the SDK be:
|
| 294 |
+
|
| 295 |
+
a. Disclosed or distributed in source code form;
|
| 296 |
+
|
| 297 |
+
b. Licensed for the purpose of making derivative works;
|
| 298 |
+
or
|
| 299 |
+
|
| 300 |
+
c. Redistributable at no charge.
|
| 301 |
+
|
| 302 |
+
6. Unless you have an agreement with NVIDIA for this
|
| 303 |
+
purpose, you may not use the SDK with any system or
|
| 304 |
+
application where the use or failure of the system or
|
| 305 |
+
application can reasonably be expected to threaten or
|
| 306 |
+
result in personal injury, death, or catastrophic loss.
|
| 307 |
+
Examples include use in avionics, navigation, military,
|
| 308 |
+
medical, life support or other life critical applications.
|
| 309 |
+
NVIDIA does not design, test or manufacture the SDK for
|
| 310 |
+
these critical uses and NVIDIA shall not be liable to you
|
| 311 |
+
or any third party, in whole or in part, for any claims or
|
| 312 |
+
damages arising from such uses.
|
| 313 |
+
|
| 314 |
+
7. You agree to defend, indemnify and hold harmless NVIDIA
|
| 315 |
+
and its affiliates, and their respective employees,
|
| 316 |
+
contractors, agents, officers and directors, from and
|
| 317 |
+
against any and all claims, damages, obligations, losses,
|
| 318 |
+
liabilities, costs or debt, fines, restitutions and
|
| 319 |
+
expenses (including but not limited to attorney’s fees
|
| 320 |
+
and costs incident to establishing the right of
|
| 321 |
+
indemnification) arising out of or related to your use of
|
| 322 |
+
the SDK outside of the scope of this Agreement, or not in
|
| 323 |
+
compliance with its terms.
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
1.3. Ownership
|
| 327 |
+
|
| 328 |
+
1. NVIDIA or its licensors hold all rights, title and
|
| 329 |
+
interest in and to the SDK and its modifications and
|
| 330 |
+
derivative works, including their respective intellectual
|
| 331 |
+
property rights, subject to your rights described in this
|
| 332 |
+
section. This SDK may include software and materials from
|
| 333 |
+
NVIDIA’s licensors, and these licensors are intended
|
| 334 |
+
third party beneficiaries that may enforce this Agreement
|
| 335 |
+
with respect to their intellectual property rights.
|
| 336 |
+
|
| 337 |
+
2. You hold all rights, title and interest in and to your
|
| 338 |
+
applications and your derivative works of the sample
|
| 339 |
+
source code delivered in the SDK, including their
|
| 340 |
+
respective intellectual property rights, subject to
|
| 341 |
+
NVIDIA’s rights described in this section.
|
| 342 |
+
|
| 343 |
+
3. You may, but don’t have to, provide to NVIDIA
|
| 344 |
+
suggestions, feature requests or other feedback regarding
|
| 345 |
+
the SDK, including possible enhancements or modifications
|
| 346 |
+
to the SDK. For any feedback that you voluntarily provide,
|
| 347 |
+
you hereby grant NVIDIA and its affiliates a perpetual,
|
| 348 |
+
non-exclusive, worldwide, irrevocable license to use,
|
| 349 |
+
reproduce, modify, license, sublicense (through multiple
|
| 350 |
+
tiers of sublicensees), and distribute (through multiple
|
| 351 |
+
tiers of distributors) it without the payment of any
|
| 352 |
+
royalties or fees to you. NVIDIA will use feedback at its
|
| 353 |
+
choice. NVIDIA is constantly looking for ways to improve
|
| 354 |
+
its products, so you may send feedback to NVIDIA through
|
| 355 |
+
the developer portal at https://developer.nvidia.com.
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
1.4. No Warranties
|
| 359 |
+
|
| 360 |
+
THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
|
| 361 |
+
FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
|
| 362 |
+
ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
|
| 363 |
+
OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
|
| 364 |
+
BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
|
| 365 |
+
FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
|
| 366 |
+
ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
|
| 367 |
+
WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
|
| 368 |
+
DEALING OR COURSE OF TRADE.
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
1.5. Limitation of Liability
|
| 372 |
+
|
| 373 |
+
TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
|
| 374 |
+
AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
|
| 375 |
+
PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
|
| 376 |
+
OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
|
| 377 |
+
PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
|
| 378 |
+
WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
|
| 379 |
+
WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
|
| 380 |
+
OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
|
| 381 |
+
PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
|
| 382 |
+
LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES
|
| 383 |
+
TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
|
| 384 |
+
AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
|
| 385 |
+
NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
|
| 386 |
+
LIMIT.
|
| 387 |
+
|
| 388 |
+
These exclusions and limitations of liability shall apply
|
| 389 |
+
regardless if NVIDIA or its affiliates have been advised of
|
| 390 |
+
the possibility of such damages, and regardless of whether a
|
| 391 |
+
remedy fails its essential purpose. These exclusions and
|
| 392 |
+
limitations of liability form an essential basis of the
|
| 393 |
+
bargain between the parties, and, absent any of these
|
| 394 |
+
exclusions or limitations of liability, the provisions of this
|
| 395 |
+
Agreement, including, without limitation, the economic terms,
|
| 396 |
+
would be substantially different.
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
1.6. Termination
|
| 400 |
+
|
| 401 |
+
1. This Agreement will continue to apply until terminated by
|
| 402 |
+
either you or NVIDIA as described below.
|
| 403 |
+
|
| 404 |
+
2. If you want to terminate this Agreement, you may do so by
|
| 405 |
+
stopping to use the SDK.
|
| 406 |
+
|
| 407 |
+
3. NVIDIA may, at any time, terminate this Agreement if:
|
| 408 |
+
|
| 409 |
+
a. (i) you fail to comply with any term of this
|
| 410 |
+
Agreement and the non-compliance is not fixed within
|
| 411 |
+
thirty (30) days following notice from NVIDIA (or
|
| 412 |
+
immediately if you violate NVIDIA’s intellectual
|
| 413 |
+
property rights);
|
| 414 |
+
|
| 415 |
+
b. (ii) you commence or participate in any legal
|
| 416 |
+
proceeding against NVIDIA with respect to the SDK; or
|
| 417 |
+
|
| 418 |
+
c. (iii) NVIDIA decides to no longer provide the SDK in
|
| 419 |
+
a country or, in NVIDIA’s sole discretion, the
|
| 420 |
+
continued use of it is no longer commercially viable.
|
| 421 |
+
|
| 422 |
+
4. Upon any termination of this Agreement, you agree to
|
| 423 |
+
promptly discontinue use of the SDK and destroy all copies
|
| 424 |
+
in your possession or control. Your prior distributions in
|
| 425 |
+
accordance with this Agreement are not affected by the
|
| 426 |
+
termination of this Agreement. Upon written request, you
|
| 427 |
+
will certify in writing that you have complied with your
|
| 428 |
+
commitments under this section. Upon any termination of
|
| 429 |
+
this Agreement all provisions survive except for the
|
| 430 |
+
license grant provisions.
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
1.7. General
|
| 434 |
+
|
| 435 |
+
If you wish to assign this Agreement or your rights and
|
| 436 |
+
obligations, including by merger, consolidation, dissolution
|
| 437 |
+
or operation of law, contact NVIDIA to ask for permission. Any
|
| 438 |
+
attempted assignment not approved by NVIDIA in writing shall
|
| 439 |
+
be void and of no effect. NVIDIA may assign, delegate or
|
| 440 |
+
transfer this Agreement and its rights and obligations, and if
|
| 441 |
+
to a non-affiliate you will be notified.
|
| 442 |
+
|
| 443 |
+
You agree to cooperate with NVIDIA and provide reasonably
|
| 444 |
+
requested information to verify your compliance with this
|
| 445 |
+
Agreement.
|
| 446 |
+
|
| 447 |
+
This Agreement will be governed in all respects by the laws of
|
| 448 |
+
the United States and of the State of Delaware as those laws
|
| 449 |
+
are applied to contracts entered into and performed entirely
|
| 450 |
+
within Delaware by Delaware residents, without regard to the
|
| 451 |
+
conflicts of laws principles. The United Nations Convention on
|
| 452 |
+
Contracts for the International Sale of Goods is specifically
|
| 453 |
+
disclaimed. You agree to all terms of this Agreement in the
|
| 454 |
+
English language.
|
| 455 |
+
|
| 456 |
+
The state or federal courts residing in Santa Clara County,
|
| 457 |
+
California shall have exclusive jurisdiction over any dispute
|
| 458 |
+
or claim arising out of this Agreement. Notwithstanding this,
|
| 459 |
+
you agree that NVIDIA shall still be allowed to apply for
|
| 460 |
+
injunctive remedies or an equivalent type of urgent legal
|
| 461 |
+
relief in any jurisdiction.
|
| 462 |
+
|
| 463 |
+
If any court of competent jurisdiction determines that any
|
| 464 |
+
provision of this Agreement is illegal, invalid or
|
| 465 |
+
unenforceable, such provision will be construed as limited to
|
| 466 |
+
the extent necessary to be consistent with and fully
|
| 467 |
+
enforceable under the law and the remaining provisions will
|
| 468 |
+
remain in full force and effect. Unless otherwise specified,
|
| 469 |
+
remedies are cumulative.
|
| 470 |
+
|
| 471 |
+
Each party acknowledges and agrees that the other is an
|
| 472 |
+
independent contractor in the performance of this Agreement.
|
| 473 |
+
|
| 474 |
+
The SDK has been developed entirely at private expense and is
|
| 475 |
+
“commercial items” consisting of “commercial computer
|
| 476 |
+
software” and “commercial computer software
|
| 477 |
+
documentation” provided with RESTRICTED RIGHTS. Use,
|
| 478 |
+
duplication or disclosure by the U.S. Government or a U.S.
|
| 479 |
+
Government subcontractor is subject to the restrictions in
|
| 480 |
+
this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
|
| 481 |
+
in subparagraphs (c)(1) and (2) of the Commercial Computer
|
| 482 |
+
Software - Restricted Rights clause at FAR 52.227-19, as
|
| 483 |
+
applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
|
| 484 |
+
Expressway, Santa Clara, CA 95051.
|
| 485 |
+
|
| 486 |
+
The SDK is subject to United States export laws and
|
| 487 |
+
regulations. You agree that you will not ship, transfer or
|
| 488 |
+
export the SDK into any country, or use the SDK in any manner,
|
| 489 |
+
prohibited by the United States Bureau of Industry and
|
| 490 |
+
Security or economic sanctions regulations administered by the
|
| 491 |
+
U.S. Department of Treasury’s Office of Foreign Assets
|
| 492 |
+
Control (OFAC), or any applicable export laws, restrictions or
|
| 493 |
+
regulations. These laws include restrictions on destinations,
|
| 494 |
+
end users and end use. By accepting this Agreement, you
|
| 495 |
+
confirm that you are not a resident or citizen of any country
|
| 496 |
+
currently embargoed by the U.S. and that you are not otherwise
|
| 497 |
+
prohibited from receiving the SDK.
|
| 498 |
+
|
| 499 |
+
Any notice delivered by NVIDIA to you under this Agreement
|
| 500 |
+
will be delivered via mail, email or fax. You agree that any
|
| 501 |
+
notices that NVIDIA sends you electronically will satisfy any
|
| 502 |
+
legal communication requirements. Please direct your legal
|
| 503 |
+
notices or other correspondence to NVIDIA Corporation, 2788
|
| 504 |
+
San Tomas Expressway, Santa Clara, California 95051, United
|
| 505 |
+
States of America, Attention: Legal Department.
|
| 506 |
+
|
| 507 |
+
This Agreement and any exhibits incorporated into this
|
| 508 |
+
Agreement constitute the entire agreement of the parties with
|
| 509 |
+
respect to the subject matter of this Agreement and supersede
|
| 510 |
+
all prior negotiations or documentation exchanged between the
|
| 511 |
+
parties relating to this SDK license. Any additional and/or
|
| 512 |
+
conflicting terms on documents issued by you are null, void,
|
| 513 |
+
and invalid. Any amendment or waiver under this Agreement
|
| 514 |
+
shall be in writing and signed by representatives of both
|
| 515 |
+
parties.
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
2. CUDA Toolkit Supplement to Software License Agreement for
|
| 519 |
+
NVIDIA Software Development Kits
|
| 520 |
+
------------------------------------------------------------
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
Release date: August 16, 2018
|
| 524 |
+
-----------------------------
|
| 525 |
+
|
| 526 |
+
The terms in this supplement govern your use of the NVIDIA
|
| 527 |
+
CUDA Toolkit SDK under the terms of your license agreement
|
| 528 |
+
(“Agreement”) as modified by this supplement. Capitalized
|
| 529 |
+
terms used but not defined below have the meaning assigned to
|
| 530 |
+
them in the Agreement.
|
| 531 |
+
|
| 532 |
+
This supplement is an exhibit to the Agreement and is
|
| 533 |
+
incorporated as an integral part of the Agreement. In the
|
| 534 |
+
event of conflict between the terms in this supplement and the
|
| 535 |
+
terms in the Agreement, the terms in this supplement govern.
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
2.1. License Scope
|
| 539 |
+
|
| 540 |
+
The SDK is licensed for you to develop applications only for
|
| 541 |
+
use in systems with NVIDIA GPUs.
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
2.2. Distribution
|
| 545 |
+
|
| 546 |
+
The portions of the SDK that are distributable under the
|
| 547 |
+
Agreement are listed in Attachment A.
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
2.3. Operating Systems
|
| 551 |
+
|
| 552 |
+
Those portions of the SDK designed exclusively for use on the
|
| 553 |
+
Linux or FreeBSD operating systems, or other operating systems
|
| 554 |
+
derived from the source code to these operating systems, may
|
| 555 |
+
be copied and redistributed for use in accordance with this
|
| 556 |
+
Agreement, provided that the object code files are not
|
| 557 |
+
modified in any way (except for unzipping of compressed
|
| 558 |
+
files).
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
2.4. Audio and Video Encoders and Decoders
|
| 562 |
+
|
| 563 |
+
You acknowledge and agree that it is your sole responsibility
|
| 564 |
+
to obtain any additional third-party licenses required to
|
| 565 |
+
make, have made, use, have used, sell, import, and offer for
|
| 566 |
+
sale your products or services that include or incorporate any
|
| 567 |
+
third-party software and content relating to audio and/or
|
| 568 |
+
video encoders and decoders from, including but not limited
|
| 569 |
+
to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
|
| 570 |
+
MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
|
| 571 |
+
under this Agreement any necessary patent or other rights with
|
| 572 |
+
respect to any audio and/or video encoders and decoders.
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
2.5. Licensing
|
| 576 |
+
|
| 577 |
+
If the distribution terms in this Agreement are not suitable
|
| 578 |
+
for your organization, or for any questions regarding this
|
| 579 |
+
Agreement, please contact NVIDIA at
|
| 580 |
+
nvidia-compute-license-questions@nvidia.com.
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
2.6. Attachment A
|
| 584 |
+
|
| 585 |
+
The following portions of the SDK are distributable under the
|
| 586 |
+
Agreement:
|
| 587 |
+
|
| 588 |
+
Component
|
| 589 |
+
|
| 590 |
+
CUDA Runtime
|
| 591 |
+
|
| 592 |
+
Windows
|
| 593 |
+
|
| 594 |
+
cudart.dll, cudart_static.lib, cudadevrt.lib
|
| 595 |
+
|
| 596 |
+
Mac OSX
|
| 597 |
+
|
| 598 |
+
libcudart.dylib, libcudart_static.a, libcudadevrt.a
|
| 599 |
+
|
| 600 |
+
Linux
|
| 601 |
+
|
| 602 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 603 |
+
|
| 604 |
+
Android
|
| 605 |
+
|
| 606 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 607 |
+
|
| 608 |
+
Component
|
| 609 |
+
|
| 610 |
+
CUDA FFT Library
|
| 611 |
+
|
| 612 |
+
Windows
|
| 613 |
+
|
| 614 |
+
cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
|
| 615 |
+
|
| 616 |
+
Mac OSX
|
| 617 |
+
|
| 618 |
+
libcufft.dylib, libcufft_static.a, libcufftw.dylib,
|
| 619 |
+
libcufftw_static.a
|
| 620 |
+
|
| 621 |
+
Linux
|
| 622 |
+
|
| 623 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 624 |
+
libcufftw_static.a
|
| 625 |
+
|
| 626 |
+
Android
|
| 627 |
+
|
| 628 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 629 |
+
libcufftw_static.a
|
| 630 |
+
|
| 631 |
+
Component
|
| 632 |
+
|
| 633 |
+
CUDA BLAS Library
|
| 634 |
+
|
| 635 |
+
Windows
|
| 636 |
+
|
| 637 |
+
cublas.dll, cublasLt.dll
|
| 638 |
+
|
| 639 |
+
Mac OSX
|
| 640 |
+
|
| 641 |
+
libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
|
| 642 |
+
libcublasLt_static.a
|
| 643 |
+
|
| 644 |
+
Linux
|
| 645 |
+
|
| 646 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 647 |
+
libcublasLt_static.a
|
| 648 |
+
|
| 649 |
+
Android
|
| 650 |
+
|
| 651 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 652 |
+
libcublasLt_static.a
|
| 653 |
+
|
| 654 |
+
Component
|
| 655 |
+
|
| 656 |
+
NVIDIA "Drop-in" BLAS Library
|
| 657 |
+
|
| 658 |
+
Windows
|
| 659 |
+
|
| 660 |
+
nvblas.dll
|
| 661 |
+
|
| 662 |
+
Mac OSX
|
| 663 |
+
|
| 664 |
+
libnvblas.dylib
|
| 665 |
+
|
| 666 |
+
Linux
|
| 667 |
+
|
| 668 |
+
libnvblas.so
|
| 669 |
+
|
| 670 |
+
Component
|
| 671 |
+
|
| 672 |
+
CUDA Sparse Matrix Library
|
| 673 |
+
|
| 674 |
+
Windows
|
| 675 |
+
|
| 676 |
+
cusparse.dll, cusparse.lib
|
| 677 |
+
|
| 678 |
+
Mac OSX
|
| 679 |
+
|
| 680 |
+
libcusparse.dylib, libcusparse_static.a
|
| 681 |
+
|
| 682 |
+
Linux
|
| 683 |
+
|
| 684 |
+
libcusparse.so, libcusparse_static.a
|
| 685 |
+
|
| 686 |
+
Android
|
| 687 |
+
|
| 688 |
+
libcusparse.so, libcusparse_static.a
|
| 689 |
+
|
| 690 |
+
Component
|
| 691 |
+
|
| 692 |
+
CUDA Linear Solver Library
|
| 693 |
+
|
| 694 |
+
Windows
|
| 695 |
+
|
| 696 |
+
cusolver.dll, cusolver.lib
|
| 697 |
+
|
| 698 |
+
Mac OSX
|
| 699 |
+
|
| 700 |
+
libcusolver.dylib, libcusolver_static.a
|
| 701 |
+
|
| 702 |
+
Linux
|
| 703 |
+
|
| 704 |
+
libcusolver.so, libcusolver_static.a
|
| 705 |
+
|
| 706 |
+
Android
|
| 707 |
+
|
| 708 |
+
libcusolver.so, libcusolver_static.a
|
| 709 |
+
|
| 710 |
+
Component
|
| 711 |
+
|
| 712 |
+
CUDA Random Number Generation Library
|
| 713 |
+
|
| 714 |
+
Windows
|
| 715 |
+
|
| 716 |
+
curand.dll, curand.lib
|
| 717 |
+
|
| 718 |
+
Mac OSX
|
| 719 |
+
|
| 720 |
+
libcurand.dylib, libcurand_static.a
|
| 721 |
+
|
| 722 |
+
Linux
|
| 723 |
+
|
| 724 |
+
libcurand.so, libcurand_static.a
|
| 725 |
+
|
| 726 |
+
Android
|
| 727 |
+
|
| 728 |
+
libcurand.so, libcurand_static.a
|
| 729 |
+
|
| 730 |
+
Component
|
| 731 |
+
|
| 732 |
+
CUDA Accelerated Graph Library
|
| 733 |
+
|
| 734 |
+
Component
|
| 735 |
+
|
| 736 |
+
NVIDIA Performance Primitives Library
|
| 737 |
+
|
| 738 |
+
Windows
|
| 739 |
+
|
| 740 |
+
nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
|
| 741 |
+
nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
|
| 742 |
+
nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
|
| 743 |
+
nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
|
| 744 |
+
nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
|
| 745 |
+
|
| 746 |
+
Mac OSX
|
| 747 |
+
|
| 748 |
+
libnppc.dylib, libnppc_static.a, libnppial.dylib,
|
| 749 |
+
libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
|
| 750 |
+
libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
|
| 751 |
+
libnppidei_static.a, libnppif.dylib, libnppif_static.a,
|
| 752 |
+
libnppig.dylib, libnppig_static.a, libnppim.dylib,
|
| 753 |
+
libnppisu_static.a, libnppitc.dylib, libnppitc_static.a,
|
| 754 |
+
libnpps.dylib, libnpps_static.a
|
| 755 |
+
|
| 756 |
+
Linux
|
| 757 |
+
|
| 758 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 759 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 760 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 761 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 762 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 763 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 764 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 765 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 766 |
+
|
| 767 |
+
Android
|
| 768 |
+
|
| 769 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 770 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 771 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 772 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 773 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 774 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 775 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 776 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 777 |
+
|
| 778 |
+
Component
|
| 779 |
+
|
| 780 |
+
NVIDIA JPEG Library
|
| 781 |
+
|
| 782 |
+
Linux
|
| 783 |
+
|
| 784 |
+
libnvjpeg.so, libnvjpeg_static.a
|
| 785 |
+
|
| 786 |
+
Component
|
| 787 |
+
|
| 788 |
+
Internal common library required for statically linking to
|
| 789 |
+
cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
|
| 790 |
+
|
| 791 |
+
Mac OSX
|
| 792 |
+
|
| 793 |
+
libculibos.a
|
| 794 |
+
|
| 795 |
+
Linux
|
| 796 |
+
|
| 797 |
+
libculibos.a
|
| 798 |
+
|
| 799 |
+
Component
|
| 800 |
+
|
| 801 |
+
NVIDIA Runtime Compilation Library and Header
|
| 802 |
+
|
| 803 |
+
All
|
| 804 |
+
|
| 805 |
+
nvrtc.h
|
| 806 |
+
|
| 807 |
+
Windows
|
| 808 |
+
|
| 809 |
+
nvrtc.dll, nvrtc-builtins.dll
|
| 810 |
+
|
| 811 |
+
Mac OSX
|
| 812 |
+
|
| 813 |
+
libnvrtc.dylib, libnvrtc-builtins.dylib
|
| 814 |
+
|
| 815 |
+
Linux
|
| 816 |
+
|
| 817 |
+
libnvrtc.so, libnvrtc-builtins.so
|
| 818 |
+
|
| 819 |
+
Component
|
| 820 |
+
|
| 821 |
+
NVIDIA Optimizing Compiler Library
|
| 822 |
+
|
| 823 |
+
Windows
|
| 824 |
+
|
| 825 |
+
nvvm.dll
|
| 826 |
+
|
| 827 |
+
Mac OSX
|
| 828 |
+
|
| 829 |
+
libnvvm.dylib
|
| 830 |
+
|
| 831 |
+
Linux
|
| 832 |
+
|
| 833 |
+
libnvvm.so
|
| 834 |
+
|
| 835 |
+
Component
|
| 836 |
+
|
| 837 |
+
NVIDIA Common Device Math Functions Library
|
| 838 |
+
|
| 839 |
+
Windows
|
| 840 |
+
|
| 841 |
+
libdevice.10.bc
|
| 842 |
+
|
| 843 |
+
Mac OSX
|
| 844 |
+
|
| 845 |
+
libdevice.10.bc
|
| 846 |
+
|
| 847 |
+
Linux
|
| 848 |
+
|
| 849 |
+
libdevice.10.bc
|
| 850 |
+
|
| 851 |
+
Component
|
| 852 |
+
|
| 853 |
+
CUDA Occupancy Calculation Header Library
|
| 854 |
+
|
| 855 |
+
All
|
| 856 |
+
|
| 857 |
+
cuda_occupancy.h
|
| 858 |
+
|
| 859 |
+
Component
|
| 860 |
+
|
| 861 |
+
CUDA Half Precision Headers
|
| 862 |
+
|
| 863 |
+
All
|
| 864 |
+
|
| 865 |
+
cuda_fp16.h, cuda_fp16.hpp
|
| 866 |
+
|
| 867 |
+
Component
|
| 868 |
+
|
| 869 |
+
CUDA Profiling Tools Interface (CUPTI) Library
|
| 870 |
+
|
| 871 |
+
Windows
|
| 872 |
+
|
| 873 |
+
cupti.dll
|
| 874 |
+
|
| 875 |
+
Mac OSX
|
| 876 |
+
|
| 877 |
+
libcupti.dylib
|
| 878 |
+
|
| 879 |
+
Linux
|
| 880 |
+
|
| 881 |
+
libcupti.so
|
| 882 |
+
|
| 883 |
+
Component
|
| 884 |
+
|
| 885 |
+
NVIDIA Tools Extension Library
|
| 886 |
+
|
| 887 |
+
Windows
|
| 888 |
+
|
| 889 |
+
nvToolsExt.dll, nvToolsExt.lib
|
| 890 |
+
|
| 891 |
+
Mac OSX
|
| 892 |
+
|
| 893 |
+
libnvToolsExt.dylib
|
| 894 |
+
|
| 895 |
+
Linux
|
| 896 |
+
|
| 897 |
+
libnvToolsExt.so
|
| 898 |
+
|
| 899 |
+
Component
|
| 900 |
+
|
| 901 |
+
NVIDIA CUDA Driver Libraries
|
| 902 |
+
|
| 903 |
+
Linux
|
| 904 |
+
|
| 905 |
+
libcuda.so, libnvidia-fatbinaryloader.so,
|
| 906 |
+
libnvidia-ptxjitcompiler.so
|
| 907 |
+
|
| 908 |
+
The NVIDIA CUDA Driver Libraries are only distributable in
|
| 909 |
+
applications that meet this criteria:
|
| 910 |
+
|
| 911 |
+
1. The application was developed starting from a NVIDIA CUDA
|
| 912 |
+
container obtained from Docker Hub or the NVIDIA GPU
|
| 913 |
+
Cloud, and
|
| 914 |
+
|
| 915 |
+
2. The resulting application is packaged as a Docker
|
| 916 |
+
container and distributed to users on Docker Hub or the
|
| 917 |
+
NVIDIA GPU Cloud only.
|
| 918 |
+
|
| 919 |
+
|
| 920 |
+
2.7. Attachment B
|
| 921 |
+
|
| 922 |
+
|
| 923 |
+
Additional Licensing Obligations
|
| 924 |
+
|
| 925 |
+
The following third party components included in the SOFTWARE
|
| 926 |
+
are licensed to Licensee pursuant to the following terms and
|
| 927 |
+
conditions:
|
| 928 |
+
|
| 929 |
+
1. Licensee's use of the GDB third party component is
|
| 930 |
+
subject to the terms and conditions of GNU GPL v3:
|
| 931 |
+
|
| 932 |
+
This product includes copyrighted third-party software licensed
|
| 933 |
+
under the terms of the GNU General Public License v3 ("GPL v3").
|
| 934 |
+
All third-party software packages are copyright by their respective
|
| 935 |
+
authors. GPL v3 terms and conditions are hereby incorporated into
|
| 936 |
+
the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
|
| 937 |
+
|
| 938 |
+
Consistent with these licensing requirements, the software
|
| 939 |
+
listed below is provided under the terms of the specified
|
| 940 |
+
open source software licenses. To obtain source code for
|
| 941 |
+
software provided under licenses that require
|
| 942 |
+
redistribution of source code, including the GNU General
|
| 943 |
+
Public License (GPL) and GNU Lesser General Public License
|
| 944 |
+
(LGPL), contact oss-requests@nvidia.com. This offer is
|
| 945 |
+
valid for a period of three (3) years from the date of the
|
| 946 |
+
distribution of this product by NVIDIA CORPORATION.
|
| 947 |
+
|
| 948 |
+
Component License
|
| 949 |
+
CUDA-GDB GPL v3
|
| 950 |
+
|
| 951 |
+
2. Licensee represents and warrants that any and all third
|
| 952 |
+
party licensing and/or royalty payment obligations in
|
| 953 |
+
connection with Licensee's use of the H.264 video codecs
|
| 954 |
+
are solely the responsibility of Licensee.
|
| 955 |
+
|
| 956 |
+
3. Licensee's use of the Thrust library is subject to the
|
| 957 |
+
terms and conditions of the Apache License Version 2.0.
|
| 958 |
+
All third-party software packages are copyright by their
|
| 959 |
+
respective authors. Apache License Version 2.0 terms and
|
| 960 |
+
conditions are hereby incorporated into the Agreement by
|
| 961 |
+
this reference.
|
| 962 |
+
http://www.apache.org/licenses/LICENSE-2.0.html
|
| 963 |
+
|
| 964 |
+
In addition, Licensee acknowledges the following notice:
|
| 965 |
+
Thrust includes source code from the Boost Iterator,
|
| 966 |
+
Tuple, System, and Random Number libraries.
|
| 967 |
+
|
| 968 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 969 |
+
. . . .
|
| 970 |
+
|
| 971 |
+
Permission is hereby granted, free of charge, to any person or
|
| 972 |
+
organization obtaining a copy of the software and accompanying
|
| 973 |
+
documentation covered by this license (the "Software") to use,
|
| 974 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 975 |
+
and to prepare derivative works of the Software, and to permit
|
| 976 |
+
third-parties to whom the Software is furnished to do so, all
|
| 977 |
+
subject to the following:
|
| 978 |
+
|
| 979 |
+
The copyright notices in the Software and this entire statement,
|
| 980 |
+
including the above license grant, this restriction and the following
|
| 981 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 982 |
+
or in part, and all derivative works of the Software, unless such
|
| 983 |
+
copies or derivative works are solely in the form of machine-executable
|
| 984 |
+
object code generated by a source language processor.
|
| 985 |
+
|
| 986 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 987 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 988 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 989 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 990 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 991 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 992 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 993 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 994 |
+
|
| 995 |
+
4. Licensee's use of the LLVM third party component is
|
| 996 |
+
subject to the following terms and conditions:
|
| 997 |
+
|
| 998 |
+
======================================================
|
| 999 |
+
LLVM Release License
|
| 1000 |
+
======================================================
|
| 1001 |
+
University of Illinois/NCSA
|
| 1002 |
+
Open Source License
|
| 1003 |
+
|
| 1004 |
+
Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
|
| 1005 |
+
All rights reserved.
|
| 1006 |
+
|
| 1007 |
+
Developed by:
|
| 1008 |
+
|
| 1009 |
+
LLVM Team
|
| 1010 |
+
|
| 1011 |
+
University of Illinois at Urbana-Champaign
|
| 1012 |
+
|
| 1013 |
+
http://llvm.org
|
| 1014 |
+
|
| 1015 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1016 |
+
of this software and associated documentation files (the "Software"), to
|
| 1017 |
+
deal with the Software without restriction, including without limitation the
|
| 1018 |
+
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
| 1019 |
+
sell copies of the Software, and to permit persons to whom the Software is
|
| 1020 |
+
furnished to do so, subject to the following conditions:
|
| 1021 |
+
|
| 1022 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1023 |
+
this list of conditions and the following disclaimers.
|
| 1024 |
+
|
| 1025 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1026 |
+
notice, this list of conditions and the following disclaimers in the
|
| 1027 |
+
documentation and/or other materials provided with the distribution.
|
| 1028 |
+
|
| 1029 |
+
* Neither the names of the LLVM Team, University of Illinois at Urbana-
|
| 1030 |
+
Champaign, nor the names of its contributors may be used to endorse or
|
| 1031 |
+
promote products derived from this Software without specific prior
|
| 1032 |
+
written permission.
|
| 1033 |
+
|
| 1034 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1035 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1036 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
| 1037 |
+
THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 1038 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 1039 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
| 1040 |
+
DEALINGS WITH THE SOFTWARE.
|
| 1041 |
+
|
| 1042 |
+
5. Licensee's use (e.g. nvprof) of the PCRE third party
|
| 1043 |
+
component is subject to the following terms and
|
| 1044 |
+
conditions:
|
| 1045 |
+
|
| 1046 |
+
------------
|
| 1047 |
+
PCRE LICENCE
|
| 1048 |
+
------------
|
| 1049 |
+
PCRE is a library of functions to support regular expressions whose syntax
|
| 1050 |
+
and semantics are as close as possible to those of the Perl 5 language.
|
| 1051 |
+
Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
|
| 1052 |
+
specified below. The documentation for PCRE, supplied in the "doc"
|
| 1053 |
+
directory, is distributed under the same terms as the software itself. The
|
| 1054 |
+
basic library functions are written in C and are freestanding. Also
|
| 1055 |
+
included in the distribution is a set of C++ wrapper functions, and a just-
|
| 1056 |
+
in-time compiler that can be used to optimize pattern matching. These are
|
| 1057 |
+
both optional features that can be omitted when the library is built.
|
| 1058 |
+
|
| 1059 |
+
THE BASIC LIBRARY FUNCTIONS
|
| 1060 |
+
---------------------------
|
| 1061 |
+
Written by: Philip Hazel
|
| 1062 |
+
Email local part: ph10
|
| 1063 |
+
Email domain: cam.ac.uk
|
| 1064 |
+
University of Cambridge Computing Service,
|
| 1065 |
+
Cambridge, England.
|
| 1066 |
+
Copyright (c) 1997-2012 University of Cambridge
|
| 1067 |
+
All rights reserved.
|
| 1068 |
+
|
| 1069 |
+
PCRE JUST-IN-TIME COMPILATION SUPPORT
|
| 1070 |
+
-------------------------------------
|
| 1071 |
+
Written by: Zoltan Herczeg
|
| 1072 |
+
Email local part: hzmester
|
| 1073 |
+
Emain domain: freemail.hu
|
| 1074 |
+
Copyright(c) 2010-2012 Zoltan Herczeg
|
| 1075 |
+
All rights reserved.
|
| 1076 |
+
|
| 1077 |
+
STACK-LESS JUST-IN-TIME COMPILER
|
| 1078 |
+
--------------------------------
|
| 1079 |
+
Written by: Zoltan Herczeg
|
| 1080 |
+
Email local part: hzmester
|
| 1081 |
+
Emain domain: freemail.hu
|
| 1082 |
+
Copyright(c) 2009-2012 Zoltan Herczeg
|
| 1083 |
+
All rights reserved.
|
| 1084 |
+
|
| 1085 |
+
THE C++ WRAPPER FUNCTIONS
|
| 1086 |
+
-------------------------
|
| 1087 |
+
Contributed by: Google Inc.
|
| 1088 |
+
Copyright (c) 2007-2012, Google Inc.
|
| 1089 |
+
All rights reserved.
|
| 1090 |
+
|
| 1091 |
+
THE "BSD" LICENCE
|
| 1092 |
+
-----------------
|
| 1093 |
+
Redistribution and use in source and binary forms, with or without
|
| 1094 |
+
modification, are permitted provided that the following conditions are met:
|
| 1095 |
+
|
| 1096 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1097 |
+
this list of conditions and the following disclaimer.
|
| 1098 |
+
|
| 1099 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1100 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1101 |
+
documentation and/or other materials provided with the distribution.
|
| 1102 |
+
|
| 1103 |
+
* Neither the name of the University of Cambridge nor the name of Google
|
| 1104 |
+
Inc. nor the names of their contributors may be used to endorse or
|
| 1105 |
+
promote products derived from this software without specific prior
|
| 1106 |
+
written permission.
|
| 1107 |
+
|
| 1108 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 1109 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 1110 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
| 1111 |
+
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
| 1112 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1113 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1114 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
| 1115 |
+
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
| 1116 |
+
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
| 1117 |
+
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1118 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1119 |
+
|
| 1120 |
+
6. Some of the cuBLAS library routines were written by or
|
| 1121 |
+
derived from code written by Vasily Volkov and are subject
|
| 1122 |
+
to the Modified Berkeley Software Distribution License as
|
| 1123 |
+
follows:
|
| 1124 |
+
|
| 1125 |
+
Copyright (c) 2007-2009, Regents of the University of California
|
| 1126 |
+
|
| 1127 |
+
All rights reserved.
|
| 1128 |
+
|
| 1129 |
+
Redistribution and use in source and binary forms, with or without
|
| 1130 |
+
modification, are permitted provided that the following conditions are
|
| 1131 |
+
met:
|
| 1132 |
+
* Redistributions of source code must retain the above copyright
|
| 1133 |
+
notice, this list of conditions and the following disclaimer.
|
| 1134 |
+
* Redistributions in binary form must reproduce the above
|
| 1135 |
+
copyright notice, this list of conditions and the following
|
| 1136 |
+
disclaimer in the documentation and/or other materials provided
|
| 1137 |
+
with the distribution.
|
| 1138 |
+
* Neither the name of the University of California, Berkeley nor
|
| 1139 |
+
the names of its contributors may be used to endorse or promote
|
| 1140 |
+
products derived from this software without specific prior
|
| 1141 |
+
written permission.
|
| 1142 |
+
|
| 1143 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1144 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1145 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1146 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1147 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1148 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1149 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1150 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1151 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1152 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1153 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1154 |
+
|
| 1155 |
+
7. Some of the cuBLAS library routines were written by or
|
| 1156 |
+
derived from code written by Davide Barbieri and are
|
| 1157 |
+
subject to the Modified Berkeley Software Distribution
|
| 1158 |
+
License as follows:
|
| 1159 |
+
|
| 1160 |
+
Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
|
| 1161 |
+
|
| 1162 |
+
All rights reserved.
|
| 1163 |
+
|
| 1164 |
+
Redistribution and use in source and binary forms, with or without
|
| 1165 |
+
modification, are permitted provided that the following conditions are
|
| 1166 |
+
met:
|
| 1167 |
+
* Redistributions of source code must retain the above copyright
|
| 1168 |
+
notice, this list of conditions and the following disclaimer.
|
| 1169 |
+
* Redistributions in binary form must reproduce the above
|
| 1170 |
+
copyright notice, this list of conditions and the following
|
| 1171 |
+
disclaimer in the documentation and/or other materials provided
|
| 1172 |
+
with the distribution.
|
| 1173 |
+
* The name of the author may not be used to endorse or promote
|
| 1174 |
+
products derived from this software without specific prior
|
| 1175 |
+
written permission.
|
| 1176 |
+
|
| 1177 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1178 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1179 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1180 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1181 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1182 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1183 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1184 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1185 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1186 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1187 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1188 |
+
|
| 1189 |
+
8. Some of the cuBLAS library routines were derived from
|
| 1190 |
+
code developed by the University of Tennessee and are
|
| 1191 |
+
subject to the Modified Berkeley Software Distribution
|
| 1192 |
+
License as follows:
|
| 1193 |
+
|
| 1194 |
+
Copyright (c) 2010 The University of Tennessee.
|
| 1195 |
+
|
| 1196 |
+
All rights reserved.
|
| 1197 |
+
|
| 1198 |
+
Redistribution and use in source and binary forms, with or without
|
| 1199 |
+
modification, are permitted provided that the following conditions are
|
| 1200 |
+
met:
|
| 1201 |
+
* Redistributions of source code must retain the above copyright
|
| 1202 |
+
notice, this list of conditions and the following disclaimer.
|
| 1203 |
+
* Redistributions in binary form must reproduce the above
|
| 1204 |
+
copyright notice, this list of conditions and the following
|
| 1205 |
+
disclaimer listed in this license in the documentation and/or
|
| 1206 |
+
other materials provided with the distribution.
|
| 1207 |
+
* Neither the name of the copyright holders nor the names of its
|
| 1208 |
+
contributors may be used to endorse or promote products derived
|
| 1209 |
+
from this software without specific prior written permission.
|
| 1210 |
+
|
| 1211 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1212 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1213 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1214 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1215 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1216 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1217 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1218 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1219 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1220 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1221 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1222 |
+
|
| 1223 |
+
9. Some of the cuBLAS library routines were written by or
|
| 1224 |
+
derived from code written by Jonathan Hogg and are subject
|
| 1225 |
+
to the Modified Berkeley Software Distribution License as
|
| 1226 |
+
follows:
|
| 1227 |
+
|
| 1228 |
+
Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
|
| 1229 |
+
|
| 1230 |
+
All rights reserved.
|
| 1231 |
+
|
| 1232 |
+
Redistribution and use in source and binary forms, with or without
|
| 1233 |
+
modification, are permitted provided that the following conditions are
|
| 1234 |
+
met:
|
| 1235 |
+
* Redistributions of source code must retain the above copyright
|
| 1236 |
+
notice, this list of conditions and the following disclaimer.
|
| 1237 |
+
* Redistributions in binary form must reproduce the above
|
| 1238 |
+
copyright notice, this list of conditions and the following
|
| 1239 |
+
disclaimer in the documentation and/or other materials provided
|
| 1240 |
+
with the distribution.
|
| 1241 |
+
* Neither the name of the STFC nor the names of its contributors
|
| 1242 |
+
may be used to endorse or promote products derived from this
|
| 1243 |
+
software without specific prior written permission.
|
| 1244 |
+
|
| 1245 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1246 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1247 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1248 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
|
| 1249 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1250 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1251 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
| 1252 |
+
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| 1253 |
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
| 1254 |
+
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
| 1255 |
+
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1256 |
+
|
| 1257 |
+
10. Some of the cuBLAS library routines were written by or
|
| 1258 |
+
derived from code written by Ahmad M. Abdelfattah, David
|
| 1259 |
+
Keyes, and Hatem Ltaief, and are subject to the Apache
|
| 1260 |
+
License, Version 2.0, as follows:
|
| 1261 |
+
|
| 1262 |
+
-- (C) Copyright 2013 King Abdullah University of Science and Technology
|
| 1263 |
+
Authors:
|
| 1264 |
+
Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa)
|
| 1265 |
+
David Keyes (david.keyes@kaust.edu.sa)
|
| 1266 |
+
Hatem Ltaief (hatem.ltaief@kaust.edu.sa)
|
| 1267 |
+
|
| 1268 |
+
Redistribution and use in source and binary forms, with or without
|
| 1269 |
+
modification, are permitted provided that the following conditions
|
| 1270 |
+
are met:
|
| 1271 |
+
|
| 1272 |
+
* Redistributions of source code must retain the above copyright
|
| 1273 |
+
notice, this list of conditions and the following disclaimer.
|
| 1274 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1275 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1276 |
+
documentation and/or other materials provided with the distribution.
|
| 1277 |
+
* Neither the name of the King Abdullah University of Science and
|
| 1278 |
+
Technology nor the names of its contributors may be used to endorse
|
| 1279 |
+
or promote products derived from this software without specific prior
|
| 1280 |
+
written permission.
|
| 1281 |
+
|
| 1282 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1283 |
+
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1284 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1285 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1286 |
+
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1287 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1288 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1289 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1290 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1291 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1292 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
|
| 1293 |
+
|
| 1294 |
+
11. Some of the cuSPARSE library routines were written by or
|
| 1295 |
+
derived from code written by Li-Wen Chang and are subject
|
| 1296 |
+
to the NCSA Open Source License as follows:
|
| 1297 |
+
|
| 1298 |
+
Copyright (c) 2012, University of Illinois.
|
| 1299 |
+
|
| 1300 |
+
All rights reserved.
|
| 1301 |
+
|
| 1302 |
+
Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
|
| 1303 |
+
|
| 1304 |
+
Permission is hereby granted, free of charge, to any person obtaining
|
| 1305 |
+
a copy of this software and associated documentation files (the
|
| 1306 |
+
"Software"), to deal with the Software without restriction, including
|
| 1307 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 1308 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 1309 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 1310 |
+
the following conditions:
|
| 1311 |
+
* Redistributions of source code must retain the above copyright
|
| 1312 |
+
notice, this list of conditions and the following disclaimer.
|
| 1313 |
+
* Redistributions in binary form must reproduce the above
|
| 1314 |
+
copyright notice, this list of conditions and the following
|
| 1315 |
+
disclaimers in the documentation and/or other materials provided
|
| 1316 |
+
with the distribution.
|
| 1317 |
+
* Neither the names of IMPACT Group, University of Illinois, nor
|
| 1318 |
+
the names of its contributors may be used to endorse or promote
|
| 1319 |
+
products derived from this Software without specific prior
|
| 1320 |
+
written permission.
|
| 1321 |
+
|
| 1322 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1323 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1324 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 1325 |
+
NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
|
| 1326 |
+
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
| 1327 |
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
| 1328 |
+
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
|
| 1329 |
+
SOFTWARE.
|
| 1330 |
+
|
| 1331 |
+
12. Some of the cuRAND library routines were written by or
|
| 1332 |
+
derived from code written by Mutsuo Saito and Makoto
|
| 1333 |
+
Matsumoto and are subject to the following license:
|
| 1334 |
+
|
| 1335 |
+
Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
|
| 1336 |
+
University. All rights reserved.
|
| 1337 |
+
|
| 1338 |
+
Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
|
| 1339 |
+
University and University of Tokyo. All rights reserved.
|
| 1340 |
+
|
| 1341 |
+
Redistribution and use in source and binary forms, with or without
|
| 1342 |
+
modification, are permitted provided that the following conditions are
|
| 1343 |
+
met:
|
| 1344 |
+
* Redistributions of source code must retain the above copyright
|
| 1345 |
+
notice, this list of conditions and the following disclaimer.
|
| 1346 |
+
* Redistributions in binary form must reproduce the above
|
| 1347 |
+
copyright notice, this list of conditions and the following
|
| 1348 |
+
disclaimer in the documentation and/or other materials provided
|
| 1349 |
+
with the distribution.
|
| 1350 |
+
* Neither the name of the Hiroshima University nor the names of
|
| 1351 |
+
its contributors may be used to endorse or promote products
|
| 1352 |
+
derived from this software without specific prior written
|
| 1353 |
+
permission.
|
| 1354 |
+
|
| 1355 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1356 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1357 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1358 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1359 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1360 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1361 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1362 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1363 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1364 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1365 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1366 |
+
|
| 1367 |
+
13. Some of the cuRAND library routines were derived from
|
| 1368 |
+
code developed by D. E. Shaw Research and are subject to
|
| 1369 |
+
the following license:
|
| 1370 |
+
|
| 1371 |
+
Copyright 2010-2011, D. E. Shaw Research.
|
| 1372 |
+
|
| 1373 |
+
All rights reserved.
|
| 1374 |
+
|
| 1375 |
+
Redistribution and use in source and binary forms, with or without
|
| 1376 |
+
modification, are permitted provided that the following conditions are
|
| 1377 |
+
met:
|
| 1378 |
+
* Redistributions of source code must retain the above copyright
|
| 1379 |
+
notice, this list of conditions, and the following disclaimer.
|
| 1380 |
+
* Redistributions in binary form must reproduce the above
|
| 1381 |
+
copyright notice, this list of conditions, and the following
|
| 1382 |
+
disclaimer in the documentation and/or other materials provided
|
| 1383 |
+
with the distribution.
|
| 1384 |
+
* Neither the name of D. E. Shaw Research nor the names of its
|
| 1385 |
+
contributors may be used to endorse or promote products derived
|
| 1386 |
+
from this software without specific prior written permission.
|
| 1387 |
+
|
| 1388 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1389 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1390 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1391 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1392 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1393 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1394 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1395 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1396 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1397 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1398 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1399 |
+
|
| 1400 |
+
14. Some of the Math library routines were written by or
|
| 1401 |
+
derived from code developed by Norbert Juffa and are
|
| 1402 |
+
subject to the following license:
|
| 1403 |
+
|
| 1404 |
+
Copyright (c) 2015-2017, Norbert Juffa
|
| 1405 |
+
All rights reserved.
|
| 1406 |
+
|
| 1407 |
+
Redistribution and use in source and binary forms, with or without
|
| 1408 |
+
modification, are permitted provided that the following conditions
|
| 1409 |
+
are met:
|
| 1410 |
+
|
| 1411 |
+
1. Redistributions of source code must retain the above copyright
|
| 1412 |
+
notice, this list of conditions and the following disclaimer.
|
| 1413 |
+
|
| 1414 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 1415 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1416 |
+
documentation and/or other materials provided with the distribution.
|
| 1417 |
+
|
| 1418 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1419 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1420 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1421 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1422 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1423 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1424 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1425 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1426 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1427 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1428 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1429 |
+
|
| 1430 |
+
15. Licensee's use of the lz4 third party component is
|
| 1431 |
+
subject to the following terms and conditions:
|
| 1432 |
+
|
| 1433 |
+
Copyright (C) 2011-2013, Yann Collet.
|
| 1434 |
+
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
|
| 1435 |
+
|
| 1436 |
+
Redistribution and use in source and binary forms, with or without
|
| 1437 |
+
modification, are permitted provided that the following conditions are
|
| 1438 |
+
met:
|
| 1439 |
+
|
| 1440 |
+
* Redistributions of source code must retain the above copyright
|
| 1441 |
+
notice, this list of conditions and the following disclaimer.
|
| 1442 |
+
* Redistributions in binary form must reproduce the above
|
| 1443 |
+
copyright notice, this list of conditions and the following disclaimer
|
| 1444 |
+
in the documentation and/or other materials provided with the
|
| 1445 |
+
distribution.
|
| 1446 |
+
|
| 1447 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1448 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1449 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1450 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1451 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1452 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1453 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1454 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1455 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1456 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1457 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1458 |
+
|
| 1459 |
+
16. The NPP library uses code from the Boost Math Toolkit,
|
| 1460 |
+
and is subject to the following license:
|
| 1461 |
+
|
| 1462 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 1463 |
+
. . . .
|
| 1464 |
+
|
| 1465 |
+
Permission is hereby granted, free of charge, to any person or
|
| 1466 |
+
organization obtaining a copy of the software and accompanying
|
| 1467 |
+
documentation covered by this license (the "Software") to use,
|
| 1468 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 1469 |
+
and to prepare derivative works of the Software, and to permit
|
| 1470 |
+
third-parties to whom the Software is furnished to do so, all
|
| 1471 |
+
subject to the following:
|
| 1472 |
+
|
| 1473 |
+
The copyright notices in the Software and this entire statement,
|
| 1474 |
+
including the above license grant, this restriction and the following
|
| 1475 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 1476 |
+
or in part, and all derivative works of the Software, unless such
|
| 1477 |
+
copies or derivative works are solely in the form of machine-executable
|
| 1478 |
+
object code generated by a source language processor.
|
| 1479 |
+
|
| 1480 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1481 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1482 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 1483 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 1484 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 1485 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 1486 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 1487 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 1488 |
+
|
| 1489 |
+
17. Portions of the Nsight Eclipse Edition is subject to the
|
| 1490 |
+
following license:
|
| 1491 |
+
|
| 1492 |
+
The Eclipse Foundation makes available all content in this plug-in
|
| 1493 |
+
("Content"). Unless otherwise indicated below, the Content is provided
|
| 1494 |
+
to you under the terms and conditions of the Eclipse Public License
|
| 1495 |
+
Version 1.0 ("EPL"). A copy of the EPL is available at http://
|
| 1496 |
+
www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
|
| 1497 |
+
will mean the Content.
|
| 1498 |
+
|
| 1499 |
+
If you did not receive this Content directly from the Eclipse
|
| 1500 |
+
Foundation, the Content is being redistributed by another party
|
| 1501 |
+
("Redistributor") and different terms and conditions may apply to your
|
| 1502 |
+
use of any object code in the Content. Check the Redistributor's
|
| 1503 |
+
license that was provided with the Content. If no such license exists,
|
| 1504 |
+
contact the Redistributor. Unless otherwise indicated below, the terms
|
| 1505 |
+
and conditions of the EPL still apply to any source code in the
|
| 1506 |
+
Content and such source code may be obtained at http://www.eclipse.org.
|
| 1507 |
+
|
| 1508 |
+
18. Some of the cuBLAS library routines uses code from
|
| 1509 |
+
OpenAI, which is subject to the following license:
|
| 1510 |
+
|
| 1511 |
+
License URL
|
| 1512 |
+
https://github.com/openai/openai-gemm/blob/master/LICENSE
|
| 1513 |
+
|
| 1514 |
+
License Text
|
| 1515 |
+
The MIT License
|
| 1516 |
+
|
| 1517 |
+
Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
|
| 1518 |
+
|
| 1519 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1520 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 1521 |
+
in the Software without restriction, including without limitation the rights
|
| 1522 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 1523 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 1524 |
+
furnished to do so, subject to the following conditions:
|
| 1525 |
+
|
| 1526 |
+
The above copyright notice and this permission notice shall be included in
|
| 1527 |
+
all copies or substantial portions of the Software.
|
| 1528 |
+
|
| 1529 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1530 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1531 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1532 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1533 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1534 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
| 1535 |
+
THE SOFTWARE.
|
| 1536 |
+
|
| 1537 |
+
19. Licensee's use of the Visual Studio Setup Configuration
|
| 1538 |
+
Samples is subject to the following license:
|
| 1539 |
+
|
| 1540 |
+
The MIT License (MIT)
|
| 1541 |
+
Copyright (C) Microsoft Corporation. All rights reserved.
|
| 1542 |
+
|
| 1543 |
+
Permission is hereby granted, free of charge, to any person
|
| 1544 |
+
obtaining a copy of this software and associated documentation
|
| 1545 |
+
files (the "Software"), to deal in the Software without restriction,
|
| 1546 |
+
including without limitation the rights to use, copy, modify, merge,
|
| 1547 |
+
publish, distribute, sublicense, and/or sell copies of the Software,
|
| 1548 |
+
and to permit persons to whom the Software is furnished to do so,
|
| 1549 |
+
subject to the following conditions:
|
| 1550 |
+
|
| 1551 |
+
The above copyright notice and this permission notice shall be included
|
| 1552 |
+
in all copies or substantial portions of the Software.
|
| 1553 |
+
|
| 1554 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 1555 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1556 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1557 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1558 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1559 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 1560 |
+
|
| 1561 |
+
20. Licensee's use of linmath.h header for CPU functions for
|
| 1562 |
+
GL vector/matrix operations from lunarG is subject to the
|
| 1563 |
+
Apache License Version 2.0.
|
| 1564 |
+
|
| 1565 |
+
21. The DX12-CUDA sample uses the d3dx12.h header, which is
|
| 1566 |
+
subject to the MIT license .
|
| 1567 |
+
|
| 1568 |
+
-----------------
|
evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/METADATA
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: nvidia-cuda-cupti-cu12
|
| 3 |
+
Version: 12.1.105
|
| 4 |
+
Summary: CUDA profiling tools runtime libs.
|
| 5 |
+
Home-page: https://developer.nvidia.com/cuda-zone
|
| 6 |
+
Author: Nvidia CUDA Installer Team
|
| 7 |
+
Author-email: cuda_installer@nvidia.com
|
| 8 |
+
License: NVIDIA Proprietary Software
|
| 9 |
+
Keywords: cuda,nvidia,runtime,machine learning,deep learning
|
| 10 |
+
Classifier: Development Status :: 4 - Beta
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Intended Audience :: Education
|
| 13 |
+
Classifier: Intended Audience :: Science/Research
|
| 14 |
+
Classifier: License :: Other/Proprietary License
|
| 15 |
+
Classifier: Natural Language :: English
|
| 16 |
+
Classifier: Programming Language :: Python :: 3
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.5
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 24 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 25 |
+
Classifier: Topic :: Scientific/Engineering
|
| 26 |
+
Classifier: Topic :: Scientific/Engineering :: Mathematics
|
| 27 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 28 |
+
Classifier: Topic :: Software Development
|
| 29 |
+
Classifier: Topic :: Software Development :: Libraries
|
| 30 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 31 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 32 |
+
Requires-Python: >=3
|
| 33 |
+
License-File: License.txt
|
| 34 |
+
|
| 35 |
+
Provides libraries to enable third party tools using GPU profiling APIs.
|
evalkit_internvl/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 2 |
+
nvidia/__pycache__/__init__.cpython-310.pyc,,
|
| 3 |
+
nvidia/cuda_cupti/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 4 |
+
nvidia/cuda_cupti/__pycache__/__init__.cpython-310.pyc,,
|
| 5 |
+
nvidia/cuda_cupti/include/Openacc/cupti_openacc.h,sha256=Z0OM5e_hbd3cxdXyn3SCHqBBQawLg4QORnlm57Cr2-M,3513
|
| 6 |
+
nvidia/cuda_cupti/include/Openmp/cupti_openmp.h,sha256=E1WNmeb_7HaUSmBegtUNe4IV1i7pXeNxgzIlyKn1zrM,3491
|
| 7 |
+
nvidia/cuda_cupti/include/Openmp/omp-tools.h,sha256=AmuC_xPC7VPu3B-W4PmXuCNufFawhY8PjNXePaQFAOg,37403
|
| 8 |
+
nvidia/cuda_cupti/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 9 |
+
nvidia/cuda_cupti/include/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
nvidia/cuda_cupti/include/cuda_stdint.h,sha256=XbFOk9CtJjKqk7PpYNqbSVsDxAsVM8avA4rWpPi0BjQ,4093
|
| 11 |
+
nvidia/cuda_cupti/include/cupti.h,sha256=JkVyAGTIMYzwm62dfVqas3nMcILhgP_Wdz6fh4_NED0,4697
|
| 12 |
+
nvidia/cuda_cupti/include/cupti_activity.h,sha256=qVVazvOJZbDMzvbqgS8nmaHN4gaxAWO2HA_8D7-Vpiw,311866
|
| 13 |
+
nvidia/cuda_cupti/include/cupti_callbacks.h,sha256=-a47AoM4HoU5IuCCB_L-6lZRdrkDAC4XXLJuoUqojeY,26587
|
| 14 |
+
nvidia/cuda_cupti/include/cupti_checkpoint.h,sha256=rTz8JoWxqESBXyZWUhZJGm4xeYcx4OJOtJ7Ld13T_b0,5264
|
| 15 |
+
nvidia/cuda_cupti/include/cupti_driver_cbid.h,sha256=Uc74JDlJN_3qI04l4gkGzYbB3Ki0l0IgZILZO0WXtVs,70346
|
| 16 |
+
nvidia/cuda_cupti/include/cupti_events.h,sha256=oHIOKSsE5ZAot5tZK-sbS2K9xcgiXBXTZZDkPQuiaNw,52639
|
| 17 |
+
nvidia/cuda_cupti/include/cupti_metrics.h,sha256=iLAOlDrcbHEsIIUmgq0Tp1ZOY9O3Ot3wj2-bI8iYbSs,32148
|
| 18 |
+
nvidia/cuda_cupti/include/cupti_nvtx_cbid.h,sha256=_azPtR1g4qivvX7qbvHRUg0RHCWF7iEOJyHMN9qZe9E,5912
|
| 19 |
+
nvidia/cuda_cupti/include/cupti_pcsampling.h,sha256=uT_DtFN0Bye6ADtxfKXUAc8BcrFefotf-VtTuKQGJx0,32395
|
| 20 |
+
nvidia/cuda_cupti/include/cupti_pcsampling_util.h,sha256=gEiMBes3mtpDJqauxqUtfe0csY4J31qpdg2Cp8On95E,13060
|
| 21 |
+
nvidia/cuda_cupti/include/cupti_profiler_target.h,sha256=LWNFuYyotgGhCKY7TS48uVGxjeuOAuANWSnB8yfOfvo,31596
|
| 22 |
+
nvidia/cuda_cupti/include/cupti_result.h,sha256=sOBZCRuRVHvcbIyDlzyLeina5YXwIQH21rVr3FPoB6M,12026
|
| 23 |
+
nvidia/cuda_cupti/include/cupti_runtime_cbid.h,sha256=ZpomdRK7Fhn_NZYiiq5b3AyNZX3gznot-aX4dk-tsZI,44182
|
| 24 |
+
nvidia/cuda_cupti/include/cupti_target.h,sha256=x4Vz1Upb6m9ixmVpmGaKQldDWYQI3OZ-ocEXGzNK0EE,1263
|
| 25 |
+
nvidia/cuda_cupti/include/cupti_version.h,sha256=7XDJSIWpeJU8lrp0cOyma7dXXSGK4bdT1G8akxu8D_Q,4344
|
| 26 |
+
nvidia/cuda_cupti/include/generated_cudaGL_meta.h,sha256=dfd2QuaRdEjbStOKvaQLi1Md_qrpRQh8PfyZznJ8bWY,3115
|
| 27 |
+
nvidia/cuda_cupti/include/generated_cudaVDPAU_meta.h,sha256=fAedsoQxaU3hIAApAWDOKsa9kgcuQw4tdyf8klLm-3k,1453
|
| 28 |
+
nvidia/cuda_cupti/include/generated_cuda_gl_interop_meta.h,sha256=LXOqvQCej0sCgAT1LUKKYZ466EFxN4hIwf9oIhXOLF0,2250
|
| 29 |
+
nvidia/cuda_cupti/include/generated_cuda_meta.h,sha256=qZhsMxL-CURycqC2YkkioSDiD5pA8q22GOje2bOeviU,87152
|
| 30 |
+
nvidia/cuda_cupti/include/generated_cuda_runtime_api_meta.h,sha256=YCkUMRP93XtDGLEH7DOJCUuhdRcVsO1vQwF_K9AuDfI,64332
|
| 31 |
+
nvidia/cuda_cupti/include/generated_cuda_vdpau_interop_meta.h,sha256=8OLqWN26aEYpTWUXtbHJvA5GYhVv3ybYVOTW7yK37z8,1367
|
| 32 |
+
nvidia/cuda_cupti/include/generated_cudart_removed_meta.h,sha256=X3I5WXmhtsJNNlgY7coJ5vg4t11G5FRR6Xo7MboIeck,5172
|
| 33 |
+
nvidia/cuda_cupti/include/generated_nvtx_meta.h,sha256=YHb_RD8g3s4m8PJn7Z0wnxvUHarl7BOAX5ADr-BL3HI,7513
|
| 34 |
+
nvidia/cuda_cupti/include/nvperf_common.h,sha256=MMZrDvDdtG2DSS0h2B8AR1aPyt6UmeWwH-Dc_XsxaHo,10422
|
| 35 |
+
nvidia/cuda_cupti/include/nvperf_cuda_host.h,sha256=xEapxwvdl96uV-On-c8LY2lvwVNfIjq-rAgj9_dYbqo,8299
|
| 36 |
+
nvidia/cuda_cupti/include/nvperf_host.h,sha256=3mcgAEbB9uaDfWheRqC8gLlTiTggc_auV8PE9dTShx4,66289
|
| 37 |
+
nvidia/cuda_cupti/include/nvperf_target.h,sha256=jVR2zEO2KmMta0C-qTGuS9V6rhVyMNnRnOU4QJSiPrc,21476
|
| 38 |
+
nvidia/cuda_cupti/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 39 |
+
nvidia/cuda_cupti/lib/__pycache__/__init__.cpython-310.pyc,,
|
| 40 |
+
nvidia/cuda_cupti/lib/libcheckpoint.so,sha256=Fib_EZWCvKRmBbxtSXaat1MUuZk91ke9ZKkN7HR7yEM,1534104
|
| 41 |
+
nvidia/cuda_cupti/lib/libcupti.so.12,sha256=q8YxAOnPUWuO0folNUrlPb_o30g4rFJdjXODMsIZjcI,7419504
|
| 42 |
+
nvidia/cuda_cupti/lib/libnvperf_host.so,sha256=lc7EKudwwfIlHSBLA-EtVv2y5VYeSJjAe0A4L-JHRYk,28636664
|
| 43 |
+
nvidia/cuda_cupti/lib/libnvperf_target.so,sha256=-iWHyNIR-8hei4jMoLzr54yMxAyBsMN2POV6yeY_Bmk,5895416
|
| 44 |
+
nvidia/cuda_cupti/lib/libpcsamplingutil.so,sha256=XGCctMdV5khc1HtLdK_imh8aepM88GJz0q6CcPJtb3k,912728
|
| 45 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 46 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
|
| 47 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/METADATA,sha256=xrOx7eliZP6--5Pla2AJW0e8XI3H0XDb9ZEN7DXghPs,1553
|
| 48 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD,,
|
| 49 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 50 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106
|
| 51 |
+
nvidia_cuda_cupti_cu12-12.1.105.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7
|