Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +2 -0
- mgm/bin/python3 +3 -0
- mgm/lib/python3.10/site-packages/altair/__pycache__/__init__.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/__pycache__/_magics.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/__pycache__/theme.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/expr/__init__.py +2034 -0
- mgm/lib/python3.10/site-packages/altair/expr/consts.py +13 -0
- mgm/lib/python3.10/site-packages/altair/expr/core.py +282 -0
- mgm/lib/python3.10/site-packages/altair/expr/funcs.py +167 -0
- mgm/lib/python3.10/site-packages/altair/utils/__init__.py +37 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_dfi_types.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_importers.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_show.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_transformed_data.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_vegafusion_data.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/compiler.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/core.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/data.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/deprecation.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/display.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/execeval.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/html.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/mimebundle.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/plugin_registry.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/save.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/schemapi.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/selection.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/__pycache__/server.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/utils/_dfi_types.py +164 -0
- mgm/lib/python3.10/site-packages/altair/utils/_importers.py +113 -0
- mgm/lib/python3.10/site-packages/altair/utils/_show.py +75 -0
- mgm/lib/python3.10/site-packages/altair/utils/_transformed_data.py +567 -0
- mgm/lib/python3.10/site-packages/altair/utils/_vegafusion_data.py +304 -0
- mgm/lib/python3.10/site-packages/altair/utils/compiler.py +12 -0
- mgm/lib/python3.10/site-packages/altair/utils/core.py +981 -0
- mgm/lib/python3.10/site-packages/altair/utils/data.py +442 -0
- mgm/lib/python3.10/site-packages/altair/utils/deprecation.py +196 -0
- mgm/lib/python3.10/site-packages/altair/utils/display.py +232 -0
- mgm/lib/python3.10/site-packages/altair/utils/html.py +411 -0
- mgm/lib/python3.10/site-packages/altair/utils/mimebundle.py +377 -0
- mgm/lib/python3.10/site-packages/altair/utils/plugin_registry.py +290 -0
- mgm/lib/python3.10/site-packages/altair/utils/save.py +224 -0
- mgm/lib/python3.10/site-packages/altair/utils/schemapi.py +1616 -0
- mgm/lib/python3.10/site-packages/altair/utils/selection.py +130 -0
- mgm/lib/python3.10/site-packages/altair/vegalite/__init__.py +2 -0
- mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/__init__.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/api.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/data.cpython-310.pyc +0 -0
- mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/display.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -1096,3 +1096,5 @@ mgm/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.10 filter=lfs dif
|
|
| 1096 |
mgm/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolverMg.so.11 filter=lfs diff=lfs merge=lfs -text
|
| 1097 |
mgm/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so.11.2 filter=lfs diff=lfs merge=lfs -text
|
| 1098 |
mgm/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_cnn.so.9 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 1096 |
mgm/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolverMg.so.11 filter=lfs diff=lfs merge=lfs -text
|
| 1097 |
mgm/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so.11.2 filter=lfs diff=lfs merge=lfs -text
|
| 1098 |
mgm/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_cnn.so.9 filter=lfs diff=lfs merge=lfs -text
|
| 1099 |
+
openflamingo/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_adv_infer.so.8 filter=lfs diff=lfs merge=lfs -text
|
| 1100 |
+
mgm/bin/python3 filter=lfs diff=lfs merge=lfs -text
|
mgm/bin/python3
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:78efd64775da32a2ca7f4de60398e0f1ccf02988c0a002cc8f418a9fc8df5744
|
| 3 |
+
size 17225608
|
mgm/lib/python3.10/site-packages/altair/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (13.6 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/__pycache__/_magics.cpython-310.pyc
ADDED
|
Binary file (2.94 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/__pycache__/theme.cpython-310.pyc
ADDED
|
Binary file (7.5 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/expr/__init__.py
ADDED
|
@@ -0,0 +1,2034 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The contents of this file are automatically written by
|
| 2 |
+
# tools/generate_schema_wrapper.py. Do not modify directly.
|
| 3 |
+
|
| 4 |
+
"""Tools for creating transform & filter expressions with a python syntax."""
|
| 5 |
+
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
import sys
|
| 9 |
+
from typing import TYPE_CHECKING, Any
|
| 10 |
+
|
| 11 |
+
from altair.expr.core import ConstExpression, FunctionExpression
|
| 12 |
+
from altair.vegalite.v5.schema.core import ExprRef as _ExprRef
|
| 13 |
+
|
| 14 |
+
if sys.version_info >= (3, 12):
|
| 15 |
+
from typing import override
|
| 16 |
+
else:
|
| 17 |
+
from typing_extensions import override
|
| 18 |
+
|
| 19 |
+
if TYPE_CHECKING:
|
| 20 |
+
from altair.expr.core import Expression, IntoExpression
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class _ExprMeta(type):
|
| 24 |
+
"""
|
| 25 |
+
Metaclass for :class:`expr`.
|
| 26 |
+
|
| 27 |
+
Currently providing read-only class properties, representing JavaScript constants.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
@property
|
| 31 |
+
def NaN(cls) -> Expression:
|
| 32 |
+
"""Not a number (same as JavaScript literal NaN)."""
|
| 33 |
+
return ConstExpression("NaN")
|
| 34 |
+
|
| 35 |
+
@property
|
| 36 |
+
def LN10(cls) -> Expression:
|
| 37 |
+
"""The natural log of 10 (alias to Math.LN10)."""
|
| 38 |
+
return ConstExpression("LN10")
|
| 39 |
+
|
| 40 |
+
@property
|
| 41 |
+
def E(cls) -> Expression:
|
| 42 |
+
"""The transcendental number e (alias to Math.E)."""
|
| 43 |
+
return ConstExpression("E")
|
| 44 |
+
|
| 45 |
+
@property
|
| 46 |
+
def LOG10E(cls) -> Expression:
|
| 47 |
+
"""The base 10 logarithm e (alias to Math.LOG10E)."""
|
| 48 |
+
return ConstExpression("LOG10E")
|
| 49 |
+
|
| 50 |
+
@property
|
| 51 |
+
def LOG2E(cls) -> Expression:
|
| 52 |
+
"""The base 2 logarithm of e (alias to Math.LOG2E)."""
|
| 53 |
+
return ConstExpression("LOG2E")
|
| 54 |
+
|
| 55 |
+
@property
|
| 56 |
+
def SQRT1_2(cls) -> Expression:
|
| 57 |
+
"""The square root of 0.5 (alias to Math.SQRT1_2)."""
|
| 58 |
+
return ConstExpression("SQRT1_2")
|
| 59 |
+
|
| 60 |
+
@property
|
| 61 |
+
def LN2(cls) -> Expression:
|
| 62 |
+
"""The natural log of 2 (alias to Math.LN2)."""
|
| 63 |
+
return ConstExpression("LN2")
|
| 64 |
+
|
| 65 |
+
@property
|
| 66 |
+
def SQRT2(cls) -> Expression:
|
| 67 |
+
"""The square root of 2 (alias to Math.SQRT1_2)."""
|
| 68 |
+
return ConstExpression("SQRT2")
|
| 69 |
+
|
| 70 |
+
@property
|
| 71 |
+
def PI(cls) -> Expression:
|
| 72 |
+
"""The transcendental number pi (alias to Math.PI)."""
|
| 73 |
+
return ConstExpression("PI")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class expr(_ExprRef, metaclass=_ExprMeta):
|
| 77 |
+
"""
|
| 78 |
+
Utility providing *constants* and *classmethods* to construct expressions.
|
| 79 |
+
|
| 80 |
+
`Expressions`_ can be used to write basic formulas that enable custom interactions.
|
| 81 |
+
|
| 82 |
+
Alternatively, an `inline expression`_ may be defined via :class:`expr()`.
|
| 83 |
+
|
| 84 |
+
Parameters
|
| 85 |
+
----------
|
| 86 |
+
expr: str
|
| 87 |
+
A `vega expression`_ string.
|
| 88 |
+
|
| 89 |
+
Returns
|
| 90 |
+
-------
|
| 91 |
+
``ExprRef``
|
| 92 |
+
|
| 93 |
+
.. _Expressions:
|
| 94 |
+
https://altair-viz.github.io/user_guide/interactions.html#expressions
|
| 95 |
+
.. _inline expression:
|
| 96 |
+
https://altair-viz.github.io/user_guide/interactions.html#inline-expressions
|
| 97 |
+
.. _vega expression:
|
| 98 |
+
https://vega.github.io/vega/docs/expressions/
|
| 99 |
+
|
| 100 |
+
Examples
|
| 101 |
+
--------
|
| 102 |
+
>>> import altair as alt
|
| 103 |
+
|
| 104 |
+
>>> bind_range = alt.binding_range(min=100, max=300, name="Slider value: ")
|
| 105 |
+
>>> param_width = alt.param(bind=bind_range, name="param_width")
|
| 106 |
+
>>> param_color = alt.param(
|
| 107 |
+
... expr=alt.expr.if_(param_width < 200, "red", "black"),
|
| 108 |
+
... name="param_color",
|
| 109 |
+
... )
|
| 110 |
+
>>> y = alt.Y("yval").axis(titleColor=param_color)
|
| 111 |
+
|
| 112 |
+
>>> y
|
| 113 |
+
Y({
|
| 114 |
+
axis: {'titleColor': Parameter('param_color', VariableParameter({
|
| 115 |
+
expr: if((param_width < 200),'red','black'),
|
| 116 |
+
name: 'param_color'
|
| 117 |
+
}))},
|
| 118 |
+
shorthand: 'yval'
|
| 119 |
+
})
|
| 120 |
+
|
| 121 |
+
.. _Number.isNaN:
|
| 122 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNan
|
| 123 |
+
.. _Number.isFinite:
|
| 124 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite
|
| 125 |
+
.. _Math.abs:
|
| 126 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/abs
|
| 127 |
+
.. _Math.acos:
|
| 128 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/acos
|
| 129 |
+
.. _Math.asin:
|
| 130 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/asin
|
| 131 |
+
.. _Math.atan:
|
| 132 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/atan
|
| 133 |
+
.. _Math.atan2:
|
| 134 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/atan2
|
| 135 |
+
.. _Math.ceil:
|
| 136 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/ceil
|
| 137 |
+
.. _Math.cos:
|
| 138 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/cos
|
| 139 |
+
.. _Math.exp:
|
| 140 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/exp
|
| 141 |
+
.. _Math.floor:
|
| 142 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/floor
|
| 143 |
+
.. _Math.hypot:
|
| 144 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/hypot
|
| 145 |
+
.. _Math.log:
|
| 146 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/log
|
| 147 |
+
.. _Math.max:
|
| 148 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/max
|
| 149 |
+
.. _Math.min:
|
| 150 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/min
|
| 151 |
+
.. _Math.pow:
|
| 152 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/pow
|
| 153 |
+
.. _Math.random:
|
| 154 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/random
|
| 155 |
+
.. _Math.round:
|
| 156 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/round
|
| 157 |
+
.. _Math.sin:
|
| 158 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/sin
|
| 159 |
+
.. _Math.sqrt:
|
| 160 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/sqrt
|
| 161 |
+
.. _Math.tan:
|
| 162 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/tan
|
| 163 |
+
.. _normal (Gaussian) probability distribution:
|
| 164 |
+
https://en.wikipedia.org/wiki/Normal_distribution
|
| 165 |
+
.. _cumulative distribution function:
|
| 166 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 167 |
+
.. _probability density function:
|
| 168 |
+
https://en.wikipedia.org/wiki/Probability_density_function
|
| 169 |
+
.. _log-normal probability distribution:
|
| 170 |
+
https://en.wikipedia.org/wiki/Log-normal_distribution
|
| 171 |
+
.. _continuous uniform probability distribution:
|
| 172 |
+
https://en.wikipedia.org/wiki/Continuous_uniform_distribution
|
| 173 |
+
.. _*unit*:
|
| 174 |
+
https://vega.github.io/vega/docs/api/time/#time-units
|
| 175 |
+
.. _JavaScript's String.replace:
|
| 176 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace
|
| 177 |
+
.. _d3-format specifier:
|
| 178 |
+
https://github.com/d3/d3-format/
|
| 179 |
+
.. _*units*:
|
| 180 |
+
https://vega.github.io/vega/docs/api/time/#time-units
|
| 181 |
+
.. _timeUnitSpecifier API documentation:
|
| 182 |
+
https://vega.github.io/vega/docs/api/time/#timeUnitSpecifier
|
| 183 |
+
.. _timeFormat:
|
| 184 |
+
https://vega.github.io/vega/docs/expressions/#timeFormat
|
| 185 |
+
.. _utcFormat:
|
| 186 |
+
https://vega.github.io/vega/docs/expressions/#utcFormat
|
| 187 |
+
.. _d3-time-format specifier:
|
| 188 |
+
https://github.com/d3/d3-time-format/
|
| 189 |
+
.. _TimeMultiFormat object:
|
| 190 |
+
https://vega.github.io/vega/docs/types/#TimeMultiFormat
|
| 191 |
+
.. _UTC:
|
| 192 |
+
https://en.wikipedia.org/wiki/Coordinated_Universal_Time
|
| 193 |
+
.. _JavaScript's RegExp:
|
| 194 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp
|
| 195 |
+
.. _RGB:
|
| 196 |
+
https://en.wikipedia.org/wiki/RGB_color_model
|
| 197 |
+
.. _d3-color's rgb function:
|
| 198 |
+
https://github.com/d3/d3-color#rgb
|
| 199 |
+
.. _HSL:
|
| 200 |
+
https://en.wikipedia.org/wiki/HSL_and_HSV
|
| 201 |
+
.. _d3-color's hsl function:
|
| 202 |
+
https://github.com/d3/d3-color#hsl
|
| 203 |
+
.. _CIE LAB:
|
| 204 |
+
https://en.wikipedia.org/wiki/Lab_color_space#CIELAB
|
| 205 |
+
.. _d3-color's lab function:
|
| 206 |
+
https://github.com/d3/d3-color#lab
|
| 207 |
+
.. _HCL:
|
| 208 |
+
https://en.wikipedia.org/wiki/Lab_color_space#CIELAB
|
| 209 |
+
.. _d3-color's hcl function:
|
| 210 |
+
https://github.com/d3/d3-color#hcl
|
| 211 |
+
.. _W3C Web Content Accessibility Guidelines:
|
| 212 |
+
https://www.w3.org/TR/2008/REC-WCAG20-20081211/#contrast-ratiodef
|
| 213 |
+
.. _continuous color scheme:
|
| 214 |
+
https://vega.github.io/vega/docs/schemes
|
| 215 |
+
.. _geoArea:
|
| 216 |
+
https://github.com/d3/d3-geo#geoArea
|
| 217 |
+
.. _path.area:
|
| 218 |
+
https://github.com/d3/d3-geo#path_area
|
| 219 |
+
.. _geoBounds:
|
| 220 |
+
https://github.com/d3/d3-geo#geoBounds
|
| 221 |
+
.. _path.bounds:
|
| 222 |
+
https://github.com/d3/d3-geo#path_bounds
|
| 223 |
+
.. _geoCentroid:
|
| 224 |
+
https://github.com/d3/d3-geo#geoCentroid
|
| 225 |
+
.. _path.centroid:
|
| 226 |
+
https://github.com/d3/d3-geo#path_centroid
|
| 227 |
+
.. _window.screen:
|
| 228 |
+
https://developer.mozilla.org/en-US/docs/Web/API/Window/screen
|
| 229 |
+
"""
|
| 230 |
+
|
| 231 |
+
@override
|
| 232 |
+
def __new__(cls: type[_ExprRef], expr: str) -> _ExprRef: # type: ignore[misc]
|
| 233 |
+
return _ExprRef(expr=expr)
|
| 234 |
+
|
| 235 |
+
@classmethod
|
| 236 |
+
def isArray(cls, value: IntoExpression, /) -> Expression:
|
| 237 |
+
"""Returns true if ``value`` is an array, false otherwise."""
|
| 238 |
+
return FunctionExpression("isArray", (value,))
|
| 239 |
+
|
| 240 |
+
@classmethod
|
| 241 |
+
def isBoolean(cls, value: IntoExpression, /) -> Expression:
|
| 242 |
+
"""Returns true if ``value`` is a boolean (``true`` or ``false``), false otherwise."""
|
| 243 |
+
return FunctionExpression("isBoolean", (value,))
|
| 244 |
+
|
| 245 |
+
@classmethod
|
| 246 |
+
def isDate(cls, value: IntoExpression, /) -> Expression:
|
| 247 |
+
"""
|
| 248 |
+
Returns true if ``value`` is a Date object, false otherwise.
|
| 249 |
+
|
| 250 |
+
This method will return false for timestamp numbers or date-formatted strings; it recognizes
|
| 251 |
+
Date objects only.
|
| 252 |
+
"""
|
| 253 |
+
return FunctionExpression("isDate", (value,))
|
| 254 |
+
|
| 255 |
+
@classmethod
|
| 256 |
+
def isDefined(cls, value: IntoExpression, /) -> Expression:
|
| 257 |
+
"""
|
| 258 |
+
Returns true if ``value`` is a defined value, false if ``value`` equals ``undefined``.
|
| 259 |
+
|
| 260 |
+
This method will return true for ``null`` and ``NaN`` values.
|
| 261 |
+
"""
|
| 262 |
+
return FunctionExpression("isDefined", (value,))
|
| 263 |
+
|
| 264 |
+
@classmethod
|
| 265 |
+
def isNumber(cls, value: IntoExpression, /) -> Expression:
|
| 266 |
+
"""
|
| 267 |
+
Returns true if ``value`` is a number, false otherwise.
|
| 268 |
+
|
| 269 |
+
``NaN`` and ``Infinity`` are considered numbers.
|
| 270 |
+
"""
|
| 271 |
+
return FunctionExpression("isNumber", (value,))
|
| 272 |
+
|
| 273 |
+
@classmethod
|
| 274 |
+
def isObject(cls, value: IntoExpression, /) -> Expression:
|
| 275 |
+
"""Returns true if ``value`` is an object (including arrays and Dates), false otherwise."""
|
| 276 |
+
return FunctionExpression("isObject", (value,))
|
| 277 |
+
|
| 278 |
+
@classmethod
|
| 279 |
+
def isRegExp(cls, value: IntoExpression, /) -> Expression:
|
| 280 |
+
"""Returns true if ``value`` is a RegExp (regular expression) object, false otherwise."""
|
| 281 |
+
return FunctionExpression("isRegExp", (value,))
|
| 282 |
+
|
| 283 |
+
@classmethod
|
| 284 |
+
def isString(cls, value: IntoExpression, /) -> Expression:
|
| 285 |
+
"""Returns true if ``value`` is a string, false otherwise."""
|
| 286 |
+
return FunctionExpression("isString", (value,))
|
| 287 |
+
|
| 288 |
+
@classmethod
|
| 289 |
+
def isValid(cls, value: IntoExpression, /) -> Expression:
|
| 290 |
+
"""Returns true if ``value`` is not ``null``, ``undefined``, or ``NaN``, false otherwise."""
|
| 291 |
+
return FunctionExpression("isValid", (value,))
|
| 292 |
+
|
| 293 |
+
@classmethod
|
| 294 |
+
def toBoolean(cls, value: IntoExpression, /) -> Expression:
|
| 295 |
+
"""
|
| 296 |
+
Coerces the input ``value`` to a string.
|
| 297 |
+
|
| 298 |
+
Null values and empty strings are mapped to ``null``.
|
| 299 |
+
"""
|
| 300 |
+
return FunctionExpression("toBoolean", (value,))
|
| 301 |
+
|
| 302 |
+
@classmethod
|
| 303 |
+
def toDate(cls, value: IntoExpression, /) -> Expression:
|
| 304 |
+
"""
|
| 305 |
+
Coerces the input ``value`` to a Date instance.
|
| 306 |
+
|
| 307 |
+
Null values and empty strings are mapped to ``null``. If an optional *parser* function is
|
| 308 |
+
provided, it is used to perform date parsing, otherwise ``Date.parse`` is used. Be aware
|
| 309 |
+
that ``Date.parse`` has different implementations across browsers!
|
| 310 |
+
"""
|
| 311 |
+
return FunctionExpression("toDate", (value,))
|
| 312 |
+
|
| 313 |
+
@classmethod
|
| 314 |
+
def toNumber(cls, value: IntoExpression, /) -> Expression:
|
| 315 |
+
"""
|
| 316 |
+
Coerces the input ``value`` to a number.
|
| 317 |
+
|
| 318 |
+
Null values and empty strings are mapped to ``null``.
|
| 319 |
+
"""
|
| 320 |
+
return FunctionExpression("toNumber", (value,))
|
| 321 |
+
|
| 322 |
+
@classmethod
|
| 323 |
+
def toString(cls, value: IntoExpression, /) -> Expression:
|
| 324 |
+
"""
|
| 325 |
+
Coerces the input ``value`` to a string.
|
| 326 |
+
|
| 327 |
+
Null values and empty strings are mapped to ``null``.
|
| 328 |
+
"""
|
| 329 |
+
return FunctionExpression("toString", (value,))
|
| 330 |
+
|
| 331 |
+
@classmethod
|
| 332 |
+
def if_(
|
| 333 |
+
cls,
|
| 334 |
+
test: IntoExpression,
|
| 335 |
+
thenValue: IntoExpression,
|
| 336 |
+
elseValue: IntoExpression,
|
| 337 |
+
/,
|
| 338 |
+
) -> Expression:
|
| 339 |
+
"""
|
| 340 |
+
If ``test`` is truthy, returns ``thenValue``.
|
| 341 |
+
|
| 342 |
+
Otherwise, returns ``elseValue``. The *if* function is equivalent to the ternary operator
|
| 343 |
+
``a ? b : c``.
|
| 344 |
+
"""
|
| 345 |
+
return FunctionExpression("if", (test, thenValue, elseValue))
|
| 346 |
+
|
| 347 |
+
@classmethod
|
| 348 |
+
def isNaN(cls, value: IntoExpression, /) -> Expression:
|
| 349 |
+
"""
|
| 350 |
+
Returns true if ``value`` is not a number.
|
| 351 |
+
|
| 352 |
+
Same as JavaScript's `Number.isNaN`_.
|
| 353 |
+
|
| 354 |
+
.. _Number.isNaN:
|
| 355 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNan
|
| 356 |
+
"""
|
| 357 |
+
return FunctionExpression("isNaN", (value,))
|
| 358 |
+
|
| 359 |
+
@classmethod
|
| 360 |
+
def isFinite(cls, value: IntoExpression, /) -> Expression:
|
| 361 |
+
"""
|
| 362 |
+
Returns true if ``value`` is a finite number.
|
| 363 |
+
|
| 364 |
+
Same as JavaScript's `Number.isFinite`_.
|
| 365 |
+
|
| 366 |
+
.. _Number.isFinite:
|
| 367 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite
|
| 368 |
+
"""
|
| 369 |
+
return FunctionExpression("isFinite", (value,))
|
| 370 |
+
|
| 371 |
+
@classmethod
|
| 372 |
+
def abs(cls, value: IntoExpression, /) -> Expression:
|
| 373 |
+
"""
|
| 374 |
+
Returns the absolute value of ``value``.
|
| 375 |
+
|
| 376 |
+
Same as JavaScript's `Math.abs`_.
|
| 377 |
+
|
| 378 |
+
.. _Math.abs:
|
| 379 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/abs
|
| 380 |
+
"""
|
| 381 |
+
return FunctionExpression("abs", (value,))
|
| 382 |
+
|
| 383 |
+
@classmethod
|
| 384 |
+
def acos(cls, value: IntoExpression, /) -> Expression:
|
| 385 |
+
"""
|
| 386 |
+
Trigonometric arccosine.
|
| 387 |
+
|
| 388 |
+
Same as JavaScript's `Math.acos`_.
|
| 389 |
+
|
| 390 |
+
.. _Math.acos:
|
| 391 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/acos
|
| 392 |
+
"""
|
| 393 |
+
return FunctionExpression("acos", (value,))
|
| 394 |
+
|
| 395 |
+
@classmethod
|
| 396 |
+
def asin(cls, value: IntoExpression, /) -> Expression:
|
| 397 |
+
"""
|
| 398 |
+
Trigonometric arcsine.
|
| 399 |
+
|
| 400 |
+
Same as JavaScript's `Math.asin`_.
|
| 401 |
+
|
| 402 |
+
.. _Math.asin:
|
| 403 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/asin
|
| 404 |
+
"""
|
| 405 |
+
return FunctionExpression("asin", (value,))
|
| 406 |
+
|
| 407 |
+
@classmethod
|
| 408 |
+
def atan(cls, value: IntoExpression, /) -> Expression:
|
| 409 |
+
"""
|
| 410 |
+
Trigonometric arctangent.
|
| 411 |
+
|
| 412 |
+
Same as JavaScript's `Math.atan`_.
|
| 413 |
+
|
| 414 |
+
.. _Math.atan:
|
| 415 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/atan
|
| 416 |
+
"""
|
| 417 |
+
return FunctionExpression("atan", (value,))
|
| 418 |
+
|
| 419 |
+
@classmethod
|
| 420 |
+
def atan2(cls, dy: IntoExpression, dx: IntoExpression, /) -> Expression:
|
| 421 |
+
"""
|
| 422 |
+
Returns the arctangent of *dy / dx*.
|
| 423 |
+
|
| 424 |
+
Same as JavaScript's `Math.atan2`_.
|
| 425 |
+
|
| 426 |
+
.. _Math.atan2:
|
| 427 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/atan2
|
| 428 |
+
"""
|
| 429 |
+
return FunctionExpression("atan2", (dy, dx))
|
| 430 |
+
|
| 431 |
+
@classmethod
|
| 432 |
+
def ceil(cls, value: IntoExpression, /) -> Expression:
|
| 433 |
+
"""
|
| 434 |
+
Rounds ``value`` to the nearest integer of equal or greater value.
|
| 435 |
+
|
| 436 |
+
Same as JavaScript's `Math.ceil`_.
|
| 437 |
+
|
| 438 |
+
.. _Math.ceil:
|
| 439 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/ceil
|
| 440 |
+
"""
|
| 441 |
+
return FunctionExpression("ceil", (value,))
|
| 442 |
+
|
| 443 |
+
@classmethod
|
| 444 |
+
def clamp(
|
| 445 |
+
cls, value: IntoExpression, min: IntoExpression, max: IntoExpression, /
|
| 446 |
+
) -> Expression:
|
| 447 |
+
"""Restricts ``value`` to be between the specified ``min`` and ``max``."""
|
| 448 |
+
return FunctionExpression("clamp", (value, min, max))
|
| 449 |
+
|
| 450 |
+
@classmethod
|
| 451 |
+
def cos(cls, value: IntoExpression, /) -> Expression:
|
| 452 |
+
"""
|
| 453 |
+
Trigonometric cosine.
|
| 454 |
+
|
| 455 |
+
Same as JavaScript's `Math.cos`_.
|
| 456 |
+
|
| 457 |
+
.. _Math.cos:
|
| 458 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/cos
|
| 459 |
+
"""
|
| 460 |
+
return FunctionExpression("cos", (value,))
|
| 461 |
+
|
| 462 |
+
@classmethod
|
| 463 |
+
def exp(cls, exponent: IntoExpression, /) -> Expression:
|
| 464 |
+
"""
|
| 465 |
+
Returns the value of *e* raised to the provided ``exponent``.
|
| 466 |
+
|
| 467 |
+
Same as JavaScript's `Math.exp`_.
|
| 468 |
+
|
| 469 |
+
.. _Math.exp:
|
| 470 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/exp
|
| 471 |
+
"""
|
| 472 |
+
return FunctionExpression("exp", (exponent,))
|
| 473 |
+
|
| 474 |
+
@classmethod
|
| 475 |
+
def floor(cls, value: IntoExpression, /) -> Expression:
|
| 476 |
+
"""
|
| 477 |
+
Rounds ``value`` to the nearest integer of equal or lower value.
|
| 478 |
+
|
| 479 |
+
Same as JavaScript's `Math.floor`_.
|
| 480 |
+
|
| 481 |
+
.. _Math.floor:
|
| 482 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/floor
|
| 483 |
+
"""
|
| 484 |
+
return FunctionExpression("floor", (value,))
|
| 485 |
+
|
| 486 |
+
@classmethod
|
| 487 |
+
def hypot(cls, value: IntoExpression, /) -> Expression:
|
| 488 |
+
"""
|
| 489 |
+
Returns the square root of the sum of squares of its arguments.
|
| 490 |
+
|
| 491 |
+
Same as JavaScript's `Math.hypot`_.
|
| 492 |
+
|
| 493 |
+
.. _Math.hypot:
|
| 494 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/hypot
|
| 495 |
+
"""
|
| 496 |
+
return FunctionExpression("hypot", (value,))
|
| 497 |
+
|
| 498 |
+
@classmethod
|
| 499 |
+
def log(cls, value: IntoExpression, /) -> Expression:
|
| 500 |
+
"""
|
| 501 |
+
Returns the natural logarithm of ``value``.
|
| 502 |
+
|
| 503 |
+
Same as JavaScript's `Math.log`_.
|
| 504 |
+
|
| 505 |
+
.. _Math.log:
|
| 506 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/log
|
| 507 |
+
"""
|
| 508 |
+
return FunctionExpression("log", (value,))
|
| 509 |
+
|
| 510 |
+
@classmethod
|
| 511 |
+
def max(
|
| 512 |
+
cls, value1: IntoExpression, value2: IntoExpression, *args: Any
|
| 513 |
+
) -> Expression:
|
| 514 |
+
"""
|
| 515 |
+
Returns the maximum argument value.
|
| 516 |
+
|
| 517 |
+
Same as JavaScript's `Math.max`_.
|
| 518 |
+
|
| 519 |
+
.. _Math.max:
|
| 520 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/max
|
| 521 |
+
"""
|
| 522 |
+
return FunctionExpression("max", (value1, value2, *args))
|
| 523 |
+
|
| 524 |
+
@classmethod
|
| 525 |
+
def min(
|
| 526 |
+
cls, value1: IntoExpression, value2: IntoExpression, *args: Any
|
| 527 |
+
) -> Expression:
|
| 528 |
+
"""
|
| 529 |
+
Returns the minimum argument value.
|
| 530 |
+
|
| 531 |
+
Same as JavaScript's `Math.min`_.
|
| 532 |
+
|
| 533 |
+
.. _Math.min:
|
| 534 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/min
|
| 535 |
+
"""
|
| 536 |
+
return FunctionExpression("min", (value1, value2, *args))
|
| 537 |
+
|
| 538 |
+
@classmethod
|
| 539 |
+
def pow(cls, value: IntoExpression, exponent: IntoExpression, /) -> Expression:
|
| 540 |
+
"""
|
| 541 |
+
Returns ``value`` raised to the given ``exponent``.
|
| 542 |
+
|
| 543 |
+
Same as JavaScript's `Math.pow`_.
|
| 544 |
+
|
| 545 |
+
.. _Math.pow:
|
| 546 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/pow
|
| 547 |
+
"""
|
| 548 |
+
return FunctionExpression("pow", (value, exponent))
|
| 549 |
+
|
| 550 |
+
@classmethod
|
| 551 |
+
def random(cls) -> Expression:
|
| 552 |
+
"""
|
| 553 |
+
Returns a pseudo-random number in the range [0,1).
|
| 554 |
+
|
| 555 |
+
Same as JavaScript's `Math.random`_.
|
| 556 |
+
|
| 557 |
+
.. _Math.random:
|
| 558 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/random
|
| 559 |
+
"""
|
| 560 |
+
return FunctionExpression("random", ())
|
| 561 |
+
|
| 562 |
+
@classmethod
|
| 563 |
+
def round(cls, value: IntoExpression, /) -> Expression:
|
| 564 |
+
"""
|
| 565 |
+
Rounds ``value`` to the nearest integer.
|
| 566 |
+
|
| 567 |
+
Same as JavaScript's `Math.round`_.
|
| 568 |
+
|
| 569 |
+
.. _Math.round:
|
| 570 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/round
|
| 571 |
+
"""
|
| 572 |
+
return FunctionExpression("round", (value,))
|
| 573 |
+
|
| 574 |
+
@classmethod
|
| 575 |
+
def sin(cls, value: IntoExpression, /) -> Expression:
|
| 576 |
+
"""
|
| 577 |
+
Trigonometric sine.
|
| 578 |
+
|
| 579 |
+
Same as JavaScript's `Math.sin`_.
|
| 580 |
+
|
| 581 |
+
.. _Math.sin:
|
| 582 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/sin
|
| 583 |
+
"""
|
| 584 |
+
return FunctionExpression("sin", (value,))
|
| 585 |
+
|
| 586 |
+
@classmethod
|
| 587 |
+
def sqrt(cls, value: IntoExpression, /) -> Expression:
|
| 588 |
+
"""
|
| 589 |
+
Square root function.
|
| 590 |
+
|
| 591 |
+
Same as JavaScript's `Math.sqrt`_.
|
| 592 |
+
|
| 593 |
+
.. _Math.sqrt:
|
| 594 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/sqrt
|
| 595 |
+
"""
|
| 596 |
+
return FunctionExpression("sqrt", (value,))
|
| 597 |
+
|
| 598 |
+
@classmethod
|
| 599 |
+
def tan(cls, value: IntoExpression, /) -> Expression:
|
| 600 |
+
"""
|
| 601 |
+
Trigonometric tangent.
|
| 602 |
+
|
| 603 |
+
Same as JavaScript's `Math.tan`_.
|
| 604 |
+
|
| 605 |
+
.. _Math.tan:
|
| 606 |
+
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/tan
|
| 607 |
+
"""
|
| 608 |
+
return FunctionExpression("tan", (value,))
|
| 609 |
+
|
| 610 |
+
@classmethod
|
| 611 |
+
def sampleNormal(
|
| 612 |
+
cls, mean: IntoExpression = None, stdev: IntoExpression = None, /
|
| 613 |
+
) -> Expression:
|
| 614 |
+
"""
|
| 615 |
+
Returns a sample from a univariate `normal (Gaussian) probability distribution`_ with specified ``mean`` and standard deviation ``stdev``.
|
| 616 |
+
|
| 617 |
+
If unspecified, the mean defaults to ``0`` and the standard deviation defaults to ``1``.
|
| 618 |
+
|
| 619 |
+
.. _normal (Gaussian) probability distribution:
|
| 620 |
+
https://en.wikipedia.org/wiki/Normal_distribution
|
| 621 |
+
"""
|
| 622 |
+
return FunctionExpression("sampleNormal", (mean, stdev))
|
| 623 |
+
|
| 624 |
+
@classmethod
|
| 625 |
+
def cumulativeNormal(
|
| 626 |
+
cls,
|
| 627 |
+
value: IntoExpression,
|
| 628 |
+
mean: IntoExpression = None,
|
| 629 |
+
stdev: IntoExpression = None,
|
| 630 |
+
/,
|
| 631 |
+
) -> Expression:
|
| 632 |
+
"""
|
| 633 |
+
Returns the value of the `cumulative distribution function`_ at the given input domain ``value`` for a normal distribution with specified ``mean`` and standard deviation ``stdev``.
|
| 634 |
+
|
| 635 |
+
If unspecified, the mean defaults to ``0`` and the standard deviation defaults to ``1``.
|
| 636 |
+
|
| 637 |
+
.. _cumulative distribution function:
|
| 638 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 639 |
+
"""
|
| 640 |
+
return FunctionExpression("cumulativeNormal", (value, mean, stdev))
|
| 641 |
+
|
| 642 |
+
@classmethod
|
| 643 |
+
def densityNormal(
|
| 644 |
+
cls,
|
| 645 |
+
value: IntoExpression,
|
| 646 |
+
mean: IntoExpression = None,
|
| 647 |
+
stdev: IntoExpression = None,
|
| 648 |
+
/,
|
| 649 |
+
) -> Expression:
|
| 650 |
+
"""
|
| 651 |
+
Returns the value of the `probability density function`_ at the given input domain ``value``, for a normal distribution with specified ``mean`` and standard deviation ``stdev``.
|
| 652 |
+
|
| 653 |
+
If unspecified, the mean defaults to ``0`` and the standard deviation defaults to ``1``.
|
| 654 |
+
|
| 655 |
+
.. _probability density function:
|
| 656 |
+
https://en.wikipedia.org/wiki/Probability_density_function
|
| 657 |
+
"""
|
| 658 |
+
return FunctionExpression("densityNormal", (value, mean, stdev))
|
| 659 |
+
|
| 660 |
+
@classmethod
|
| 661 |
+
def quantileNormal(
|
| 662 |
+
cls,
|
| 663 |
+
probability: IntoExpression,
|
| 664 |
+
mean: IntoExpression = None,
|
| 665 |
+
stdev: IntoExpression = None,
|
| 666 |
+
/,
|
| 667 |
+
) -> Expression:
|
| 668 |
+
"""
|
| 669 |
+
Returns the quantile value (the inverse of the `cumulative distribution function`_) for the given input ``probability``, for a normal distribution with specified ``mean`` and standard deviation ``stdev``.
|
| 670 |
+
|
| 671 |
+
If unspecified, the mean defaults to ``0`` and the standard deviation defaults to ``1``.
|
| 672 |
+
|
| 673 |
+
.. _cumulative distribution function:
|
| 674 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 675 |
+
"""
|
| 676 |
+
return FunctionExpression("quantileNormal", (probability, mean, stdev))
|
| 677 |
+
|
| 678 |
+
@classmethod
|
| 679 |
+
def sampleLogNormal(
|
| 680 |
+
cls, mean: IntoExpression = None, stdev: IntoExpression = None, /
|
| 681 |
+
) -> Expression:
|
| 682 |
+
"""
|
| 683 |
+
Returns a sample from a univariate `log-normal probability distribution`_ with specified log ``mean`` and log standard deviation ``stdev``.
|
| 684 |
+
|
| 685 |
+
If unspecified, the log mean defaults to ``0`` and the log standard deviation defaults to
|
| 686 |
+
``1``.
|
| 687 |
+
|
| 688 |
+
.. _log-normal probability distribution:
|
| 689 |
+
https://en.wikipedia.org/wiki/Log-normal_distribution
|
| 690 |
+
"""
|
| 691 |
+
return FunctionExpression("sampleLogNormal", (mean, stdev))
|
| 692 |
+
|
| 693 |
+
@classmethod
|
| 694 |
+
def cumulativeLogNormal(
|
| 695 |
+
cls,
|
| 696 |
+
value: IntoExpression,
|
| 697 |
+
mean: IntoExpression = None,
|
| 698 |
+
stdev: IntoExpression = None,
|
| 699 |
+
/,
|
| 700 |
+
) -> Expression:
|
| 701 |
+
"""
|
| 702 |
+
Returns the value of the `cumulative distribution function`_ at the given input domain ``value`` for a log-normal distribution with specified log ``mean`` and log standard deviation ``stdev``.
|
| 703 |
+
|
| 704 |
+
If unspecified, the log mean defaults to ``0`` and the log standard deviation defaults to
|
| 705 |
+
``1``.
|
| 706 |
+
|
| 707 |
+
.. _cumulative distribution function:
|
| 708 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 709 |
+
"""
|
| 710 |
+
return FunctionExpression("cumulativeLogNormal", (value, mean, stdev))
|
| 711 |
+
|
| 712 |
+
@classmethod
|
| 713 |
+
def densityLogNormal(
|
| 714 |
+
cls,
|
| 715 |
+
value: IntoExpression,
|
| 716 |
+
mean: IntoExpression = None,
|
| 717 |
+
stdev: IntoExpression = None,
|
| 718 |
+
/,
|
| 719 |
+
) -> Expression:
|
| 720 |
+
"""
|
| 721 |
+
Returns the value of the `probability density function`_ at the given input domain ``value``, for a log-normal distribution with specified log ``mean`` and log standard deviation ``stdev``.
|
| 722 |
+
|
| 723 |
+
If unspecified, the log mean defaults to ``0`` and the log standard deviation defaults to
|
| 724 |
+
``1``.
|
| 725 |
+
|
| 726 |
+
.. _probability density function:
|
| 727 |
+
https://en.wikipedia.org/wiki/Probability_density_function
|
| 728 |
+
"""
|
| 729 |
+
return FunctionExpression("densityLogNormal", (value, mean, stdev))
|
| 730 |
+
|
| 731 |
+
@classmethod
|
| 732 |
+
def quantileLogNormal(
|
| 733 |
+
cls,
|
| 734 |
+
probability: IntoExpression,
|
| 735 |
+
mean: IntoExpression = None,
|
| 736 |
+
stdev: IntoExpression = None,
|
| 737 |
+
/,
|
| 738 |
+
) -> Expression:
|
| 739 |
+
"""
|
| 740 |
+
Returns the quantile value (the inverse of the `cumulative distribution function`_) for the given input ``probability``, for a log-normal distribution with specified log ``mean`` and log standard deviation ``stdev``.
|
| 741 |
+
|
| 742 |
+
If unspecified, the log mean defaults to ``0`` and the log standard deviation defaults to
|
| 743 |
+
``1``.
|
| 744 |
+
|
| 745 |
+
.. _cumulative distribution function:
|
| 746 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 747 |
+
"""
|
| 748 |
+
return FunctionExpression("quantileLogNormal", (probability, mean, stdev))
|
| 749 |
+
|
| 750 |
+
@classmethod
|
| 751 |
+
def sampleUniform(
|
| 752 |
+
cls, min: IntoExpression = None, max: IntoExpression = None, /
|
| 753 |
+
) -> Expression:
|
| 754 |
+
"""
|
| 755 |
+
Returns a sample from a univariate `continuous uniform probability distribution`_ over the interval [``min``, ``max``).
|
| 756 |
+
|
| 757 |
+
If unspecified, ``min`` defaults to ``0`` and ``max`` defaults to ``1``. If only one
|
| 758 |
+
argument is provided, it is interpreted as the ``max`` value.
|
| 759 |
+
|
| 760 |
+
.. _continuous uniform probability distribution:
|
| 761 |
+
https://en.wikipedia.org/wiki/Continuous_uniform_distribution
|
| 762 |
+
"""
|
| 763 |
+
return FunctionExpression("sampleUniform", (min, max))
|
| 764 |
+
|
| 765 |
+
@classmethod
|
| 766 |
+
def cumulativeUniform(
|
| 767 |
+
cls,
|
| 768 |
+
value: IntoExpression,
|
| 769 |
+
min: IntoExpression = None,
|
| 770 |
+
max: IntoExpression = None,
|
| 771 |
+
/,
|
| 772 |
+
) -> Expression:
|
| 773 |
+
"""
|
| 774 |
+
Returns the value of the `cumulative distribution function`_ at the given input domain ``value`` for a uniform distribution over the interval [``min``, ``max``).
|
| 775 |
+
|
| 776 |
+
If unspecified, ``min`` defaults to ``0`` and ``max`` defaults to ``1``. If only one
|
| 777 |
+
argument is provided, it is interpreted as the ``max`` value.
|
| 778 |
+
|
| 779 |
+
.. _cumulative distribution function:
|
| 780 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 781 |
+
"""
|
| 782 |
+
return FunctionExpression("cumulativeUniform", (value, min, max))
|
| 783 |
+
|
| 784 |
+
@classmethod
|
| 785 |
+
def densityUniform(
|
| 786 |
+
cls,
|
| 787 |
+
value: IntoExpression,
|
| 788 |
+
min: IntoExpression = None,
|
| 789 |
+
max: IntoExpression = None,
|
| 790 |
+
/,
|
| 791 |
+
) -> Expression:
|
| 792 |
+
"""
|
| 793 |
+
Returns the value of the `probability density function`_ at the given input domain ``value``, for a uniform distribution over the interval [``min``, ``max``).
|
| 794 |
+
|
| 795 |
+
If unspecified, ``min`` defaults to ``0`` and ``max`` defaults to ``1``. If only one
|
| 796 |
+
argument is provided, it is interpreted as the ``max`` value.
|
| 797 |
+
|
| 798 |
+
.. _probability density function:
|
| 799 |
+
https://en.wikipedia.org/wiki/Probability_density_function
|
| 800 |
+
"""
|
| 801 |
+
return FunctionExpression("densityUniform", (value, min, max))
|
| 802 |
+
|
| 803 |
+
@classmethod
|
| 804 |
+
def quantileUniform(
|
| 805 |
+
cls,
|
| 806 |
+
probability: IntoExpression,
|
| 807 |
+
min: IntoExpression = None,
|
| 808 |
+
max: IntoExpression = None,
|
| 809 |
+
/,
|
| 810 |
+
) -> Expression:
|
| 811 |
+
"""
|
| 812 |
+
Returns the quantile value (the inverse of the `cumulative distribution function`_) for the given input ``probability``, for a uniform distribution over the interval [``min``, ``max``).
|
| 813 |
+
|
| 814 |
+
If unspecified, ``min`` defaults to ``0`` and ``max`` defaults to ``1``. If only one
|
| 815 |
+
argument is provided, it is interpreted as the ``max`` value.
|
| 816 |
+
|
| 817 |
+
.. _cumulative distribution function:
|
| 818 |
+
https://en.wikipedia.org/wiki/Cumulative_distribution_function
|
| 819 |
+
"""
|
| 820 |
+
return FunctionExpression("quantileUniform", (probability, min, max))
|
| 821 |
+
|
| 822 |
+
@classmethod
|
| 823 |
+
def now(cls) -> Expression:
|
| 824 |
+
"""Returns the timestamp for the current time."""
|
| 825 |
+
return FunctionExpression("now", ())
|
| 826 |
+
|
| 827 |
+
@classmethod
|
| 828 |
+
def datetime(
|
| 829 |
+
cls,
|
| 830 |
+
year: IntoExpression,
|
| 831 |
+
month: IntoExpression,
|
| 832 |
+
day: IntoExpression = None,
|
| 833 |
+
hour: IntoExpression = None,
|
| 834 |
+
min: IntoExpression = None,
|
| 835 |
+
sec: IntoExpression = None,
|
| 836 |
+
millisec: IntoExpression = None,
|
| 837 |
+
/,
|
| 838 |
+
) -> Expression:
|
| 839 |
+
"""
|
| 840 |
+
Returns a new ``Date`` instance.
|
| 841 |
+
|
| 842 |
+
The ``month`` is 0-based, such that ``1`` represents February.
|
| 843 |
+
"""
|
| 844 |
+
return FunctionExpression(
|
| 845 |
+
"datetime", (year, month, day, hour, min, sec, millisec)
|
| 846 |
+
)
|
| 847 |
+
|
| 848 |
+
@classmethod
|
| 849 |
+
def date(cls, datetime: IntoExpression, /) -> Expression:
|
| 850 |
+
"""Returns the day of the month for the given ``datetime`` value, in local time."""
|
| 851 |
+
return FunctionExpression("date", (datetime,))
|
| 852 |
+
|
| 853 |
+
@classmethod
|
| 854 |
+
def day(cls, datetime: IntoExpression, /) -> Expression:
|
| 855 |
+
"""Returns the day of the week for the given ``datetime`` value, in local time."""
|
| 856 |
+
return FunctionExpression("day", (datetime,))
|
| 857 |
+
|
| 858 |
+
@classmethod
|
| 859 |
+
def dayofyear(cls, datetime: IntoExpression, /) -> Expression:
|
| 860 |
+
"""Returns the one-based day of the year for the given ``datetime`` value, in local time."""
|
| 861 |
+
return FunctionExpression("dayofyear", (datetime,))
|
| 862 |
+
|
| 863 |
+
@classmethod
|
| 864 |
+
def year(cls, datetime: IntoExpression, /) -> Expression:
|
| 865 |
+
"""Returns the year for the given ``datetime`` value, in local time."""
|
| 866 |
+
return FunctionExpression("year", (datetime,))
|
| 867 |
+
|
| 868 |
+
@classmethod
|
| 869 |
+
def quarter(cls, datetime: IntoExpression, /) -> Expression:
|
| 870 |
+
"""Returns the quarter of the year (0-3) for the given ``datetime`` value, in local time."""
|
| 871 |
+
return FunctionExpression("quarter", (datetime,))
|
| 872 |
+
|
| 873 |
+
@classmethod
|
| 874 |
+
def month(cls, datetime: IntoExpression, /) -> Expression:
|
| 875 |
+
"""Returns the (zero-based) month for the given ``datetime`` value, in local time."""
|
| 876 |
+
return FunctionExpression("month", (datetime,))
|
| 877 |
+
|
| 878 |
+
@classmethod
|
| 879 |
+
def week(cls, date: IntoExpression, /) -> Expression:
|
| 880 |
+
"""
|
| 881 |
+
Returns the week number of the year for the given *datetime*, in local time.
|
| 882 |
+
|
| 883 |
+
This function assumes Sunday-based weeks. Days before the first Sunday of the year are
|
| 884 |
+
considered to be in week 0, the first Sunday of the year is the start of week 1, the second
|
| 885 |
+
Sunday week 2, *etc.*.
|
| 886 |
+
"""
|
| 887 |
+
return FunctionExpression("week", (date,))
|
| 888 |
+
|
| 889 |
+
@classmethod
|
| 890 |
+
def hours(cls, datetime: IntoExpression, /) -> Expression:
|
| 891 |
+
"""Returns the hours component for the given ``datetime`` value, in local time."""
|
| 892 |
+
return FunctionExpression("hours", (datetime,))
|
| 893 |
+
|
| 894 |
+
@classmethod
|
| 895 |
+
def minutes(cls, datetime: IntoExpression, /) -> Expression:
|
| 896 |
+
"""Returns the minutes component for the given ``datetime`` value, in local time."""
|
| 897 |
+
return FunctionExpression("minutes", (datetime,))
|
| 898 |
+
|
| 899 |
+
@classmethod
|
| 900 |
+
def seconds(cls, datetime: IntoExpression, /) -> Expression:
|
| 901 |
+
"""Returns the seconds component for the given ``datetime`` value, in local time."""
|
| 902 |
+
return FunctionExpression("seconds", (datetime,))
|
| 903 |
+
|
| 904 |
+
@classmethod
|
| 905 |
+
def milliseconds(cls, datetime: IntoExpression, /) -> Expression:
|
| 906 |
+
"""Returns the milliseconds component for the given ``datetime`` value, in local time."""
|
| 907 |
+
return FunctionExpression("milliseconds", (datetime,))
|
| 908 |
+
|
| 909 |
+
@classmethod
|
| 910 |
+
def time(cls, datetime: IntoExpression, /) -> Expression:
|
| 911 |
+
"""Returns the epoch-based timestamp for the given ``datetime`` value."""
|
| 912 |
+
return FunctionExpression("time", (datetime,))
|
| 913 |
+
|
| 914 |
+
@classmethod
|
| 915 |
+
def timezoneoffset(cls, datetime: IntoExpression, /) -> Expression:
|
| 916 |
+
"""Returns the timezone offset from the local timezone to UTC for the given ``datetime`` value."""
|
| 917 |
+
return FunctionExpression("timezoneoffset", (datetime,))
|
| 918 |
+
|
| 919 |
+
@classmethod
|
| 920 |
+
def timeOffset(
|
| 921 |
+
cls, unit: IntoExpression, date: IntoExpression, step: IntoExpression = None, /
|
| 922 |
+
) -> Expression:
|
| 923 |
+
"""
|
| 924 |
+
Returns a new ``Date`` instance that offsets the given ``date`` by the specified time `*unit*`_ in the local timezone.
|
| 925 |
+
|
| 926 |
+
The optional ``step`` argument indicates the number of time unit steps to offset by (default
|
| 927 |
+
1).
|
| 928 |
+
|
| 929 |
+
.. _*unit*:
|
| 930 |
+
https://vega.github.io/vega/docs/api/time/#time-units
|
| 931 |
+
"""
|
| 932 |
+
return FunctionExpression("timeOffset", (unit, date, step))
|
| 933 |
+
|
| 934 |
+
@classmethod
|
| 935 |
+
def timeSequence(
|
| 936 |
+
cls,
|
| 937 |
+
unit: IntoExpression,
|
| 938 |
+
start: IntoExpression,
|
| 939 |
+
stop: IntoExpression,
|
| 940 |
+
step: IntoExpression = None,
|
| 941 |
+
/,
|
| 942 |
+
) -> Expression:
|
| 943 |
+
"""
|
| 944 |
+
Returns an array of ``Date`` instances from ``start`` (inclusive) to ``stop`` (exclusive), with each entry separated by the given time `*unit*`_ in the local timezone.
|
| 945 |
+
|
| 946 |
+
The optional ``step`` argument indicates the number of time unit steps to take between each
|
| 947 |
+
sequence entry (default 1).
|
| 948 |
+
|
| 949 |
+
.. _*unit*:
|
| 950 |
+
https://vega.github.io/vega/docs/api/time/#time-units
|
| 951 |
+
"""
|
| 952 |
+
return FunctionExpression("timeSequence", (unit, start, stop, step))
|
| 953 |
+
|
| 954 |
+
@classmethod
|
| 955 |
+
def utc(
|
| 956 |
+
cls,
|
| 957 |
+
year: IntoExpression,
|
| 958 |
+
month: IntoExpression,
|
| 959 |
+
day: IntoExpression = None,
|
| 960 |
+
hour: IntoExpression = None,
|
| 961 |
+
min: IntoExpression = None,
|
| 962 |
+
sec: IntoExpression = None,
|
| 963 |
+
millisec: IntoExpression = None,
|
| 964 |
+
/,
|
| 965 |
+
) -> Expression:
|
| 966 |
+
"""
|
| 967 |
+
Returns a timestamp for the given UTC date.
|
| 968 |
+
|
| 969 |
+
The ``month`` is 0-based, such that ``1`` represents February.
|
| 970 |
+
"""
|
| 971 |
+
return FunctionExpression("utc", (year, month, day, hour, min, sec, millisec))
|
| 972 |
+
|
| 973 |
+
@classmethod
|
| 974 |
+
def utcdate(cls, datetime: IntoExpression, /) -> Expression:
|
| 975 |
+
"""Returns the day of the month for the given ``datetime`` value, in UTC time."""
|
| 976 |
+
return FunctionExpression("utcdate", (datetime,))
|
| 977 |
+
|
| 978 |
+
@classmethod
|
| 979 |
+
def utcday(cls, datetime: IntoExpression, /) -> Expression:
|
| 980 |
+
"""Returns the day of the week for the given ``datetime`` value, in UTC time."""
|
| 981 |
+
return FunctionExpression("utcday", (datetime,))
|
| 982 |
+
|
| 983 |
+
@classmethod
|
| 984 |
+
def utcdayofyear(cls, datetime: IntoExpression, /) -> Expression:
|
| 985 |
+
"""Returns the one-based day of the year for the given ``datetime`` value, in UTC time."""
|
| 986 |
+
return FunctionExpression("utcdayofyear", (datetime,))
|
| 987 |
+
|
| 988 |
+
@classmethod
|
| 989 |
+
def utcyear(cls, datetime: IntoExpression, /) -> Expression:
|
| 990 |
+
"""Returns the year for the given ``datetime`` value, in UTC time."""
|
| 991 |
+
return FunctionExpression("utcyear", (datetime,))
|
| 992 |
+
|
| 993 |
+
@classmethod
|
| 994 |
+
def utcquarter(cls, datetime: IntoExpression, /) -> Expression:
|
| 995 |
+
"""Returns the quarter of the year (0-3) for the given ``datetime`` value, in UTC time."""
|
| 996 |
+
return FunctionExpression("utcquarter", (datetime,))
|
| 997 |
+
|
| 998 |
+
@classmethod
|
| 999 |
+
def utcmonth(cls, datetime: IntoExpression, /) -> Expression:
|
| 1000 |
+
"""Returns the (zero-based) month for the given ``datetime`` value, in UTC time."""
|
| 1001 |
+
return FunctionExpression("utcmonth", (datetime,))
|
| 1002 |
+
|
| 1003 |
+
@classmethod
|
| 1004 |
+
def utcweek(cls, date: IntoExpression, /) -> Expression:
|
| 1005 |
+
"""
|
| 1006 |
+
Returns the week number of the year for the given *datetime*, in UTC time.
|
| 1007 |
+
|
| 1008 |
+
This function assumes Sunday-based weeks. Days before the first Sunday of the year are
|
| 1009 |
+
considered to be in week 0, the first Sunday of the year is the start of week 1, the second
|
| 1010 |
+
Sunday week 2, *etc.*.
|
| 1011 |
+
"""
|
| 1012 |
+
return FunctionExpression("utcweek", (date,))
|
| 1013 |
+
|
| 1014 |
+
@classmethod
|
| 1015 |
+
def utchours(cls, datetime: IntoExpression, /) -> Expression:
|
| 1016 |
+
"""Returns the hours component for the given ``datetime`` value, in UTC time."""
|
| 1017 |
+
return FunctionExpression("utchours", (datetime,))
|
| 1018 |
+
|
| 1019 |
+
@classmethod
|
| 1020 |
+
def utcminutes(cls, datetime: IntoExpression, /) -> Expression:
|
| 1021 |
+
"""Returns the minutes component for the given ``datetime`` value, in UTC time."""
|
| 1022 |
+
return FunctionExpression("utcminutes", (datetime,))
|
| 1023 |
+
|
| 1024 |
+
@classmethod
|
| 1025 |
+
def utcseconds(cls, datetime: IntoExpression, /) -> Expression:
|
| 1026 |
+
"""Returns the seconds component for the given ``datetime`` value, in UTC time."""
|
| 1027 |
+
return FunctionExpression("utcseconds", (datetime,))
|
| 1028 |
+
|
| 1029 |
+
@classmethod
|
| 1030 |
+
def utcmilliseconds(cls, datetime: IntoExpression, /) -> Expression:
|
| 1031 |
+
"""Returns the milliseconds component for the given ``datetime`` value, in UTC time."""
|
| 1032 |
+
return FunctionExpression("utcmilliseconds", (datetime,))
|
| 1033 |
+
|
| 1034 |
+
@classmethod
|
| 1035 |
+
def utcOffset(
|
| 1036 |
+
cls, unit: IntoExpression, date: IntoExpression, step: IntoExpression = None, /
|
| 1037 |
+
) -> Expression:
|
| 1038 |
+
"""
|
| 1039 |
+
Returns a new ``Date`` instance that offsets the given ``date`` by the specified time `*unit*`_ in UTC time.
|
| 1040 |
+
|
| 1041 |
+
The optional ``step`` argument indicates the number of time unit steps to offset by (default
|
| 1042 |
+
1).
|
| 1043 |
+
|
| 1044 |
+
.. _*unit*:
|
| 1045 |
+
https://vega.github.io/vega/docs/api/time/#time-units
|
| 1046 |
+
"""
|
| 1047 |
+
return FunctionExpression("utcOffset", (unit, date, step))
|
| 1048 |
+
|
| 1049 |
+
@classmethod
|
| 1050 |
+
def utcSequence(
|
| 1051 |
+
cls,
|
| 1052 |
+
unit: IntoExpression,
|
| 1053 |
+
start: IntoExpression,
|
| 1054 |
+
stop: IntoExpression,
|
| 1055 |
+
step: IntoExpression = None,
|
| 1056 |
+
/,
|
| 1057 |
+
) -> Expression:
|
| 1058 |
+
"""
|
| 1059 |
+
Returns an array of ``Date`` instances from ``start`` (inclusive) to ``stop`` (exclusive), with each entry separated by the given time `*unit*`_ in UTC time.
|
| 1060 |
+
|
| 1061 |
+
The optional ``step`` argument indicates the number of time unit steps to take between each
|
| 1062 |
+
sequence entry (default 1).
|
| 1063 |
+
|
| 1064 |
+
.. _*unit*:
|
| 1065 |
+
https://vega.github.io/vega/docs/api/time/#time-units
|
| 1066 |
+
"""
|
| 1067 |
+
return FunctionExpression("utcSequence", (unit, start, stop, step))
|
| 1068 |
+
|
| 1069 |
+
@classmethod
def extent(cls, array: IntoExpression, /) -> Expression:
    """Return a new *[min, max]* array spanning the input ``array``, skipping ``null``, ``undefined``, and ``NaN`` entries."""
    fn_args = (array,)
    return FunctionExpression("extent", fn_args)

@classmethod
def clampRange(
    cls, range: IntoExpression, min: IntoExpression, max: IntoExpression, /
) -> Expression:
    """
    Clamp a two-element ``range`` array while preserving its span.

    When the span of ``range`` is smaller than *(max - min)* and an endpoint
    falls outside *[min, max]*, the range is shifted so that its span is kept
    and one endpoint touches the boundary. When the span exceeds
    *(max - min)*, the range *[min, max]* itself is returned.
    """
    fn_args = (range, min, max)
    return FunctionExpression("clampRange", fn_args)

@classmethod
def indexof(cls, array: IntoExpression, value: IntoExpression, /) -> Expression:
    """Return the first index at which ``value`` occurs in the input ``array``."""
    fn_args = (array, value)
    return FunctionExpression("indexof", fn_args)

@classmethod
def inrange(cls, value: IntoExpression, range: IntoExpression, /) -> Expression:
    """Test whether ``value`` falls between (or equals either of) the first and last entries of the ``range`` array."""
    fn_args = (value, range)
    return FunctionExpression("inrange", fn_args)

@classmethod
def join(
    cls, array: IntoExpression, separator: IntoExpression = None, /
) -> Expression:
    """Concatenate all elements of the input ``array`` into one new string, joined by the given ``separator`` (commas by default)."""
    fn_args = (array, separator)
    return FunctionExpression("join", fn_args)

@classmethod
def lastindexof(cls, array: IntoExpression, value: IntoExpression, /) -> Expression:
    """Return the last index at which ``value`` occurs in the input ``array``."""
    fn_args = (array, value)
    return FunctionExpression("lastindexof", fn_args)

@classmethod
def length(cls, array: IntoExpression, /) -> Expression:
    """Return the number of elements in the input ``array``."""
    fn_args = (array,)
    return FunctionExpression("length", fn_args)

@classmethod
def lerp(cls, array: IntoExpression, fraction: IntoExpression, /) -> Expression:
    """
    Linearly interpolate between the first and last entries of ``array``.

    ``fraction`` (typically between 0 and 1) selects the interpolation
    point; for example, ``alt.expr.lerp([0, 50], 0.5)`` returns 25.
    """
    fn_args = (array, fraction)
    return FunctionExpression("lerp", fn_args)

@classmethod
def peek(cls, array: IntoExpression, /) -> Expression:
    """
    Return the final element of the input ``array`` without removing it.

    A convenient shorthand for ``array[array.length - 1]``; unlike the
    built-in ``Array.pop``, the last element is left in place.
    """
    fn_args = (array,)
    return FunctionExpression("peek", fn_args)

@classmethod
def pluck(cls, array: IntoExpression, field: IntoExpression, /) -> Expression:
    """
    Extract the value of ``field`` from each object in the given ``array``.

    The ``field`` string may name nested properties (e.g., ``foo.bar.bz``).
    """
    fn_args = (array, field)
    return FunctionExpression("pluck", fn_args)

@classmethod
def reverse(cls, array: IntoExpression, /) -> Expression:
    """
    Return a new array holding the elements of ``array`` in reverse order.

    The first element becomes the last and the last becomes the first.
    """
    fn_args = (array,)
    return FunctionExpression("reverse", fn_args)

@classmethod
def sequence(cls, *args: Any) -> Expression:
    """
    Build an array holding an arithmetic sequence of numbers.

    ``start`` defaults to 0 and ``step`` to 1 when omitted; ``stop`` is
    exclusive and never included in the result. With a positive ``step`` the
    final element is the largest *start + i * step* below ``stop``; with a
    negative ``step`` it is the smallest such value above ``stop``. A
    sequence that would be infinite yields an empty range. Arguments need
    not be integers.
    """
    fn_args = args
    return FunctionExpression("sequence", fn_args)

@classmethod
def slice(
    cls, array: IntoExpression, start: IntoExpression, end: IntoExpression = None, /
) -> Expression:
    """
    Extract the portion of ``array`` between the ``start`` and ``end`` indices.

    A negative ``end`` counts back from the end of the array
    (*alt.expr.length(array) + end*).
    """
    fn_args = (array, start, end)
    return FunctionExpression("slice", fn_args)

@classmethod
def span(cls, array: IntoExpression, /) -> Expression:
    """Return the span of ``array`` — the difference between its last and first elements, *array[array.length-1] - array[0]*."""
    fn_args = (array,)
    return FunctionExpression("span", fn_args)

@classmethod
def lower(cls, string: IntoExpression, /) -> Expression:
    """Convert ``string`` to lower-case letters."""
    fn_args = (string,)
    return FunctionExpression("lower", fn_args)

@classmethod
def pad(
    cls,
    string: IntoExpression,
    length: IntoExpression,
    character: IntoExpression = None,
    align: IntoExpression = None,
    /,
) -> Expression:
    """
    Pad ``string`` with repeated ``character`` instances until it reaches ``length``.

    A space (' ') is used when ``character`` is omitted. The optional
    ``align`` argument chooses where padding is added: ``'left'``
    (beginning), ``'center'``, or ``'right'`` (end, the default).
    """
    fn_args = (string, length, character, align)
    return FunctionExpression("pad", fn_args)

@classmethod
def parseFloat(cls, string: IntoExpression, /) -> Expression:
    """
    Convert the input ``string`` to a floating-point value.

    Behaves like JavaScript's ``parseFloat``.
    """
    fn_args = (string,)
    return FunctionExpression("parseFloat", fn_args)

@classmethod
def parseInt(cls, string: IntoExpression, /) -> Expression:
    """
    Convert the input ``string`` to an integer value.

    Behaves like JavaScript's ``parseInt``.
    """
    fn_args = (string,)
    return FunctionExpression("parseInt", fn_args)

@classmethod
def replace(
    cls,
    string: IntoExpression,
    pattern: IntoExpression,
    replacement: IntoExpression,
    /,
) -> Expression:
    """
    Build a new string in which matches of ``pattern`` are replaced by ``replacement``.

    ``pattern`` may be a string or a regular expression; when it is a
    string, only its first occurrence is replaced. Mirrors `JavaScript's
    String.replace`_.

    .. _JavaScript's String.replace:
        https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace
    """
    fn_args = (string, pattern, replacement)
    return FunctionExpression("replace", fn_args)

@classmethod
def substring(
    cls,
    string: IntoExpression,
    start: IntoExpression,
    end: IntoExpression = None,
    /,
) -> Expression:
    """Extract the part of ``string`` lying between the ``start`` and ``end`` indices."""
    fn_args = (string, start, end)
    return FunctionExpression("substring", fn_args)

@classmethod
def trim(cls, string: IntoExpression, /) -> Expression:
    """Strip leading and trailing whitespace from ``string`` and return the trimmed result."""
    fn_args = (string,)
    return FunctionExpression("trim", fn_args)

@classmethod
def truncate(
    cls,
    string: IntoExpression,
    length: IntoExpression,
    align: IntoExpression = None,
    ellipsis: IntoExpression = None,
    /,
) -> Expression:
    """
    Shorten the input ``string`` to a target ``length``.

    The optional ``align`` argument chooses which part is cut: ``'left'``
    (the beginning), ``'center'``, or ``'right'`` (the end, the default).
    The optional ``ellipsis`` argument sets the marker shown where content
    was removed; the ellipsis character ``…`` (``\u2026``) is used by
    default.
    """
    fn_args = (string, length, align, ellipsis)
    return FunctionExpression("truncate", fn_args)

@classmethod
def upper(cls, string: IntoExpression, /) -> Expression:
    """Convert ``string`` to upper-case letters."""
    fn_args = (string,)
    return FunctionExpression("upper", fn_args)

@classmethod
def merge(
    cls, object1: IntoExpression, object2: IntoExpression = None, *args: Any
) -> Expression:
    """
    Combine the input objects ``object1``, ``object2``, etc. into one new output object.

    Inputs are visited in order, so keys appearing in later arguments
    overwrite values from earlier ones. Example:
    ``alt.expr.merge({a:1, b:2}, {a:3}) -> {a:3, b:2}``.
    """
    fn_args = (object1, object2, *args)
    return FunctionExpression("merge", fn_args)

@classmethod
def dayFormat(cls, day: IntoExpression, /) -> Expression:
    """
    Render a (0-6) *weekday* number as a full week day name in the current locale.

    For example: ``alt.expr.dayFormat(0) -> "Sunday"``.
    """
    fn_args = (day,)
    return FunctionExpression("dayFormat", fn_args)

@classmethod
def dayAbbrevFormat(cls, day: IntoExpression, /) -> Expression:
    """
    Render a (0-6) *weekday* number as an abbreviated week day name in the current locale.

    For example: ``alt.expr.dayAbbrevFormat(0) -> "Sun"``.
    """
    fn_args = (day,)
    return FunctionExpression("dayAbbrevFormat", fn_args)

@classmethod
def format(cls, value: IntoExpression, specifier: IntoExpression, /) -> Expression:
    """
    Render a numeric ``value`` as a string.

    The ``specifier`` must be a valid `d3-format specifier`_ (e.g.,
    ``alt.expr.format(value, ',.2f')``. Null values render as ``"null"``.

    .. _d3-format specifier:
        https://github.com/d3/d3-format/
    """
    fn_args = (value, specifier)
    return FunctionExpression("format", fn_args)

@classmethod
def monthFormat(cls, month: IntoExpression, /) -> Expression:
    """
    Render a (zero-based) ``month`` number as a full month name in the current locale.

    For example: ``alt.expr.monthFormat(0) -> "January"``.
    """
    fn_args = (month,)
    return FunctionExpression("monthFormat", fn_args)

@classmethod
def monthAbbrevFormat(cls, month: IntoExpression, /) -> Expression:
    """
    Render a (zero-based) ``month`` number as an abbreviated month name in the current locale.

    For example: ``alt.expr.monthAbbrevFormat(0) -> "Jan"``.
    """
    fn_args = (month,)
    return FunctionExpression("monthAbbrevFormat", fn_args)

@classmethod
def timeUnitSpecifier(
    cls, units: IntoExpression, specifiers: IntoExpression = None, /
) -> Expression:
    """
    Build a time format specifier string for the given time `*units*`_.

    The optional ``specifiers`` object supplies specifier sub-strings for
    customizing the format; see the `timeUnitSpecifier API documentation`_
    for details. The resulting specifier can be fed to the `timeFormat`_ or
    `utcFormat`_ functions, or used as the *format* parameter of an axis or
    legend. Examples:
    ``alt.expr.timeFormat(date, alt.expr.timeUnitSpecifier('year'))`` or
    ``alt.expr.timeFormat(date, alt.expr.timeUnitSpecifier(['hours', 'minutes']))``.

    .. _*units*:
        https://vega.github.io/vega/docs/api/time/#time-units
    .. _timeUnitSpecifier API documentation:
        https://vega.github.io/vega/docs/api/time/#timeUnitSpecifier
    .. _timeFormat:
        https://vega.github.io/vega/docs/expressions/#timeFormat
    .. _utcFormat:
        https://vega.github.io/vega/docs/expressions/#utcFormat
    """
    fn_args = (units, specifiers)
    return FunctionExpression("timeUnitSpecifier", fn_args)

@classmethod
def timeFormat(
    cls, value: IntoExpression, specifier: IntoExpression, /
) -> Expression:
    """
    Render a datetime ``value`` (a ``Date`` object or timestamp) as a string in local time.

    The ``specifier`` must be a valid `d3-time-format specifier`_ or
    `TimeMultiFormat object`_. For example:
    ``alt.expr.timeFormat(timestamp, '%A')``. Null values render as
    ``"null"``.

    .. _d3-time-format specifier:
        https://github.com/d3/d3-time-format/
    .. _TimeMultiFormat object:
        https://vega.github.io/vega/docs/types/#TimeMultiFormat
    """
    fn_args = (value, specifier)
    return FunctionExpression("timeFormat", fn_args)

@classmethod
def timeParse(
    cls, string: IntoExpression, specifier: IntoExpression, /
) -> Expression:
    """
    Parse a ``string`` value into a Date object using local time.

    The ``specifier`` must be a valid `d3-time-format specifier`_. For
    example: ``alt.expr.timeParse('June 30, 2015', '%B %d, %Y')``.

    .. _d3-time-format specifier:
        https://github.com/d3/d3-time-format/
    """
    fn_args = (string, specifier)
    return FunctionExpression("timeParse", fn_args)

@classmethod
def utcFormat(
    cls, value: IntoExpression, specifier: IntoExpression, /
) -> Expression:
    """
    Render a datetime ``value`` (a ``Date`` object or timestamp) as a string in `UTC`_ time.

    The ``specifier`` must be a valid `d3-time-format specifier`_ or
    `TimeMultiFormat object`_. For example:
    ``alt.expr.utcFormat(timestamp, '%A')``. Null values render as
    ``"null"``.

    .. _UTC:
        https://en.wikipedia.org/wiki/Coordinated_Universal_Time
    .. _d3-time-format specifier:
        https://github.com/d3/d3-time-format/
    .. _TimeMultiFormat object:
        https://vega.github.io/vega/docs/types/#TimeMultiFormat
    """
    fn_args = (value, specifier)
    return FunctionExpression("utcFormat", fn_args)

@classmethod
def utcParse(
    cls, value: IntoExpression, specifier: IntoExpression, /
) -> Expression:
    """
    Parse a *string* value into a Date object using `UTC`_ time.

    The ``specifier`` must be a valid `d3-time-format specifier`_. For
    example: ``alt.expr.utcParse('June 30, 2015', '%B %d, %Y')``.

    .. _UTC:
        https://en.wikipedia.org/wiki/Coordinated_Universal_Time
    .. _d3-time-format specifier:
        https://github.com/d3/d3-time-format/
    """
    fn_args = (value, specifier)
    return FunctionExpression("utcParse", fn_args)

@classmethod
def regexp(
    cls, pattern: IntoExpression, flags: IntoExpression = None, /
) -> Expression:
    """
    Build a regular expression instance from an input ``pattern`` string and optional ``flags``.

    Mirrors `JavaScript's RegExp`_.

    .. _JavaScript's RegExp:
        https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp
    """
    fn_args = (pattern, flags)
    return FunctionExpression("regexp", fn_args)

@classmethod
def test(
    cls, regexp: IntoExpression, string: IntoExpression = None, /
) -> Expression:
    r"""
    Match the input ``string`` against the regular expression ``regexp``, yielding ``true`` when the pattern matches and ``false`` otherwise.

    For example: ``alt.expr.test(/\\d{3}/, "32-21-9483") -> true``.
    """
    fn_args = (regexp, string)
    return FunctionExpression("test", fn_args)

@classmethod
def rgb(cls, *args: Any) -> Expression:
    """
    Build a new `RGB`_ color.

    Accepts either ``r``, ``g`` and ``b`` channel values (optionally with an
    ``opacity``), or a single CSS Color Module Level 3 *specifier* string
    that is parsed and converted to the RGB color space. Backed by
    `d3-color's rgb function`_.

    .. _RGB:
        https://en.wikipedia.org/wiki/RGB_color_model
    .. _d3-color's rgb function:
        https://github.com/d3/d3-color#rgb
    """
    fn_args = args
    return FunctionExpression("rgb", fn_args)

@classmethod
def hsl(cls, *args: Any) -> Expression:
    """
    Build a new `HSL`_ color.

    Accepts either ``h``, ``s`` and ``l`` channel values (optionally with an
    ``opacity``), or a single CSS Color Module Level 3 *specifier* string
    that is parsed and converted to the HSL color space. Backed by
    `d3-color's hsl function`_.

    .. _HSL:
        https://en.wikipedia.org/wiki/HSL_and_HSV
    .. _d3-color's hsl function:
        https://github.com/d3/d3-color#hsl
    """
    fn_args = args
    return FunctionExpression("hsl", fn_args)

@classmethod
def lab(cls, *args: Any) -> Expression:
    """
    Build a new `CIE LAB`_ color.

    Accepts either ``l``, ``a`` and ``b`` channel values (optionally with an
    ``opacity``), or a single CSS Color Module Level 3 *specifier* string
    that is parsed and converted to the LAB color space. Backed by
    `d3-color's lab function`_.

    .. _CIE LAB:
        https://en.wikipedia.org/wiki/Lab_color_space#CIELAB
    .. _d3-color's lab function:
        https://github.com/d3/d3-color#lab
    """
    fn_args = args
    return FunctionExpression("lab", fn_args)

@classmethod
def hcl(cls, *args: Any) -> Expression:
    """
    Build a new `HCL`_ (hue, chroma, luminance) color.

    Accepts either ``h``, ``c`` and ``l`` channel values (optionally with an
    ``opacity``), or a single CSS Color Module Level 3 *specifier* string
    that is parsed and converted to the HCL color space. Backed by
    `d3-color's hcl function`_.

    .. _HCL:
        https://en.wikipedia.org/wiki/Lab_color_space#CIELAB
    .. _d3-color's hcl function:
        https://github.com/d3/d3-color#hcl
    """
    fn_args = args
    return FunctionExpression("hcl", fn_args)

@classmethod
def luminance(cls, specifier: IntoExpression, /) -> Expression:
    """
    Compute the luminance of the color named by ``specifier`` (any value accepted by `d3-color's rgb function`_).

    Luminance is computed per the `W3C Web Content Accessibility
    Guidelines`_.

    .. _d3-color's rgb function:
        https://github.com/d3/d3-color#rgb
    .. _W3C Web Content Accessibility Guidelines:
        https://www.w3.org/TR/2008/REC-WCAG20-20081211/#relativeluminancedef
    """
    fn_args = (specifier,)
    return FunctionExpression("luminance", fn_args)

@classmethod
def contrast(
    cls, specifier1: IntoExpression, specifier2: IntoExpression, /
) -> Expression:
    """
    Compute the contrast ratio of the two input color specifiers, a float between 1 and 21.

    Contrast is computed per the `W3C Web Content Accessibility
    Guidelines`_.

    .. _W3C Web Content Accessibility Guidelines:
        https://www.w3.org/TR/2008/REC-WCAG20-20081211/#contrast-ratiodef
    """
    fn_args = (specifier1, specifier2)
    return FunctionExpression("contrast", fn_args)

@classmethod
def item(cls) -> Expression:
    """Return the scenegraph item currently targeted by the event."""
    fn_args = ()
    return FunctionExpression("item", fn_args)

@classmethod
def group(cls, name: IntoExpression = None, /) -> Expression:
    """
    Return the scenegraph group mark item in which the current event occurred.

    With no arguments, the immediate parent group is returned; with a group
    ``name``, the matching ancestor group item is returned instead.
    """
    fn_args = (name,)
    return FunctionExpression("group", fn_args)

@classmethod
def xy(cls, item: IntoExpression = None, /) -> Expression:
    """
    Return the current event's x- and y-coordinates as a two-element array.

    Coordinates are taken in the view's top-level coordinate space unless a
    scenegraph ``item`` (or string group name) is given, in which case that
    group item's coordinate space is used.
    """
    fn_args = (item,)
    return FunctionExpression("xy", fn_args)

@classmethod
def x(cls, item: IntoExpression = None, /) -> Expression:
    """
    Return the current event's x coordinate.

    The coordinate is taken in the view's top-level coordinate space unless
    a scenegraph ``item`` (or string group name) is given, in which case
    that group item's coordinate space is used.
    """
    fn_args = (item,)
    return FunctionExpression("x", fn_args)

@classmethod
def y(cls, item: IntoExpression = None, /) -> Expression:
    """
    Return the current event's y coordinate.

    The coordinate is taken in the view's top-level coordinate space unless
    a scenegraph ``item`` (or string group name) is given, in which case
    that group item's coordinate space is used.
    """
    fn_args = (item,)
    return FunctionExpression("y", fn_args)

@classmethod
def pinchDistance(cls, event: IntoExpression, /) -> Expression:
    """Return the pixel distance separating the first two touch points of a multi-touch event."""
    fn_args = (event,)
    return FunctionExpression("pinchDistance", fn_args)

@classmethod
def pinchAngle(cls, event: IntoExpression, /) -> Expression:
    """Return the angle of the line joining the first two touch points of a multi-touch event."""
    fn_args = (event,)
    return FunctionExpression("pinchAngle", fn_args)

@classmethod
def inScope(cls, item: IntoExpression, /) -> Expression:
    """Return true when the scenegraph ``item`` is a descendant of the group mark in which the event handler was defined, false otherwise."""
    fn_args = (item,)
    return FunctionExpression("inScope", fn_args)

@classmethod
def data(cls, name: IntoExpression, /) -> Expression:
    """
    Look up the Vega data set called ``name`` and return its array of data objects.

    An empty array is returned when no such data set exists.
    """
    fn_args = (name,)
    return FunctionExpression("data", fn_args)

@classmethod
def indata(
    cls, name: IntoExpression, field: IntoExpression, value: IntoExpression, /
) -> Expression:
    """
    Check whether the data set called ``name`` holds a datum whose ``field`` equals the input ``value``.

    For example: ``alt.expr.indata('table', 'category', value)``.
    """
    fn_args = (name, field, value)
    return FunctionExpression("indata", fn_args)

@classmethod
def scale(
    cls,
    name: IntoExpression,
    value: IntoExpression,
    group: IntoExpression = None,
    /,
) -> Expression:
    """
    Apply the named scale transform (or projection) to ``value``.

    The optional ``group`` argument takes a scenegraph group mark item
    naming the scope in which the scale or projection is resolved.
    """
    fn_args = (name, value, group)
    return FunctionExpression("scale", fn_args)

@classmethod
def invert(
    cls,
    name: IntoExpression,
    value: IntoExpression,
    group: IntoExpression = None,
    /,
) -> Expression:
    """
    Invert the named scale transform (or projection) for ``value``.

    The optional ``group`` argument takes a scenegraph group mark item
    naming the scope in which the scale or projection is resolved.
    """
    fn_args = (name, value, group)
    return FunctionExpression("invert", fn_args)

@classmethod
def copy(cls, name: IntoExpression, group: IntoExpression = None, /) -> Expression:  # type: ignore[override]
    """
    Clone the named scale transform or projection, yielding ``undefined`` when no scale or projection is found.

    The optional ``group`` argument takes a scenegraph group mark item
    naming the scope in which the scale or projection is resolved.
    """
    fn_args = (name, group)
    return FunctionExpression("copy", fn_args)

@classmethod
def domain(
    cls, name: IntoExpression, group: IntoExpression = None, /
) -> Expression:
    """
    Return the domain array of the named scale transform, or an empty array when the scale is not found.

    The optional ``group`` argument takes a scenegraph group mark item
    naming the scope in which the scale is resolved.
    """
    fn_args = (name, group)
    return FunctionExpression("domain", fn_args)

@classmethod
def range(cls, name: IntoExpression, group: IntoExpression = None, /) -> Expression:
    """
    Return the range array of the named scale transform, or an empty array when the scale is not found.

    The optional ``group`` argument takes a scenegraph group mark item
    naming the scope in which the scale is resolved.
    """
    fn_args = (name, group)
    return FunctionExpression("range", fn_args)

@classmethod
def bandwidth(
    cls, name: IntoExpression, group: IntoExpression = None, /
) -> Expression:
    """
    Return the current band width of the named band scale transform, or zero when the scale is missing or is not a band scale.

    The optional ``group`` argument takes a scenegraph group mark item
    naming the scope in which the scale is resolved.
    """
    fn_args = (name, group)
    return FunctionExpression("bandwidth", fn_args)

@classmethod
def bandspace(
    cls,
    count: IntoExpression,
    paddingInner: IntoExpression = None,
    paddingOuter: IntoExpression = None,
    /,
) -> Expression:
    """
    Compute the number of steps a band scale needs, given the ``count`` of domain elements and the inner and outer padding values.

    The scale normally computes this internally; exposing it is helpful when
    determining the size of a chart's layout.
    """
    fn_args = (count, paddingInner, paddingOuter)
    return FunctionExpression("bandspace", fn_args)

@classmethod
def gradient(
    cls,
    scale: IntoExpression,
    p0: IntoExpression,
    p1: IntoExpression,
    count: IntoExpression = None,
    /,
) -> Expression:
    """
    Build a linear color gradient for ``scale`` (whose range must be a `continuous color scheme`_) between the start and end points ``p0`` and ``p1``, each an *[x, y]* array.

    ``p0`` and ``p1`` are normalized coordinates in the domain [0, 1],
    relative to the bounds of the item being colored. When unspecified,
    ``p0`` defaults to ``[0, 0]`` and ``p1`` to ``[1, 0]`` — a horizontal
    gradient spanning the item's full bounds. The optional ``count``
    argument sets a desired target number of sample points drawn from the
    color scale.

    .. _continuous color scheme:
        https://vega.github.io/vega/docs/schemes
    """
    fn_args = (scale, p0, p1, count)
    return FunctionExpression("gradient", fn_args)

@classmethod
|
| 1761 |
+
def panLinear(cls, domain: IntoExpression, delta: IntoExpression, /) -> Expression:
|
| 1762 |
+
"""
|
| 1763 |
+
Given a linear scale ``domain`` array with numeric or datetime values, returns a new two-element domain array that is the result of panning the domain by a fractional ``delta``.
|
| 1764 |
+
|
| 1765 |
+
The ``delta`` value represents fractional units of the scale range; for example, ``0.5``
|
| 1766 |
+
indicates panning the scale domain to the right by half the scale range.
|
| 1767 |
+
"""
|
| 1768 |
+
return FunctionExpression("panLinear", (domain, delta))
|
| 1769 |
+
|
| 1770 |
+
@classmethod
|
| 1771 |
+
def panLog(cls, domain: IntoExpression, delta: IntoExpression, /) -> Expression:
|
| 1772 |
+
"""
|
| 1773 |
+
Given a log scale ``domain`` array with numeric or datetime values, returns a new two-element domain array that is the result of panning the domain by a fractional ``delta``.
|
| 1774 |
+
|
| 1775 |
+
The ``delta`` value represents fractional units of the scale range; for example, ``0.5``
|
| 1776 |
+
indicates panning the scale domain to the right by half the scale range.
|
| 1777 |
+
"""
|
| 1778 |
+
return FunctionExpression("panLog", (domain, delta))
|
| 1779 |
+
|
| 1780 |
+
@classmethod
|
| 1781 |
+
def panPow(
|
| 1782 |
+
cls, domain: IntoExpression, delta: IntoExpression, exponent: IntoExpression, /
|
| 1783 |
+
) -> Expression:
|
| 1784 |
+
"""
|
| 1785 |
+
Given a power scale ``domain`` array with numeric or datetime values and the given ``exponent``, returns a new two-element domain array that is the result of panning the domain by a fractional ``delta``.
|
| 1786 |
+
|
| 1787 |
+
The ``delta`` value represents fractional units of the scale range; for example, ``0.5``
|
| 1788 |
+
indicates panning the scale domain to the right by half the scale range.
|
| 1789 |
+
"""
|
| 1790 |
+
return FunctionExpression("panPow", (domain, delta, exponent))
|
| 1791 |
+
|
| 1792 |
+
@classmethod
|
| 1793 |
+
def panSymlog(
|
| 1794 |
+
cls, domain: IntoExpression, delta: IntoExpression, constant: IntoExpression, /
|
| 1795 |
+
) -> Expression:
|
| 1796 |
+
"""
|
| 1797 |
+
Given a symmetric log scale ``domain`` array with numeric or datetime values parameterized by the given ``constant``, returns a new two-element domain array that is the result of panning the domain by a fractional ``delta``.
|
| 1798 |
+
|
| 1799 |
+
The ``delta`` value represents fractional units of the scale range; for example, ``0.5``
|
| 1800 |
+
indicates panning the scale domain to the right by half the scale range.
|
| 1801 |
+
"""
|
| 1802 |
+
return FunctionExpression("panSymlog", (domain, delta, constant))
|
| 1803 |
+
|
| 1804 |
+
@classmethod
|
| 1805 |
+
def zoomLinear(
|
| 1806 |
+
cls,
|
| 1807 |
+
domain: IntoExpression,
|
| 1808 |
+
anchor: IntoExpression,
|
| 1809 |
+
scaleFactor: IntoExpression,
|
| 1810 |
+
/,
|
| 1811 |
+
) -> Expression:
|
| 1812 |
+
"""
|
| 1813 |
+
Given a linear scale ``domain`` array with numeric or datetime values, returns a new two-element domain array that is the result of zooming the domain by a ``scaleFactor``, centered at the provided fractional ``anchor``.
|
| 1814 |
+
|
| 1815 |
+
The ``anchor`` value represents the zoom position in terms of fractional units of the scale
|
| 1816 |
+
range; for example, ``0.5`` indicates a zoom centered on the mid-point of the scale range.
|
| 1817 |
+
"""
|
| 1818 |
+
return FunctionExpression("zoomLinear", (domain, anchor, scaleFactor))
|
| 1819 |
+
|
| 1820 |
+
@classmethod
|
| 1821 |
+
def zoomLog(
|
| 1822 |
+
cls,
|
| 1823 |
+
domain: IntoExpression,
|
| 1824 |
+
anchor: IntoExpression,
|
| 1825 |
+
scaleFactor: IntoExpression,
|
| 1826 |
+
/,
|
| 1827 |
+
) -> Expression:
|
| 1828 |
+
"""
|
| 1829 |
+
Given a log scale ``domain`` array with numeric or datetime values, returns a new two-element domain array that is the result of zooming the domain by a ``scaleFactor``, centered at the provided fractional ``anchor``.
|
| 1830 |
+
|
| 1831 |
+
The ``anchor`` value represents the zoom position in terms of fractional units of the scale
|
| 1832 |
+
range; for example, ``0.5`` indicates a zoom centered on the mid-point of the scale range.
|
| 1833 |
+
"""
|
| 1834 |
+
return FunctionExpression("zoomLog", (domain, anchor, scaleFactor))
|
| 1835 |
+
|
| 1836 |
+
@classmethod
|
| 1837 |
+
def zoomPow(
|
| 1838 |
+
cls,
|
| 1839 |
+
domain: IntoExpression,
|
| 1840 |
+
anchor: IntoExpression,
|
| 1841 |
+
scaleFactor: IntoExpression,
|
| 1842 |
+
exponent: IntoExpression,
|
| 1843 |
+
/,
|
| 1844 |
+
) -> Expression:
|
| 1845 |
+
"""
|
| 1846 |
+
Given a power scale ``domain`` array with numeric or datetime values and the given ``exponent``, returns a new two-element domain array that is the result of zooming the domain by a ``scaleFactor``, centered at the provided fractional ``anchor``.
|
| 1847 |
+
|
| 1848 |
+
The ``anchor`` value represents the zoom position in terms of fractional units of the scale
|
| 1849 |
+
range; for example, ``0.5`` indicates a zoom centered on the mid-point of the scale range.
|
| 1850 |
+
"""
|
| 1851 |
+
return FunctionExpression("zoomPow", (domain, anchor, scaleFactor, exponent))
|
| 1852 |
+
|
| 1853 |
+
@classmethod
|
| 1854 |
+
def zoomSymlog(
|
| 1855 |
+
cls,
|
| 1856 |
+
domain: IntoExpression,
|
| 1857 |
+
anchor: IntoExpression,
|
| 1858 |
+
scaleFactor: IntoExpression,
|
| 1859 |
+
constant: IntoExpression,
|
| 1860 |
+
/,
|
| 1861 |
+
) -> Expression:
|
| 1862 |
+
"""
|
| 1863 |
+
Given a symmetric log scale ``domain`` array with numeric or datetime values parameterized by the given ``constant``, returns a new two-element domain array that is the result of zooming the domain by a ``scaleFactor``, centered at the provided fractional ``anchor``.
|
| 1864 |
+
|
| 1865 |
+
The ``anchor`` value represents the zoom position in terms of fractional units of the scale
|
| 1866 |
+
range; for example, ``0.5`` indicates a zoom centered on the mid-point of the scale range.
|
| 1867 |
+
"""
|
| 1868 |
+
return FunctionExpression("zoomSymlog", (domain, anchor, scaleFactor, constant))
|
| 1869 |
+
|
| 1870 |
+
@classmethod
|
| 1871 |
+
def geoArea(
|
| 1872 |
+
cls,
|
| 1873 |
+
projection: IntoExpression,
|
| 1874 |
+
feature: IntoExpression,
|
| 1875 |
+
group: IntoExpression = None,
|
| 1876 |
+
/,
|
| 1877 |
+
) -> Expression:
|
| 1878 |
+
"""
|
| 1879 |
+
Returns the projected planar area (typically in square pixels) of a GeoJSON ``feature`` according to the named ``projection``.
|
| 1880 |
+
|
| 1881 |
+
If the ``projection`` argument is ``null``, computes the spherical area in steradians using
|
| 1882 |
+
unprojected longitude, latitude coordinates. The optional ``group`` argument takes a
|
| 1883 |
+
scenegraph group mark item to indicate the specific scope in which to look up the
|
| 1884 |
+
projection. Uses d3-geo's `geoArea`_ and `path.area`_ methods.
|
| 1885 |
+
|
| 1886 |
+
.. _geoArea:
|
| 1887 |
+
https://github.com/d3/d3-geo#geoArea
|
| 1888 |
+
.. _path.area:
|
| 1889 |
+
https://github.com/d3/d3-geo#path_area
|
| 1890 |
+
"""
|
| 1891 |
+
return FunctionExpression("geoArea", (projection, feature, group))
|
| 1892 |
+
|
| 1893 |
+
@classmethod
|
| 1894 |
+
def geoBounds(
|
| 1895 |
+
cls,
|
| 1896 |
+
projection: IntoExpression,
|
| 1897 |
+
feature: IntoExpression,
|
| 1898 |
+
group: IntoExpression = None,
|
| 1899 |
+
/,
|
| 1900 |
+
) -> Expression:
|
| 1901 |
+
"""
|
| 1902 |
+
Returns the projected planar bounding box (typically in pixels) for the specified GeoJSON ``feature``, according to the named ``projection``.
|
| 1903 |
+
|
| 1904 |
+
The bounding box is represented by a two-dimensional array: [[*x₀*, *y₀*], [*x₁*, *y₁*]],
|
| 1905 |
+
where *x₀* is the minimum x-coordinate, *y₀* is the minimum y-coordinate, *x₁* is the
|
| 1906 |
+
maximum x-coordinate, and *y₁* is the maximum y-coordinate. If the ``projection`` argument
|
| 1907 |
+
is ``null``, computes the spherical bounding box using unprojected longitude, latitude
|
| 1908 |
+
coordinates. The optional ``group`` argument takes a scenegraph group mark item to indicate
|
| 1909 |
+
the specific scope in which to look up the projection. Uses d3-geo's `geoBounds`_ and
|
| 1910 |
+
`path.bounds`_ methods.
|
| 1911 |
+
|
| 1912 |
+
.. _geoBounds:
|
| 1913 |
+
https://github.com/d3/d3-geo#geoBounds
|
| 1914 |
+
.. _path.bounds:
|
| 1915 |
+
https://github.com/d3/d3-geo#path_bounds
|
| 1916 |
+
"""
|
| 1917 |
+
return FunctionExpression("geoBounds", (projection, feature, group))
|
| 1918 |
+
|
| 1919 |
+
@classmethod
|
| 1920 |
+
def geoCentroid(
|
| 1921 |
+
cls,
|
| 1922 |
+
projection: IntoExpression,
|
| 1923 |
+
feature: IntoExpression,
|
| 1924 |
+
group: IntoExpression = None,
|
| 1925 |
+
/,
|
| 1926 |
+
) -> Expression:
|
| 1927 |
+
"""
|
| 1928 |
+
Returns the projected planar centroid (typically in pixels) for the specified GeoJSON ``feature``, according to the named ``projection``.
|
| 1929 |
+
|
| 1930 |
+
If the ``projection`` argument is ``null``, computes the spherical centroid using
|
| 1931 |
+
unprojected longitude, latitude coordinates. The optional ``group`` argument takes a
|
| 1932 |
+
scenegraph group mark item to indicate the specific scope in which to look up the
|
| 1933 |
+
projection. Uses d3-geo's `geoCentroid`_ and `path.centroid`_ methods.
|
| 1934 |
+
|
| 1935 |
+
.. _geoCentroid:
|
| 1936 |
+
https://github.com/d3/d3-geo#geoCentroid
|
| 1937 |
+
.. _path.centroid:
|
| 1938 |
+
https://github.com/d3/d3-geo#path_centroid
|
| 1939 |
+
"""
|
| 1940 |
+
return FunctionExpression("geoCentroid", (projection, feature, group))
|
| 1941 |
+
|
| 1942 |
+
@classmethod
|
| 1943 |
+
def geoScale(
|
| 1944 |
+
cls, projection: IntoExpression, group: IntoExpression = None, /
|
| 1945 |
+
) -> Expression:
|
| 1946 |
+
"""
|
| 1947 |
+
Returns the scale value for the named ``projection``.
|
| 1948 |
+
|
| 1949 |
+
The optional ``group`` argument takes a scenegraph group mark item to indicate the specific
|
| 1950 |
+
scope in which to look up the projection.
|
| 1951 |
+
"""
|
| 1952 |
+
return FunctionExpression("geoScale", (projection, group))
|
| 1953 |
+
|
| 1954 |
+
@classmethod
|
| 1955 |
+
def treePath(
|
| 1956 |
+
cls, name: IntoExpression, source: IntoExpression, target: IntoExpression, /
|
| 1957 |
+
) -> Expression:
|
| 1958 |
+
"""
|
| 1959 |
+
For the hierarchy data set with the given ``name``, returns the shortest path through from the ``source`` node id to the ``target`` node id.
|
| 1960 |
+
|
| 1961 |
+
The path starts at the ``source`` node, ascends to the least common ancestor of the
|
| 1962 |
+
``source`` node and the ``target`` node, and then descends to the ``target`` node.
|
| 1963 |
+
"""
|
| 1964 |
+
return FunctionExpression("treePath", (name, source, target))
|
| 1965 |
+
|
| 1966 |
+
@classmethod
|
| 1967 |
+
def treeAncestors(cls, name: IntoExpression, node: IntoExpression, /) -> Expression:
|
| 1968 |
+
"""For the hierarchy data set with the given ``name``, returns the array of ancestors nodes, starting with the input ``node``, then followed by each parent up to the root."""
|
| 1969 |
+
return FunctionExpression("treeAncestors", (name, node))
|
| 1970 |
+
|
| 1971 |
+
@classmethod
|
| 1972 |
+
def containerSize(cls) -> Expression:
|
| 1973 |
+
"""
|
| 1974 |
+
Returns the current CSS box size (``[el.clientWidth, el.clientHeight]``) of the parent DOM element that contains the Vega view.
|
| 1975 |
+
|
| 1976 |
+
If there is no container element, returns ``[undefined, undefined]``.
|
| 1977 |
+
"""
|
| 1978 |
+
return FunctionExpression("containerSize", ())
|
| 1979 |
+
|
| 1980 |
+
@classmethod
|
| 1981 |
+
def screen(cls) -> Expression:
|
| 1982 |
+
"""
|
| 1983 |
+
Returns the `window.screen`_ object, or ``{}`` if Vega is not running in a browser environment.
|
| 1984 |
+
|
| 1985 |
+
.. _window.screen:
|
| 1986 |
+
https://developer.mozilla.org/en-US/docs/Web/API/Window/screen
|
| 1987 |
+
"""
|
| 1988 |
+
return FunctionExpression("screen", ())
|
| 1989 |
+
|
| 1990 |
+
@classmethod
|
| 1991 |
+
def windowSize(cls) -> Expression:
|
| 1992 |
+
"""Returns the current window size (``[window.innerWidth, window.innerHeight]``) or ``[undefined, undefined]`` if Vega is not running in a browser environment."""
|
| 1993 |
+
return FunctionExpression("windowSize", ())
|
| 1994 |
+
|
| 1995 |
+
@classmethod
|
| 1996 |
+
def warn(
|
| 1997 |
+
cls, value1: IntoExpression, value2: IntoExpression = None, *args: Any
|
| 1998 |
+
) -> Expression:
|
| 1999 |
+
"""
|
| 2000 |
+
Logs a warning message and returns the last argument.
|
| 2001 |
+
|
| 2002 |
+
For the message to appear in the console, the visualization view must have the appropriate
|
| 2003 |
+
logging level set.
|
| 2004 |
+
"""
|
| 2005 |
+
return FunctionExpression("warn", (value1, value2, *args))
|
| 2006 |
+
|
| 2007 |
+
@classmethod
|
| 2008 |
+
def info(
|
| 2009 |
+
cls, value1: IntoExpression, value2: IntoExpression = None, *args: Any
|
| 2010 |
+
) -> Expression:
|
| 2011 |
+
"""
|
| 2012 |
+
Logs an informative message and returns the last argument.
|
| 2013 |
+
|
| 2014 |
+
For the message to appear in the console, the visualization view must have the appropriate
|
| 2015 |
+
logging level set.
|
| 2016 |
+
"""
|
| 2017 |
+
return FunctionExpression("info", (value1, value2, *args))
|
| 2018 |
+
|
| 2019 |
+
@classmethod
|
| 2020 |
+
def debug(
|
| 2021 |
+
cls, value1: IntoExpression, value2: IntoExpression = None, *args: Any
|
| 2022 |
+
) -> Expression:
|
| 2023 |
+
"""
|
| 2024 |
+
Logs a debugging message and returns the last argument.
|
| 2025 |
+
|
| 2026 |
+
For the message to appear in the console, the visualization view must have the appropriate
|
| 2027 |
+
logging level set.
|
| 2028 |
+
"""
|
| 2029 |
+
return FunctionExpression("debug", (value1, value2, *args))
|
| 2030 |
+
|
| 2031 |
+
|
| 2032 |
+
_ExprType = expr
|
| 2033 |
+
# NOTE: Compatibility alias for previous type of `alt.expr`.
|
| 2034 |
+
# `_ExprType` was not referenced in any internal imports/tests.
|
mgm/lib/python3.10/site-packages/altair/expr/consts.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
CONST_LISTING = {
|
| 4 |
+
"NaN": "not a number (same as JavaScript literal NaN)",
|
| 5 |
+
"LN10": "the natural log of 10 (alias to Math.LN10)",
|
| 6 |
+
"E": "the transcendental number e (alias to Math.E)",
|
| 7 |
+
"LOG10E": "the base 10 logarithm e (alias to Math.LOG10E)",
|
| 8 |
+
"LOG2E": "the base 2 logarithm of e (alias to Math.LOG2E)",
|
| 9 |
+
"SQRT1_2": "the square root of 0.5 (alias to Math.SQRT1_2)",
|
| 10 |
+
"LN2": "the natural log of 2 (alias to Math.LN2)",
|
| 11 |
+
"SQRT2": "the square root of 2 (alias to Math.SQRT1_2)",
|
| 12 |
+
"PI": "the transcendental number pi (alias to Math.PI)",
|
| 13 |
+
}
|
mgm/lib/python3.10/site-packages/altair/expr/core.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import datetime as dt
|
| 4 |
+
from typing import TYPE_CHECKING, Any, Literal, Union
|
| 5 |
+
|
| 6 |
+
from altair.utils import SchemaBase
|
| 7 |
+
|
| 8 |
+
if TYPE_CHECKING:
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
from altair.vegalite.v5.schema._typing import Map, PrimitiveValue_T
|
| 12 |
+
|
| 13 |
+
if sys.version_info >= (3, 10):
|
| 14 |
+
from typing import TypeAlias
|
| 15 |
+
else:
|
| 16 |
+
from typing_extensions import TypeAlias
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class DatumType:
|
| 20 |
+
"""An object to assist in building Vega-Lite Expressions."""
|
| 21 |
+
|
| 22 |
+
def __repr__(self) -> str:
|
| 23 |
+
return "datum"
|
| 24 |
+
|
| 25 |
+
def __getattr__(self, attr) -> GetAttrExpression:
|
| 26 |
+
if attr.startswith("__") and attr.endswith("__"):
|
| 27 |
+
raise AttributeError(attr)
|
| 28 |
+
return GetAttrExpression("datum", attr)
|
| 29 |
+
|
| 30 |
+
def __getitem__(self, attr) -> GetItemExpression:
|
| 31 |
+
return GetItemExpression("datum", attr)
|
| 32 |
+
|
| 33 |
+
def __call__(self, datum, **kwargs) -> dict[str, Any]:
|
| 34 |
+
"""Specify a datum for use in an encoding."""
|
| 35 |
+
return dict(datum=datum, **kwargs)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
datum = DatumType()
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _js_repr(val) -> str:
|
| 42 |
+
"""Return a javascript-safe string representation of val."""
|
| 43 |
+
if val is True:
|
| 44 |
+
return "true"
|
| 45 |
+
elif val is False:
|
| 46 |
+
return "false"
|
| 47 |
+
elif val is None:
|
| 48 |
+
return "null"
|
| 49 |
+
elif isinstance(val, OperatorMixin):
|
| 50 |
+
return val._to_expr()
|
| 51 |
+
elif isinstance(val, dt.date):
|
| 52 |
+
return _from_date_datetime(val)
|
| 53 |
+
else:
|
| 54 |
+
return repr(val)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def _from_date_datetime(obj: dt.date | dt.datetime, /) -> str:
|
| 58 |
+
"""
|
| 59 |
+
Parse native `datetime.(date|datetime)` into a `datetime expression`_ string.
|
| 60 |
+
|
| 61 |
+
**Month is 0-based**
|
| 62 |
+
|
| 63 |
+
.. _datetime expression:
|
| 64 |
+
https://vega.github.io/vega/docs/expressions/#datetime
|
| 65 |
+
"""
|
| 66 |
+
fn_name: Literal["datetime", "utc"] = "datetime"
|
| 67 |
+
args: tuple[int, ...] = obj.year, obj.month - 1, obj.day
|
| 68 |
+
if isinstance(obj, dt.datetime):
|
| 69 |
+
if tzinfo := obj.tzinfo:
|
| 70 |
+
if tzinfo is dt.timezone.utc:
|
| 71 |
+
fn_name = "utc"
|
| 72 |
+
else:
|
| 73 |
+
msg = (
|
| 74 |
+
f"Unsupported timezone {tzinfo!r}.\n"
|
| 75 |
+
"Only `'UTC'` or naive (local) datetimes are permitted.\n"
|
| 76 |
+
"See https://altair-viz.github.io/user_guide/generated/core/altair.DateTime.html"
|
| 77 |
+
)
|
| 78 |
+
raise TypeError(msg)
|
| 79 |
+
us = obj.microsecond
|
| 80 |
+
ms = us if us == 0 else us // 1_000
|
| 81 |
+
args = *args, obj.hour, obj.minute, obj.second, ms
|
| 82 |
+
return FunctionExpression(fn_name, args)._to_expr()
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
# Designed to work with Expression and VariableParameter
|
| 86 |
+
class OperatorMixin:
|
| 87 |
+
def _to_expr(self) -> str:
|
| 88 |
+
return repr(self)
|
| 89 |
+
|
| 90 |
+
def _from_expr(self, expr) -> Any:
|
| 91 |
+
return expr
|
| 92 |
+
|
| 93 |
+
def __add__(self, other):
|
| 94 |
+
comp_value = BinaryExpression("+", self, other)
|
| 95 |
+
return self._from_expr(comp_value)
|
| 96 |
+
|
| 97 |
+
def __radd__(self, other):
|
| 98 |
+
comp_value = BinaryExpression("+", other, self)
|
| 99 |
+
return self._from_expr(comp_value)
|
| 100 |
+
|
| 101 |
+
def __sub__(self, other):
|
| 102 |
+
comp_value = BinaryExpression("-", self, other)
|
| 103 |
+
return self._from_expr(comp_value)
|
| 104 |
+
|
| 105 |
+
def __rsub__(self, other):
|
| 106 |
+
comp_value = BinaryExpression("-", other, self)
|
| 107 |
+
return self._from_expr(comp_value)
|
| 108 |
+
|
| 109 |
+
def __mul__(self, other):
|
| 110 |
+
comp_value = BinaryExpression("*", self, other)
|
| 111 |
+
return self._from_expr(comp_value)
|
| 112 |
+
|
| 113 |
+
def __rmul__(self, other):
|
| 114 |
+
comp_value = BinaryExpression("*", other, self)
|
| 115 |
+
return self._from_expr(comp_value)
|
| 116 |
+
|
| 117 |
+
def __truediv__(self, other):
|
| 118 |
+
comp_value = BinaryExpression("/", self, other)
|
| 119 |
+
return self._from_expr(comp_value)
|
| 120 |
+
|
| 121 |
+
def __rtruediv__(self, other):
|
| 122 |
+
comp_value = BinaryExpression("/", other, self)
|
| 123 |
+
return self._from_expr(comp_value)
|
| 124 |
+
|
| 125 |
+
__div__ = __truediv__
|
| 126 |
+
|
| 127 |
+
__rdiv__ = __rtruediv__
|
| 128 |
+
|
| 129 |
+
def __mod__(self, other):
|
| 130 |
+
comp_value = BinaryExpression("%", self, other)
|
| 131 |
+
return self._from_expr(comp_value)
|
| 132 |
+
|
| 133 |
+
def __rmod__(self, other):
|
| 134 |
+
comp_value = BinaryExpression("%", other, self)
|
| 135 |
+
return self._from_expr(comp_value)
|
| 136 |
+
|
| 137 |
+
def __pow__(self, other):
|
| 138 |
+
# "**" Javascript operator is not supported in all browsers
|
| 139 |
+
comp_value = FunctionExpression("pow", (self, other))
|
| 140 |
+
return self._from_expr(comp_value)
|
| 141 |
+
|
| 142 |
+
def __rpow__(self, other):
|
| 143 |
+
# "**" Javascript operator is not supported in all browsers
|
| 144 |
+
comp_value = FunctionExpression("pow", (other, self))
|
| 145 |
+
return self._from_expr(comp_value)
|
| 146 |
+
|
| 147 |
+
def __neg__(self):
|
| 148 |
+
comp_value = UnaryExpression("-", self)
|
| 149 |
+
return self._from_expr(comp_value)
|
| 150 |
+
|
| 151 |
+
def __pos__(self):
|
| 152 |
+
comp_value = UnaryExpression("+", self)
|
| 153 |
+
return self._from_expr(comp_value)
|
| 154 |
+
|
| 155 |
+
# comparison operators
|
| 156 |
+
|
| 157 |
+
def __eq__(self, other):
|
| 158 |
+
comp_value = BinaryExpression("===", self, other)
|
| 159 |
+
return self._from_expr(comp_value)
|
| 160 |
+
|
| 161 |
+
def __ne__(self, other):
|
| 162 |
+
comp_value = BinaryExpression("!==", self, other)
|
| 163 |
+
return self._from_expr(comp_value)
|
| 164 |
+
|
| 165 |
+
def __gt__(self, other):
|
| 166 |
+
comp_value = BinaryExpression(">", self, other)
|
| 167 |
+
return self._from_expr(comp_value)
|
| 168 |
+
|
| 169 |
+
def __lt__(self, other):
|
| 170 |
+
comp_value = BinaryExpression("<", self, other)
|
| 171 |
+
return self._from_expr(comp_value)
|
| 172 |
+
|
| 173 |
+
def __ge__(self, other):
|
| 174 |
+
comp_value = BinaryExpression(">=", self, other)
|
| 175 |
+
return self._from_expr(comp_value)
|
| 176 |
+
|
| 177 |
+
def __le__(self, other):
|
| 178 |
+
comp_value = BinaryExpression("<=", self, other)
|
| 179 |
+
return self._from_expr(comp_value)
|
| 180 |
+
|
| 181 |
+
def __abs__(self):
|
| 182 |
+
comp_value = FunctionExpression("abs", (self,))
|
| 183 |
+
return self._from_expr(comp_value)
|
| 184 |
+
|
| 185 |
+
# logical operators
|
| 186 |
+
|
| 187 |
+
def __and__(self, other):
|
| 188 |
+
comp_value = BinaryExpression("&&", self, other)
|
| 189 |
+
return self._from_expr(comp_value)
|
| 190 |
+
|
| 191 |
+
def __rand__(self, other):
|
| 192 |
+
comp_value = BinaryExpression("&&", other, self)
|
| 193 |
+
return self._from_expr(comp_value)
|
| 194 |
+
|
| 195 |
+
def __or__(self, other):
|
| 196 |
+
comp_value = BinaryExpression("||", self, other)
|
| 197 |
+
return self._from_expr(comp_value)
|
| 198 |
+
|
| 199 |
+
def __ror__(self, other):
|
| 200 |
+
comp_value = BinaryExpression("||", other, self)
|
| 201 |
+
return self._from_expr(comp_value)
|
| 202 |
+
|
| 203 |
+
def __invert__(self):
|
| 204 |
+
comp_value = UnaryExpression("!", self)
|
| 205 |
+
return self._from_expr(comp_value)
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class Expression(OperatorMixin, SchemaBase):
|
| 209 |
+
"""
|
| 210 |
+
Expression.
|
| 211 |
+
|
| 212 |
+
Base object for enabling build-up of Javascript expressions using
|
| 213 |
+
a Python syntax. Calling ``repr(obj)`` will return a Javascript
|
| 214 |
+
representation of the object and the operations it encodes.
|
| 215 |
+
"""
|
| 216 |
+
|
| 217 |
+
_schema = {"type": "string"}
|
| 218 |
+
|
| 219 |
+
def to_dict(self, *args, **kwargs):
|
| 220 |
+
return repr(self)
|
| 221 |
+
|
| 222 |
+
def __setattr__(self, attr, val) -> None:
|
| 223 |
+
# We don't need the setattr magic defined in SchemaBase
|
| 224 |
+
return object.__setattr__(self, attr, val)
|
| 225 |
+
|
| 226 |
+
# item access
|
| 227 |
+
def __getitem__(self, val):
|
| 228 |
+
return GetItemExpression(self, val)
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
class UnaryExpression(Expression):
|
| 232 |
+
def __init__(self, op, val) -> None:
|
| 233 |
+
super().__init__(op=op, val=val)
|
| 234 |
+
|
| 235 |
+
def __repr__(self):
|
| 236 |
+
return f"({self.op}{_js_repr(self.val)})"
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class BinaryExpression(Expression):
|
| 240 |
+
def __init__(self, op, lhs, rhs) -> None:
|
| 241 |
+
super().__init__(op=op, lhs=lhs, rhs=rhs)
|
| 242 |
+
|
| 243 |
+
def __repr__(self):
|
| 244 |
+
return f"({_js_repr(self.lhs)} {self.op} {_js_repr(self.rhs)})"
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
class FunctionExpression(Expression):
|
| 248 |
+
def __init__(self, name, args) -> None:
|
| 249 |
+
super().__init__(name=name, args=args)
|
| 250 |
+
|
| 251 |
+
def __repr__(self):
|
| 252 |
+
args = ",".join(_js_repr(arg) for arg in self.args)
|
| 253 |
+
return f"{self.name}({args})"
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
class ConstExpression(Expression):
|
| 257 |
+
def __init__(self, name) -> None:
|
| 258 |
+
super().__init__(name=name)
|
| 259 |
+
|
| 260 |
+
def __repr__(self) -> str:
|
| 261 |
+
return str(self.name)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
class GetAttrExpression(Expression):
|
| 265 |
+
def __init__(self, group, name) -> None:
|
| 266 |
+
super().__init__(group=group, name=name)
|
| 267 |
+
|
| 268 |
+
def __repr__(self):
|
| 269 |
+
return f"{self.group}.{self.name}"
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
class GetItemExpression(Expression):
|
| 273 |
+
def __init__(self, group, name) -> None:
|
| 274 |
+
super().__init__(group=group, name=name)
|
| 275 |
+
|
| 276 |
+
def __repr__(self) -> str:
|
| 277 |
+
return f"{self.group}[{self.name!r}]"
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
IntoExpression: TypeAlias = Union[
|
| 281 |
+
"PrimitiveValue_T", dt.date, dt.datetime, OperatorMixin, "Map"
|
| 282 |
+
]
|
mgm/lib/python3.10/site-packages/altair/expr/funcs.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
FUNCTION_LISTING = {
|
| 4 |
+
"isArray": r"Returns true if _value_ is an array, false otherwise.",
|
| 5 |
+
"isBoolean": r"Returns true if _value_ is a boolean (`true` or `false`), false otherwise.",
|
| 6 |
+
"isDate": r"Returns true if _value_ is a Date object, false otherwise. This method will return false for timestamp numbers or date-formatted strings; it recognizes Date objects only.",
|
| 7 |
+
"isDefined": r"Returns true if _value_ is a defined value, false if _value_ equals `undefined`. This method will return true for `null` and `NaN` values.",
|
| 8 |
+
"isNumber": r"Returns true if _value_ is a number, false otherwise. `NaN` and `Infinity` are considered numbers.",
|
| 9 |
+
"isObject": r"Returns true if _value_ is an object (including arrays and Dates), false otherwise.",
|
| 10 |
+
"isRegExp": r"Returns true if _value_ is a RegExp (regular expression) object, false otherwise.",
|
| 11 |
+
"isString": r"Returns true if _value_ is a string, false otherwise.",
|
| 12 |
+
"isValid": r"Returns true if _value_ is not `null`, `undefined`, or `NaN`, false otherwise.",
|
| 13 |
+
"toBoolean": r"Coerces the input _value_ to a string. Null values and empty strings are mapped to `null`.",
|
| 14 |
+
"toDate": r"Coerces the input _value_ to a Date instance. Null values and empty strings are mapped to `null`. If an optional _parser_ function is provided, it is used to perform date parsing, otherwise `Date.parse` is used. Be aware that `Date.parse` has different implementations across browsers!",
|
| 15 |
+
"toNumber": r"Coerces the input _value_ to a number. Null values and empty strings are mapped to `null`.",
|
| 16 |
+
"toString": r"Coerces the input _value_ to a string. Null values and empty strings are mapped to `null`.",
|
| 17 |
+
"if": r"If _test_ is truthy, returns _thenValue_. Otherwise, returns _elseValue_. The _if_ function is equivalent to the ternary operator `a ? b : c`.",
|
| 18 |
+
"isNaN": r"Returns true if _value_ is not a number. Same as JavaScript's `isNaN`.",
|
| 19 |
+
"isFinite": r"Returns true if _value_ is a finite number. Same as JavaScript's `isFinite`.",
|
| 20 |
+
"abs": r"Returns the absolute value of _value_. Same as JavaScript's `Math.abs`.",
|
| 21 |
+
"acos": r"Trigonometric arccosine. Same as JavaScript's `Math.acos`.",
|
| 22 |
+
"asin": r"Trigonometric arcsine. Same as JavaScript's `Math.asin`.",
|
| 23 |
+
"atan": r"Trigonometric arctangent. Same as JavaScript's `Math.atan`.",
|
| 24 |
+
"atan2": r"Returns the arctangent of _dy / dx_. Same as JavaScript's `Math.atan2`.",
|
| 25 |
+
"ceil": r"Rounds _value_ to the nearest integer of equal or greater value. Same as JavaScript's `Math.ceil`.",
|
| 26 |
+
"clamp": r"Restricts _value_ to be between the specified _min_ and _max_.",
|
| 27 |
+
"cos": r"Trigonometric cosine. Same as JavaScript's `Math.cos`.",
|
| 28 |
+
"exp": r"Returns the value of _e_ raised to the provided _exponent_. Same as JavaScript's `Math.exp`.",
|
| 29 |
+
"floor": r"Rounds _value_ to the nearest integer of equal or lower value. Same as JavaScript's `Math.floor`.",
|
| 30 |
+
"hypot": r"Returns the square root of the sum of squares of its arguments. Same as JavaScript's `Math.hypot`.",
|
| 31 |
+
"log": r"Returns the natural logarithm of _value_. Same as JavaScript's `Math.log`.",
|
| 32 |
+
"max": r"Returns the maximum argument value. Same as JavaScript's `Math.max`.",
|
| 33 |
+
"min": r"Returns the minimum argument value. Same as JavaScript's `Math.min`.",
|
| 34 |
+
"pow": r"Returns _value_ raised to the given _exponent_. Same as JavaScript's `Math.pow`.",
|
| 35 |
+
"random": r"Returns a pseudo-random number in the range [0,1). Same as JavaScript's `Math.random`.",
|
| 36 |
+
"round": r"Rounds _value_ to the nearest integer. Same as JavaScript's `Math.round`.",
|
| 37 |
+
"sin": r"Trigonometric sine. Same as JavaScript's `Math.sin`.",
|
| 38 |
+
"sqrt": r"Square root function. Same as JavaScript's `Math.sqrt`.",
|
| 39 |
+
"tan": r"Trigonometric tangent. Same as JavaScript's `Math.tan`.",
|
| 40 |
+
"sampleNormal": r"Returns a sample from a univariate [normal (Gaussian) probability distribution](https://en.wikipedia.org/wiki/Normal_distribution) with specified _mean_ and standard deviation _stdev_. If unspecified, the mean defaults to `0` and the standard deviation defaults to `1`.",
|
| 41 |
+
"cumulativeNormal": r"Returns the value of the [cumulative distribution function](https://en.wikipedia.org/wiki/Cumulative_distribution_function) at the given input domain _value_ for a normal distribution with specified _mean_ and standard deviation _stdev_. If unspecified, the mean defaults to `0` and the standard deviation defaults to `1`.",
|
| 42 |
+
"densityNormal": r"Returns the value of the [probability density function](https://en.wikipedia.org/wiki/Probability_density_function) at the given input domain _value_, for a normal distribution with specified _mean_ and standard deviation _stdev_. If unspecified, the mean defaults to `0` and the standard deviation defaults to `1`.",
|
| 43 |
+
"quantileNormal": r"Returns the quantile value (the inverse of the [cumulative distribution function](https://en.wikipedia.org/wiki/Cumulative_distribution_function)) for the given input _probability_, for a normal distribution with specified _mean_ and standard deviation _stdev_. If unspecified, the mean defaults to `0` and the standard deviation defaults to `1`.",
|
| 44 |
+
"sampleLogNormal": r"Returns a sample from a univariate [log-normal probability distribution](https://en.wikipedia.org/wiki/Log-normal_distribution) with specified log _mean_ and log standard deviation _stdev_. If unspecified, the log mean defaults to `0` and the log standard deviation defaults to `1`.",
|
| 45 |
+
"cumulativeLogNormal": r"Returns the value of the [cumulative distribution function](https://en.wikipedia.org/wiki/Cumulative_distribution_function) at the given input domain _value_ for a log-normal distribution with specified log _mean_ and log standard deviation _stdev_. If unspecified, the log mean defaults to `0` and the log standard deviation defaults to `1`.",
|
| 46 |
+
"densityLogNormal": r"Returns the value of the [probability density function](https://en.wikipedia.org/wiki/Probability_density_function) at the given input domain _value_, for a log-normal distribution with specified log _mean_ and log standard deviation _stdev_. If unspecified, the log mean defaults to `0` and the log standard deviation defaults to `1`.",
|
| 47 |
+
"quantileLogNormal": r"Returns the quantile value (the inverse of the [cumulative distribution function](https://en.wikipedia.org/wiki/Cumulative_distribution_function)) for the given input _probability_, for a log-normal distribution with specified log _mean_ and log standard deviation _stdev_. If unspecified, the log mean defaults to `0` and the log standard deviation defaults to `1`.",
|
| 48 |
+
"sampleUniform": r"Returns a sample from a univariate [continuous uniform probability distribution](https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)) over the interval [_min_, _max_). If unspecified, _min_ defaults to `0` and _max_ defaults to `1`. If only one argument is provided, it is interpreted as the _max_ value.",
|
| 49 |
+
"cumulativeUniform": r"Returns the value of the [cumulative distribution function](https://en.wikipedia.org/wiki/Cumulative_distribution_function) at the given input domain _value_ for a uniform distribution over the interval [_min_, _max_). If unspecified, _min_ defaults to `0` and _max_ defaults to `1`. If only one argument is provided, it is interpreted as the _max_ value.",
|
| 50 |
+
"densityUniform": r"Returns the value of the [probability density function](https://en.wikipedia.org/wiki/Probability_density_function) at the given input domain _value_, for a uniform distribution over the interval [_min_, _max_). If unspecified, _min_ defaults to `0` and _max_ defaults to `1`. If only one argument is provided, it is interpreted as the _max_ value.",
|
| 51 |
+
"quantileUniform": r"Returns the quantile value (the inverse of the [cumulative distribution function](https://en.wikipedia.org/wiki/Cumulative_distribution_function)) for the given input _probability_, for a uniform distribution over the interval [_min_, _max_). If unspecified, _min_ defaults to `0` and _max_ defaults to `1`. If only one argument is provided, it is interpreted as the _max_ value.",
|
| 52 |
+
"now": r"Returns the timestamp for the current time.",
|
| 53 |
+
"datetime": r"Returns a new `Date` instance. The _month_ is 0-based, such that `1` represents February.",
|
| 54 |
+
"date": r"Returns the day of the month for the given _datetime_ value, in local time.",
|
| 55 |
+
"day": r"Returns the day of the week for the given _datetime_ value, in local time.",
|
| 56 |
+
"dayofyear": r"Returns the one-based day of the year for the given _datetime_ value, in local time.",
|
| 57 |
+
"year": r"Returns the year for the given _datetime_ value, in local time.",
|
| 58 |
+
"quarter": r"Returns the quarter of the year (0-3) for the given _datetime_ value, in local time.",
|
| 59 |
+
"month": r"Returns the (zero-based) month for the given _datetime_ value, in local time.",
|
| 60 |
+
"week": r"Returns the week number of the year for the given _datetime_, in local time. This function assumes Sunday-based weeks. Days before the first Sunday of the year are considered to be in week 0, the first Sunday of the year is the start of week 1, the second Sunday week 2, _etc._.",
|
| 61 |
+
"hours": r"Returns the hours component for the given _datetime_ value, in local time.",
|
| 62 |
+
"minutes": r"Returns the minutes component for the given _datetime_ value, in local time.",
|
| 63 |
+
"seconds": r"Returns the seconds component for the given _datetime_ value, in local time.",
|
| 64 |
+
"milliseconds": r"Returns the milliseconds component for the given _datetime_ value, in local time.",
|
| 65 |
+
"time": r"Returns the epoch-based timestamp for the given _datetime_ value.",
|
| 66 |
+
"timezoneoffset": r"Returns the timezone offset from the local timezone to UTC for the given _datetime_ value.",
|
| 67 |
+
"timeOffset": r"Returns a new `Date` instance that offsets the given _date_ by the specified time [_unit_](../api/time/#time-units) in the local timezone. The optional _step_ argument indicates the number of time unit steps to offset by (default 1).",
|
| 68 |
+
"timeSequence": r"Returns an array of `Date` instances from _start_ (inclusive) to _stop_ (exclusive), with each entry separated by the given time [_unit_](../api/time/#time-units) in the local timezone. The optional _step_ argument indicates the number of time unit steps to take between each sequence entry (default 1).",
|
| 69 |
+
"utc": r"Returns a timestamp for the given UTC date. The _month_ is 0-based, such that `1` represents February.",
|
| 70 |
+
"utcdate": r"Returns the day of the month for the given _datetime_ value, in UTC time.",
|
| 71 |
+
"utcday": r"Returns the day of the week for the given _datetime_ value, in UTC time.",
|
| 72 |
+
"utcdayofyear": r"Returns the one-based day of the year for the given _datetime_ value, in UTC time.",
|
| 73 |
+
"utcyear": r"Returns the year for the given _datetime_ value, in UTC time.",
|
| 74 |
+
"utcquarter": r"Returns the quarter of the year (0-3) for the given _datetime_ value, in UTC time.",
|
| 75 |
+
"utcmonth": r"Returns the (zero-based) month for the given _datetime_ value, in UTC time.",
|
| 76 |
+
"utcweek": r"Returns the week number of the year for the given _datetime_, in UTC time. This function assumes Sunday-based weeks. Days before the first Sunday of the year are considered to be in week 0, the first Sunday of the year is the start of week 1, the second Sunday week 2, _etc._.",
|
| 77 |
+
"utchours": r"Returns the hours component for the given _datetime_ value, in UTC time.",
|
| 78 |
+
"utcminutes": r"Returns the minutes component for the given _datetime_ value, in UTC time.",
|
| 79 |
+
"utcseconds": r"Returns the seconds component for the given _datetime_ value, in UTC time.",
|
| 80 |
+
"utcmilliseconds": r"Returns the milliseconds component for the given _datetime_ value, in UTC time.",
|
| 81 |
+
"utcOffset": r"Returns a new `Date` instance that offsets the given _date_ by the specified time [_unit_](../api/time/#time-units) in UTC time. The optional _step_ argument indicates the number of time unit steps to offset by (default 1).",
|
| 82 |
+
"utcSequence": r"Returns an array of `Date` instances from _start_ (inclusive) to _stop_ (exclusive), with each entry separated by the given time [_unit_](../api/time/#time-units) in UTC time. The optional _step_ argument indicates the number of time unit steps to take between each sequence entry (default 1).",
|
| 83 |
+
"extent": r"Returns a new _[min, max]_ array with the minimum and maximum values of the input array, ignoring `null`, `undefined`, and `NaN` values.",
|
| 84 |
+
"clampRange": r"Clamps a two-element _range_ array in a span-preserving manner. If the span of the input _range_ is less than _(max - min)_ and an endpoint exceeds either the _min_ or _max_ value, the range is translated such that the span is preserved and one endpoint touches the boundary of the _[min, max]_ range. If the span exceeds _(max - min)_, the range _[min, max]_ is returned.",
|
| 85 |
+
"indexof": r"Returns the first index of _value_ in the input _array_, or the first index of _substring_ in the input _string_..",
|
| 86 |
+
"inrange": r"Tests whether _value_ lies within (or is equal to either) the first and last values of the _range_ array.",
|
| 87 |
+
"join": r"Returns a new string by concatenating all of the elements of the input _array_, separated by commas or a specified _separator_ string.",
|
| 88 |
+
"lastindexof": r"Returns the last index of _value_ in the input _array_, or the last index of _substring_ in the input _string_..",
|
| 89 |
+
"length": r"Returns the length of the input _array_, or the length of the input _string_.",
|
| 90 |
+
"lerp": r"Returns the linearly interpolated value between the first and last entries in the _array_ for the provided interpolation _fraction_ (typically between 0 and 1). For example, `lerp([0, 50], 0.5)` returns 25.",
|
| 91 |
+
"peek": r"Returns the last element in the input _array_. Similar to the built-in `Array.pop` method, except that it does not remove the last element. This method is a convenient shorthand for `array[array.length - 1]`.",
|
| 92 |
+
"pluck": r"Retrieves the value for the specified *field* from a given *array* of objects. The input *field* string may include nested properties (e.g., `foo.bar.bz`).",
|
| 93 |
+
"reverse": r"Returns a new array with elements in a reverse order of the input _array_. The first array element becomes the last, and the last array element becomes the first.",
|
| 94 |
+
"sequence": r"Returns an array containing an arithmetic sequence of numbers. If _step_ is omitted, it defaults to 1. If _start_ is omitted, it defaults to 0. The _stop_ value is exclusive; it is not included in the result. If _step_ is positive, the last element is the largest _start + i * step_ less than _stop_; if _step_ is negative, the last element is the smallest _start + i * step_ greater than _stop_. If the returned array would contain an infinite number of values, an empty range is returned. The arguments are not required to be integers.",
|
| 95 |
+
"slice": r"Returns a section of _array_ between the _start_ and _end_ indices. If the _end_ argument is negative, it is treated as an offset from the end of the array (_length(array) + end_).",
|
| 96 |
+
"span": r"Returns the span of _array_: the difference between the last and first elements, or _array[array.length-1] - array[0]_. Or if input is a string: a section of _string_ between the _start_ and _end_ indices. If the _end_ argument is negative, it is treated as an offset from the end of the string (_length(string) + end_)..",
|
| 97 |
+
"lower": r"Transforms _string_ to lower-case letters.",
|
| 98 |
+
"pad": r"Pads a _string_ value with repeated instances of a _character_ up to a specified _length_. If _character_ is not specified, a space (' ') is used. By default, padding is added to the end of a string. An optional _align_ parameter specifies if padding should be added to the `'left'` (beginning), `'center'`, or `'right'` (end) of the input string.",
|
| 99 |
+
"parseFloat": r"Parses the input _string_ to a floating-point value. Same as JavaScript's `parseFloat`.",
|
| 100 |
+
"parseInt": r"Parses the input _string_ to an integer value. Same as JavaScript's `parseInt`.",
|
| 101 |
+
"replace": r"Returns a new string with some or all matches of _pattern_ replaced by a _replacement_ string. The _pattern_ can be a string or a regular expression. If _pattern_ is a string, only the first instance will be replaced. Same as [JavaScript's String.replace](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace).",
|
| 102 |
+
"split": r"Returns an array of tokens created by splitting the input _string_ according to a provided _separator_ pattern. The result can optionally be constrained to return at most _limit_ tokens.",
|
| 103 |
+
"substring": r"Returns a section of _string_ between the _start_ and _end_ indices.",
|
| 104 |
+
"trim": r"Returns a trimmed string with preceding and trailing whitespace removed.",
|
| 105 |
+
"truncate": r"Truncates an input _string_ to a target _length_. The optional _align_ argument indicates what part of the string should be truncated: `'left'` (the beginning), `'center'`, or `'right'` (the end). By default, the `'right'` end of the string is truncated. The optional _ellipsis_ argument indicates the string to use to indicate truncated content; by default the ellipsis character `...` (`\\u2026`) is used.",
|
| 106 |
+
"upper": r"Transforms _string_ to upper-case letters.",
|
| 107 |
+
"merge": r"Merges the input objects _object1_, _object2_, etc into a new output object. Inputs are visited in sequential order, such that key values from later arguments can overwrite those from earlier arguments. Example: `merge({a:1, b:2}, {a:3}) -> {a:3, b:2}`.",
|
| 108 |
+
"dayFormat": r"Formats a (0-6) _weekday_ number as a full week day name, according to the current locale. For example: `dayFormat(0) -> \"Sunday\"`.",
|
| 109 |
+
"dayAbbrevFormat": r"Formats a (0-6) _weekday_ number as an abbreviated week day name, according to the current locale. For example: `dayAbbrevFormat(0) -> \"Sun\"`.",
|
| 110 |
+
"format": r"Formats a numeric _value_ as a string. The _specifier_ must be a valid [d3-format specifier](https://github.com/d3/d3-format/) (e.g., `format(value, ',.2f')`.",
|
| 111 |
+
"monthFormat": r"Formats a (zero-based) _month_ number as a full month name, according to the current locale. For example: `monthFormat(0) -> \"January\"`.",
|
| 112 |
+
"monthAbbrevFormat": r"Formats a (zero-based) _month_ number as an abbreviated month name, according to the current locale. For example: `monthAbbrevFormat(0) -> \"Jan\"`.",
|
| 113 |
+
"timeUnitSpecifier": r"Returns a time format specifier string for the given time [_units_](../api/time/#time-units). The optional _specifiers_ object provides a set of specifier sub-strings for customizing the format; for more, see the [timeUnitSpecifier API documentation](../api/time/#timeUnitSpecifier). The resulting specifier string can then be used as input to the [timeFormat](#timeFormat) or [utcFormat](#utcFormat) functions, or as the _format_ parameter of an axis or legend. For example: `timeFormat(date, timeUnitSpecifier('year'))` or `timeFormat(date, timeUnitSpecifier(['hours', 'minutes']))`.",
|
| 114 |
+
"timeFormat": r"Formats a datetime _value_ (either a `Date` object or timestamp) as a string, according to the local time. The _specifier_ must be a valid [d3-time-format specifier](https://github.com/d3/d3-time-format/). For example: `timeFormat(timestamp, '%A')`.",
|
| 115 |
+
"timeParse": r"Parses a _string_ value to a Date object, according to the local time. The _specifier_ must be a valid [d3-time-format specifier](https://github.com/d3/d3-time-format/). For example: `timeParse('June 30, 2015', '%B %d, %Y')`.",
|
| 116 |
+
"utcFormat": r"Formats a datetime _value_ (either a `Date` object or timestamp) as a string, according to [UTC](https://en.wikipedia.org/wiki/Coordinated_Universal_Time) time. The _specifier_ must be a valid [d3-time-format specifier](https://github.com/d3/d3-time-format/). For example: `utcFormat(timestamp, '%A')`.",
|
| 117 |
+
"utcParse": r"Parses a _string_ value to a Date object, according to [UTC](https://en.wikipedia.org/wiki/Coordinated_Universal_Time) time. The _specifier_ must be a valid [d3-time-format specifier](https://github.com/d3/d3-time-format/). For example: `utcParse('June 30, 2015', '%B %d, %Y')`.",
|
| 118 |
+
"regexp": r"Creates a regular expression instance from an input _pattern_ string and optional _flags_. Same as [JavaScript's `RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp).",
|
| 119 |
+
"test": r"Evaluates a regular expression _regexp_ against the input _string_, returning `true` if the string matches the pattern, `false` otherwise. For example: `test(/\\d{3}/, \"32-21-9483\") -> true`.",
|
| 120 |
+
"rgb": r"Constructs a new [RGB](https://en.wikipedia.org/wiki/RGB_color_model) color. If _r_, _g_ and _b_ are specified, these represent the channel values of the returned color; an _opacity_ may also be specified. If a CSS Color Module Level 3 _specifier_ string is specified, it is parsed and then converted to the RGB color space. Uses [d3-color's rgb function](https://github.com/d3/d3-color#rgb).",
|
| 121 |
+
"hsl": r"Constructs a new [HSL](https://en.wikipedia.org/wiki/HSL_and_HSV) color. If _h_, _s_ and _l_ are specified, these represent the channel values of the returned color; an _opacity_ may also be specified. If a CSS Color Module Level 3 _specifier_ string is specified, it is parsed and then converted to the HSL color space. Uses [d3-color's hsl function](https://github.com/d3/d3-color#hsl).",
|
| 122 |
+
"lab": r"Constructs a new [CIE LAB](https://en.wikipedia.org/wiki/Lab_color_space#CIELAB) color. If _l_, _a_ and _b_ are specified, these represent the channel values of the returned color; an _opacity_ may also be specified. If a CSS Color Module Level 3 _specifier_ string is specified, it is parsed and then converted to the LAB color space. Uses [d3-color's lab function](https://github.com/d3/d3-color#lab).",
|
| 123 |
+
"hcl": r"Constructs a new [HCL](https://en.wikipedia.org/wiki/Lab_color_space#CIELAB) (hue, chroma, luminance) color. If _h_, _c_ and _l_ are specified, these represent the channel values of the returned color; an _opacity_ may also be specified. If a CSS Color Module Level 3 _specifier_ string is specified, it is parsed and then converted to the HCL color space. Uses [d3-color's hcl function](https://github.com/d3/d3-color#hcl).",
|
| 124 |
+
"luminance": r"Returns the luminance for the given color _specifier_ (compatible with [d3-color's rgb function](https://github.com/d3/d3-color#rgb)). The luminance is calculated according to the [W3C Web Content Accessibility Guidelines](https://www.w3.org/TR/2008/REC-WCAG20-20081211/#relativeluminancedef).",
|
| 125 |
+
"contrast": r"Returns the contrast ratio between the input color specifiers as a float between 1 and 21. The contrast is calculated according to the [W3C Web Content Accessibility Guidelines](https://www.w3.org/TR/2008/REC-WCAG20-20081211/#contrast-ratiodef).",
|
| 126 |
+
"item": r"Returns the current scenegraph item that is the target of the event.",
|
| 127 |
+
"group": r"Returns the scenegraph group mark item in which the current event has occurred. If no arguments are provided, the immediate parent group is returned. If a group name is provided, the matching ancestor group item is returned.",
|
| 128 |
+
"xy": r"Returns the x- and y-coordinates for the current event as a two-element array. If no arguments are provided, the top-level coordinate space of the view is used. If a scenegraph _item_ (or string group name) is provided, the coordinate space of the group item is used.",
|
| 129 |
+
"x": r"Returns the x coordinate for the current event. If no arguments are provided, the top-level coordinate space of the view is used. If a scenegraph _item_ (or string group name) is provided, the coordinate space of the group item is used.",
|
| 130 |
+
"y": r"Returns the y coordinate for the current event. If no arguments are provided, the top-level coordinate space of the view is used. If a scenegraph _item_ (or string group name) is provided, the coordinate space of the group item is used.",
|
| 131 |
+
"pinchDistance": r"Returns the pixel distance between the first two touch points of a multi-touch event.",
|
| 132 |
+
"pinchAngle": r"Returns the angle of the line connecting the first two touch points of a multi-touch event.",
|
| 133 |
+
"inScope": r"Returns true if the given scenegraph _item_ is a descendant of the group mark in which the event handler was defined, false otherwise.",
|
| 134 |
+
"data": r"Returns the array of data objects for the Vega data set with the given _name_. If the data set is not found, returns an empty array.",
|
| 135 |
+
"indata": r"Tests if the data set with a given _name_ contains a datum with a _field_ value that matches the input _value_. For example: `indata('table', 'category', value)`.",
|
| 136 |
+
"scale": r"Applies the named scale transform (or projection) to the specified _value_. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the scale or projection.",
|
| 137 |
+
"invert": r"Inverts the named scale transform (or projection) for the specified _value_. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the scale or projection.",
|
| 138 |
+
"copy": r"Returns a copy (a new cloned instance) of the named scale transform of projection, or `undefined` if no scale or projection is found. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the scale or projection.",
|
| 139 |
+
"domain": r"Returns the scale domain array for the named scale transform, or an empty array if the scale is not found. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the scale.",
|
| 140 |
+
"range": r"Returns the scale range array for the named scale transform, or an empty array if the scale is not found. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the scale.",
|
| 141 |
+
"bandwidth": r"Returns the current band width for the named band scale transform, or zero if the scale is not found or is not a band scale. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the scale.",
|
| 142 |
+
"bandspace": r"Returns the number of steps needed within a band scale, based on the _count_ of domain elements and the inner and outer padding values. While normally calculated within the scale itself, this function can be helpful for determining the size of a chart's layout.",
|
| 143 |
+
"gradient": r"Returns a linear color gradient for the _scale_ (whose range must be a [continuous color scheme](../schemes)) and starting and ending points _p0_ and _p1_, each an _[x, y]_ array. The points _p0_ and _p1_ should be expressed in normalized coordinates in the domain [0, 1], relative to the bounds of the item being colored. If unspecified, _p0_ defaults to `[0, 0]` and _p1_ defaults to `[1, 0]`, for a horizontal gradient that spans the full bounds of an item. The optional _count_ argument indicates a desired target number of sample points to take from the color scale.",
|
| 144 |
+
"panLinear": r"Given a linear scale _domain_ array with numeric or datetime values, returns a new two-element domain array that is the result of panning the domain by a fractional _delta_. The _delta_ value represents fractional units of the scale range; for example, `0.5` indicates panning the scale domain to the right by half the scale range.",
|
| 145 |
+
"panLog": r"Given a log scale _domain_ array with numeric or datetime values, returns a new two-element domain array that is the result of panning the domain by a fractional _delta_. The _delta_ value represents fractional units of the scale range; for example, `0.5` indicates panning the scale domain to the right by half the scale range.",
|
| 146 |
+
"panPow": r"Given a power scale _domain_ array with numeric or datetime values and the given _exponent_, returns a new two-element domain array that is the result of panning the domain by a fractional _delta_. The _delta_ value represents fractional units of the scale range; for example, `0.5` indicates panning the scale domain to the right by half the scale range.",
|
| 147 |
+
"panSymlog": r"Given a symmetric log scale _domain_ array with numeric or datetime values parameterized by the given _constant_, returns a new two-element domain array that is the result of panning the domain by a fractional _delta_. The _delta_ value represents fractional units of the scale range; for example, `0.5` indicates panning the scale domain to the right by half the scale range.",
|
| 148 |
+
"zoomLinear": r"Given a linear scale _domain_ array with numeric or datetime values, returns a new two-element domain array that is the result of zooming the domain by a _scaleFactor_, centered at the provided fractional _anchor_. The _anchor_ value represents the zoom position in terms of fractional units of the scale range; for example, `0.5` indicates a zoom centered on the mid-point of the scale range.",
|
| 149 |
+
"zoomLog": r"Given a log scale _domain_ array with numeric or datetime values, returns a new two-element domain array that is the result of zooming the domain by a _scaleFactor_, centered at the provided fractional _anchor_. The _anchor_ value represents the zoom position in terms of fractional units of the scale range; for example, `0.5` indicates a zoom centered on the mid-point of the scale range.",
|
| 150 |
+
"zoomPow": r"Given a power scale _domain_ array with numeric or datetime values and the given _exponent_, returns a new two-element domain array that is the result of zooming the domain by a _scaleFactor_, centered at the provided fractional _anchor_. The _anchor_ value represents the zoom position in terms of fractional units of the scale range; for example, `0.5` indicates a zoom centered on the mid-point of the scale range.",
|
| 151 |
+
"zoomSymlog": r"Given a symmetric log scale _domain_ array with numeric or datetime values parameterized by the given _constant_, returns a new two-element domain array that is the result of zooming the domain by a _scaleFactor_, centered at the provided fractional _anchor_. The _anchor_ value represents the zoom position in terms of fractional units of the scale range; for example, `0.5` indicates a zoom centered on the mid-point of the scale range.",
|
| 152 |
+
"geoArea": r"Returns the projected planar area (typically in square pixels) of a GeoJSON _feature_ according to the named _projection_. If the _projection_ argument is `null`, computes the spherical area in steradians using unprojected longitude, latitude coordinates. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the projection. Uses d3-geo's [geoArea](https://github.com/d3/d3-geo#geoArea) and [path.area](https://github.com/d3/d3-geo#path_area) methods.",
|
| 153 |
+
"geoBounds": r"Returns the projected planar bounding box (typically in pixels) for the specified GeoJSON _feature_, according to the named _projection_. The bounding box is represented by a two-dimensional array: [[_x0_, _y0_], [_x1_, _y1_]], where _x0_ is the minimum x-coordinate, _y0_ is the minimum y-coordinate, _x1_ is the maximum x-coordinate, and _y1_ is the maximum y-coordinate. If the _projection_ argument is `null`, computes the spherical bounding box using unprojected longitude, latitude coordinates. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the projection. Uses d3-geo's [geoBounds](https://github.com/d3/d3-geo#geoBounds) and [path.bounds](https://github.com/d3/d3-geo#path_bounds) methods.",
|
| 154 |
+
"geoCentroid": r"Returns the projected planar centroid (typically in pixels) for the specified GeoJSON _feature_, according to the named _projection_. If the _projection_ argument is `null`, computes the spherical centroid using unprojected longitude, latitude coordinates. The optional _group_ argument takes a scenegraph group mark item to indicate the specific scope in which to look up the projection. Uses d3-geo's [geoCentroid](https://github.com/d3/d3-geo#geoCentroid) and [path.centroid](https://github.com/d3/d3-geo#path_centroid) methods.",
|
| 155 |
+
"treePath": r"For the hierarchy data set with the given _name_, returns the shortest path through from the _source_ node id to the _target_ node id. The path starts at the _source_ node, ascends to the least common ancestor of the _source_ node and the _target_ node, and then descends to the _target_ node.",
|
| 156 |
+
"treeAncestors": r"For the hierarchy data set with the given _name_, returns the array of ancestors nodes, starting with the input _node_, then followed by each parent up to the root.",
|
| 157 |
+
"containerSize": r"Returns the current CSS box size (`[el.clientWidth, el.clientHeight]`) of the parent DOM element that contains the Vega view. If there is no container element, returns `[undefined, undefined]`.",
|
| 158 |
+
"screen": r"Returns the [`window.screen`](https://developer.mozilla.org/en-US/docs/Web/API/Window/screen) object, or `{}` if Vega is not running in a browser environment.",
|
| 159 |
+
"windowSize": r"Returns the current window size (`[window.innerWidth, window.innerHeight]`) or `[undefined, undefined]` if Vega is not running in a browser environment.",
|
| 160 |
+
"warn": r"Logs a warning message and returns the last argument. For the message to appear in the console, the visualization view must have the appropriate logging level set.",
|
| 161 |
+
"info": r"Logs an informative message and returns the last argument. For the message to appear in the console, the visualization view must have the appropriate logging level set.",
|
| 162 |
+
"debug": r"Logs a debugging message and returns the last argument. For the message to appear in the console, the visualization view must have the appropriate logging level set.",
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
# This maps vega expression function names to the Python name
|
| 167 |
+
NAME_MAP = {"if": "if_"}
|
mgm/lib/python3.10/site-packages/altair/utils/__init__.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .core import (
|
| 2 |
+
SHORTHAND_KEYS,
|
| 3 |
+
display_traceback,
|
| 4 |
+
infer_encoding_types,
|
| 5 |
+
infer_vegalite_type_for_pandas,
|
| 6 |
+
parse_shorthand,
|
| 7 |
+
sanitize_narwhals_dataframe,
|
| 8 |
+
sanitize_pandas_dataframe,
|
| 9 |
+
update_nested,
|
| 10 |
+
use_signature,
|
| 11 |
+
)
|
| 12 |
+
from .deprecation import AltairDeprecationWarning, deprecated, deprecated_warn
|
| 13 |
+
from .html import spec_to_html
|
| 14 |
+
from .plugin_registry import PluginRegistry
|
| 15 |
+
from .schemapi import Optional, SchemaBase, SchemaLike, Undefined, is_undefined
|
| 16 |
+
|
| 17 |
+
__all__ = (
|
| 18 |
+
"SHORTHAND_KEYS",
|
| 19 |
+
"AltairDeprecationWarning",
|
| 20 |
+
"Optional",
|
| 21 |
+
"PluginRegistry",
|
| 22 |
+
"SchemaBase",
|
| 23 |
+
"SchemaLike",
|
| 24 |
+
"Undefined",
|
| 25 |
+
"deprecated",
|
| 26 |
+
"deprecated_warn",
|
| 27 |
+
"display_traceback",
|
| 28 |
+
"infer_encoding_types",
|
| 29 |
+
"infer_vegalite_type_for_pandas",
|
| 30 |
+
"is_undefined",
|
| 31 |
+
"parse_shorthand",
|
| 32 |
+
"sanitize_narwhals_dataframe",
|
| 33 |
+
"sanitize_pandas_dataframe",
|
| 34 |
+
"spec_to_html",
|
| 35 |
+
"update_nested",
|
| 36 |
+
"use_signature",
|
| 37 |
+
)
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (892 Bytes). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_dfi_types.cpython-310.pyc
ADDED
|
Binary file (6.76 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_importers.cpython-310.pyc
ADDED
|
Binary file (3.32 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_show.cpython-310.pyc
ADDED
|
Binary file (2.49 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_transformed_data.cpython-310.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/_vegafusion_data.cpython-310.pyc
ADDED
|
Binary file (8.32 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/compiler.cpython-310.pyc
ADDED
|
Binary file (521 Bytes). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/core.cpython-310.pyc
ADDED
|
Binary file (25 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/data.cpython-310.pyc
ADDED
|
Binary file (12.2 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/deprecation.cpython-310.pyc
ADDED
|
Binary file (5.04 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/display.cpython-310.pyc
ADDED
|
Binary file (8.41 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/execeval.cpython-310.pyc
ADDED
|
Binary file (3 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/html.cpython-310.pyc
ADDED
|
Binary file (13 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/mimebundle.cpython-310.pyc
ADDED
|
Binary file (9.01 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/plugin_registry.cpython-310.pyc
ADDED
|
Binary file (9.97 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/save.cpython-310.pyc
ADDED
|
Binary file (6.42 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/schemapi.cpython-310.pyc
ADDED
|
Binary file (48.5 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/selection.cpython-310.pyc
ADDED
|
Binary file (4.4 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/__pycache__/server.cpython-310.pyc
ADDED
|
Binary file (4.99 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/utils/_dfi_types.py
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# DataFrame Interchange Protocol Types
|
| 2 |
+
# Copied from https://data-apis.org/dataframe-protocol/latest/API.html,
|
| 3 |
+
# changed ABCs to Protocols, and subset the type hints to only those that are
|
| 4 |
+
# relevant for Altair.
|
| 5 |
+
#
|
| 6 |
+
# These classes are only for use in type signatures
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import enum
|
| 10 |
+
from typing import TYPE_CHECKING, Any, Protocol
|
| 11 |
+
|
| 12 |
+
if TYPE_CHECKING:
|
| 13 |
+
from collections.abc import Iterable
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class DtypeKind(enum.IntEnum):
|
| 17 |
+
"""
|
| 18 |
+
Integer enum for data types.
|
| 19 |
+
|
| 20 |
+
Attributes
|
| 21 |
+
----------
|
| 22 |
+
INT : int
|
| 23 |
+
Matches to signed integer data type.
|
| 24 |
+
UINT : int
|
| 25 |
+
Matches to unsigned integer data type.
|
| 26 |
+
FLOAT : int
|
| 27 |
+
Matches to floating point data type.
|
| 28 |
+
BOOL : int
|
| 29 |
+
Matches to boolean data type.
|
| 30 |
+
STRING : int
|
| 31 |
+
Matches to string data type (UTF-8 encoded).
|
| 32 |
+
DATETIME : int
|
| 33 |
+
Matches to datetime data type.
|
| 34 |
+
CATEGORICAL : int
|
| 35 |
+
Matches to categorical data type.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
INT = 0
|
| 39 |
+
UINT = 1
|
| 40 |
+
FLOAT = 2
|
| 41 |
+
BOOL = 20
|
| 42 |
+
STRING = 21 # UTF-8
|
| 43 |
+
DATETIME = 22
|
| 44 |
+
CATEGORICAL = 23
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# Type hint of first element would actually be DtypeKind but can't use that
|
| 48 |
+
# as other libraries won't use an instance of our own Enum in this module but have
|
| 49 |
+
# their own. Type checkers will raise an error on that even though the enums
|
| 50 |
+
# are identical.
|
| 51 |
+
class Column(Protocol):
|
| 52 |
+
@property
|
| 53 |
+
def dtype(self) -> tuple[Any, int, str, str]:
|
| 54 |
+
"""
|
| 55 |
+
Dtype description as a tuple ``(kind, bit-width, format string, endianness)``.
|
| 56 |
+
|
| 57 |
+
Bit-width : the number of bits as an integer
|
| 58 |
+
Format string : data type description format string in Apache Arrow C
|
| 59 |
+
Data Interface format.
|
| 60 |
+
Endianness : current only native endianness (``=``) is supported
|
| 61 |
+
|
| 62 |
+
Notes
|
| 63 |
+
-----
|
| 64 |
+
- Kind specifiers are aligned with DLPack where possible (hence the
|
| 65 |
+
jump to 20, leave enough room for future extension)
|
| 66 |
+
- Masks must be specified as boolean with either bit width 1 (for bit
|
| 67 |
+
masks) or 8 (for byte masks).
|
| 68 |
+
- Dtype width in bits was preferred over bytes
|
| 69 |
+
- Endianness isn't too useful, but included now in case in the future
|
| 70 |
+
we need to support non-native endianness
|
| 71 |
+
- Went with Apache Arrow format strings over NumPy format strings
|
| 72 |
+
because they're more complete from a dataframe perspective
|
| 73 |
+
- Format strings are mostly useful for datetime specification, and
|
| 74 |
+
for categoricals.
|
| 75 |
+
- For categoricals, the format string describes the type of the
|
| 76 |
+
categorical in the data buffer. In case of a separate encoding of
|
| 77 |
+
the categorical (e.g. an integer to string mapping), this can
|
| 78 |
+
be derived from ``self.describe_categorical``.
|
| 79 |
+
- Data types not included: complex, Arrow-style null, binary, decimal,
|
| 80 |
+
and nested (list, struct, map, union) dtypes.
|
| 81 |
+
"""
|
| 82 |
+
...
|
| 83 |
+
|
| 84 |
+
# Have to use a generic Any return type as not all libraries who implement
|
| 85 |
+
# the dataframe interchange protocol implement the TypedDict that is usually
|
| 86 |
+
# returned here in the same way. As TypedDicts are invariant, even a slight change
|
| 87 |
+
# will lead to an error by a type checker. See PR in which this code was added
|
| 88 |
+
# for details.
|
| 89 |
+
@property
|
| 90 |
+
def describe_categorical(self) -> Any:
|
| 91 |
+
"""
|
| 92 |
+
If the dtype is categorical, there are two options.
|
| 93 |
+
|
| 94 |
+
- There are only values in the data buffer.
|
| 95 |
+
- There is a separate non-categorical Column encoding categorical values.
|
| 96 |
+
|
| 97 |
+
Raises TypeError if the dtype is not categorical
|
| 98 |
+
|
| 99 |
+
Returns the dictionary with description on how to interpret the data buffer:
|
| 100 |
+
- "is_ordered" : bool, whether the ordering of dictionary indices is
|
| 101 |
+
semantically meaningful.
|
| 102 |
+
- "is_dictionary" : bool, whether a mapping of
|
| 103 |
+
categorical values to other objects exists
|
| 104 |
+
- "categories" : Column representing the (implicit) mapping of indices to
|
| 105 |
+
category values (e.g. an array of cat1, cat2, ...).
|
| 106 |
+
None if not a dictionary-style categorical.
|
| 107 |
+
|
| 108 |
+
TBD: are there any other in-memory representations that are needed?
|
| 109 |
+
"""
|
| 110 |
+
...
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class DataFrame(Protocol):
|
| 114 |
+
"""
|
| 115 |
+
A data frame class, with only the methods required by the interchange protocol defined.
|
| 116 |
+
|
| 117 |
+
A "data frame" represents an ordered collection of named columns.
|
| 118 |
+
A column's "name" must be a unique string.
|
| 119 |
+
Columns may be accessed by name or by position.
|
| 120 |
+
|
| 121 |
+
This could be a public data frame class, or an object with the methods and
|
| 122 |
+
attributes defined on this DataFrame class could be returned from the
|
| 123 |
+
``__dataframe__`` method of a public data frame class in a library adhering
|
| 124 |
+
to the dataframe interchange protocol specification.
|
| 125 |
+
"""
|
| 126 |
+
|
| 127 |
+
def __dataframe__(
|
| 128 |
+
self, nan_as_null: bool = False, allow_copy: bool = True
|
| 129 |
+
) -> DataFrame:
|
| 130 |
+
"""
|
| 131 |
+
Construct a new exchange object, potentially changing the parameters.
|
| 132 |
+
|
| 133 |
+
``nan_as_null`` is a keyword intended for the consumer to tell the
|
| 134 |
+
producer to overwrite null values in the data with ``NaN``.
|
| 135 |
+
It is intended for cases where the consumer does not support the bit
|
| 136 |
+
mask or byte mask that is the producer's native representation.
|
| 137 |
+
``allow_copy`` is a keyword that defines whether or not the library is
|
| 138 |
+
allowed to make a copy of the data. For example, copying data would be
|
| 139 |
+
necessary if a library supports strided buffers, given that this protocol
|
| 140 |
+
specifies contiguous buffers.
|
| 141 |
+
"""
|
| 142 |
+
...
|
| 143 |
+
|
| 144 |
+
def column_names(self) -> Iterable[str]:
|
| 145 |
+
"""Return an iterator yielding the column names."""
|
| 146 |
+
...
|
| 147 |
+
|
| 148 |
+
def get_column_by_name(self, name: str) -> Column:
|
| 149 |
+
"""Return the column whose name is the indicated name."""
|
| 150 |
+
...
|
| 151 |
+
|
| 152 |
+
def get_chunks(self, n_chunks: int | None = None) -> Iterable[DataFrame]:
|
| 153 |
+
"""
|
| 154 |
+
Return an iterator yielding the chunks.
|
| 155 |
+
|
| 156 |
+
By default (None), yields the chunks that the data is stored as by the
|
| 157 |
+
producer. If given, ``n_chunks`` must be a multiple of
|
| 158 |
+
``self.num_chunks()``, meaning the producer must subdivide each chunk
|
| 159 |
+
before yielding it.
|
| 160 |
+
|
| 161 |
+
Note that the producer must ensure that all columns are chunked the
|
| 162 |
+
same way.
|
| 163 |
+
"""
|
| 164 |
+
...
|
mgm/lib/python3.10/site-packages/altair/utils/_importers.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from importlib.metadata import version as importlib_version
|
| 4 |
+
from typing import TYPE_CHECKING
|
| 5 |
+
|
| 6 |
+
from packaging.version import Version
|
| 7 |
+
|
| 8 |
+
if TYPE_CHECKING:
|
| 9 |
+
from types import ModuleType
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def import_vegafusion() -> ModuleType:
|
| 13 |
+
min_version = "1.5.0"
|
| 14 |
+
try:
|
| 15 |
+
import vegafusion as vf
|
| 16 |
+
|
| 17 |
+
version = importlib_version("vegafusion")
|
| 18 |
+
if Version(version) >= Version("2.0.0a0"):
|
| 19 |
+
# In VegaFusion 2.0 there is no vegafusion-python-embed package
|
| 20 |
+
return vf
|
| 21 |
+
else:
|
| 22 |
+
embed_version = importlib_version("vegafusion-python-embed")
|
| 23 |
+
if version != embed_version or Version(version) < Version(min_version):
|
| 24 |
+
msg = (
|
| 25 |
+
"The versions of the vegafusion and vegafusion-python-embed packages must match\n"
|
| 26 |
+
f"and must be version {min_version} or greater.\n"
|
| 27 |
+
f"Found:\n"
|
| 28 |
+
f" - vegafusion=={version}\n"
|
| 29 |
+
f" - vegafusion-python-embed=={embed_version}\n"
|
| 30 |
+
)
|
| 31 |
+
raise RuntimeError(msg)
|
| 32 |
+
return vf
|
| 33 |
+
except ImportError as err:
|
| 34 |
+
msg = (
|
| 35 |
+
'The "vegafusion" data transformer and chart.transformed_data feature requires\n'
|
| 36 |
+
f"version {min_version} or greater of the 'vegafusion-python-embed' and 'vegafusion' packages.\n"
|
| 37 |
+
"These can be installed with pip using:\n"
|
| 38 |
+
f' pip install "vegafusion[embed]>={min_version}"\n'
|
| 39 |
+
"Or with conda using:\n"
|
| 40 |
+
f' conda install -c conda-forge "vegafusion-python-embed>={min_version}" '
|
| 41 |
+
f'"vegafusion>={min_version}"\n\n'
|
| 42 |
+
f"ImportError: {err.args[0]}"
|
| 43 |
+
)
|
| 44 |
+
raise ImportError(msg) from err
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def import_vl_convert() -> ModuleType:
|
| 48 |
+
min_version = "1.6.0"
|
| 49 |
+
try:
|
| 50 |
+
version = importlib_version("vl-convert-python")
|
| 51 |
+
if Version(version) < Version(min_version):
|
| 52 |
+
msg = (
|
| 53 |
+
f"The vl-convert-python package must be version {min_version} or greater. "
|
| 54 |
+
f"Found version {version}"
|
| 55 |
+
)
|
| 56 |
+
raise RuntimeError(msg)
|
| 57 |
+
import vl_convert as vlc
|
| 58 |
+
|
| 59 |
+
return vlc
|
| 60 |
+
except ImportError as err:
|
| 61 |
+
msg = (
|
| 62 |
+
f"The vl-convert Vega-Lite compiler and file export feature requires\n"
|
| 63 |
+
f"version {min_version} or greater of the 'vl-convert-python' package. \n"
|
| 64 |
+
f"This can be installed with pip using:\n"
|
| 65 |
+
f' pip install "vl-convert-python>={min_version}"\n'
|
| 66 |
+
"or conda:\n"
|
| 67 |
+
f' conda install -c conda-forge "vl-convert-python>={min_version}"\n\n'
|
| 68 |
+
f"ImportError: {err.args[0]}"
|
| 69 |
+
)
|
| 70 |
+
raise ImportError(msg) from err
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def vl_version_for_vl_convert() -> str:
|
| 74 |
+
from altair.vegalite import SCHEMA_VERSION
|
| 75 |
+
|
| 76 |
+
# Compute VlConvert's vl_version string (of the form 'v5_2')
|
| 77 |
+
# from SCHEMA_VERSION (of the form 'v5.2.0')
|
| 78 |
+
return "_".join(SCHEMA_VERSION.split(".")[:2])
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def import_pyarrow_interchange() -> ModuleType:
|
| 82 |
+
min_version = "11.0.0"
|
| 83 |
+
try:
|
| 84 |
+
version = importlib_version("pyarrow")
|
| 85 |
+
|
| 86 |
+
if Version(version) < Version(min_version):
|
| 87 |
+
msg = (
|
| 88 |
+
f"The pyarrow package must be version {min_version} or greater. "
|
| 89 |
+
f"Found version {version}"
|
| 90 |
+
)
|
| 91 |
+
raise RuntimeError(msg)
|
| 92 |
+
import pyarrow.interchange as pi
|
| 93 |
+
|
| 94 |
+
return pi
|
| 95 |
+
except ImportError as err:
|
| 96 |
+
msg = (
|
| 97 |
+
f"Usage of the DataFrame Interchange Protocol requires\n"
|
| 98 |
+
f"version {min_version} or greater of the pyarrow package. \n"
|
| 99 |
+
f"This can be installed with pip using:\n"
|
| 100 |
+
f' pip install "pyarrow>={min_version}"\n'
|
| 101 |
+
"or conda:\n"
|
| 102 |
+
f' conda install -c conda-forge "pyarrow>={min_version}"\n\n'
|
| 103 |
+
f"ImportError: {err.args[0]}"
|
| 104 |
+
)
|
| 105 |
+
raise ImportError(msg) from err
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def pyarrow_available() -> bool:
|
| 109 |
+
try:
|
| 110 |
+
import_pyarrow_interchange()
|
| 111 |
+
return True
|
| 112 |
+
except (ImportError, RuntimeError):
|
| 113 |
+
return False
|
mgm/lib/python3.10/site-packages/altair/utils/_show.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import webbrowser
|
| 4 |
+
from http.server import BaseHTTPRequestHandler, HTTPServer
|
| 5 |
+
from typing import TYPE_CHECKING
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from collections.abc import Iterable
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def open_html_in_browser(
|
| 12 |
+
html: str | bytes,
|
| 13 |
+
using: str | Iterable[str] | None = None,
|
| 14 |
+
port: int | None = None,
|
| 15 |
+
) -> None:
|
| 16 |
+
"""
|
| 17 |
+
Display an html document in a web browser without creating a temp file.
|
| 18 |
+
|
| 19 |
+
Instantiates a simple http server and uses the webbrowser module to
|
| 20 |
+
open the server's URL
|
| 21 |
+
|
| 22 |
+
Parameters
|
| 23 |
+
----------
|
| 24 |
+
html: str
|
| 25 |
+
HTML string to display
|
| 26 |
+
using: str or iterable of str
|
| 27 |
+
Name of the web browser to open (e.g. "chrome", "firefox", etc.).
|
| 28 |
+
If an iterable, choose the first browser available on the system.
|
| 29 |
+
If none, choose the system default browser.
|
| 30 |
+
port: int
|
| 31 |
+
Port to use. Defaults to a random port
|
| 32 |
+
"""
|
| 33 |
+
# Encode html to bytes
|
| 34 |
+
html_bytes = html.encode("utf8") if isinstance(html, str) else html
|
| 35 |
+
|
| 36 |
+
browser = None
|
| 37 |
+
|
| 38 |
+
if using is None:
|
| 39 |
+
browser = webbrowser.get(None)
|
| 40 |
+
else:
|
| 41 |
+
# normalize using to an iterable
|
| 42 |
+
if isinstance(using, str):
|
| 43 |
+
using = [using]
|
| 44 |
+
|
| 45 |
+
for browser_key in using:
|
| 46 |
+
try:
|
| 47 |
+
browser = webbrowser.get(browser_key)
|
| 48 |
+
if browser is not None:
|
| 49 |
+
break
|
| 50 |
+
except webbrowser.Error:
|
| 51 |
+
pass
|
| 52 |
+
|
| 53 |
+
if browser is None:
|
| 54 |
+
raise ValueError("Failed to locate a browser with name in " + str(using))
|
| 55 |
+
|
| 56 |
+
class OneShotRequestHandler(BaseHTTPRequestHandler):
|
| 57 |
+
def do_GET(self) -> None:
|
| 58 |
+
self.send_response(200)
|
| 59 |
+
self.send_header("Content-type", "text/html")
|
| 60 |
+
self.end_headers()
|
| 61 |
+
|
| 62 |
+
bufferSize = 1024 * 1024
|
| 63 |
+
for i in range(0, len(html_bytes), bufferSize):
|
| 64 |
+
self.wfile.write(html_bytes[i : i + bufferSize])
|
| 65 |
+
|
| 66 |
+
def log_message(self, format, *args):
|
| 67 |
+
# Silence stderr logging
|
| 68 |
+
pass
|
| 69 |
+
|
| 70 |
+
# Use specified port if provided, otherwise choose a random port (port value of 0)
|
| 71 |
+
server = HTTPServer(
|
| 72 |
+
("127.0.0.1", port if port is not None else 0), OneShotRequestHandler
|
| 73 |
+
)
|
| 74 |
+
browser.open(f"http://127.0.0.1:{server.server_port}")
|
| 75 |
+
server.handle_request()
|
mgm/lib/python3.10/site-packages/altair/utils/_transformed_data.py
ADDED
|
@@ -0,0 +1,567 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING, Any, overload
|
| 4 |
+
|
| 5 |
+
from altair import (
|
| 6 |
+
Chart,
|
| 7 |
+
ConcatChart,
|
| 8 |
+
ConcatSpecGenericSpec,
|
| 9 |
+
FacetChart,
|
| 10 |
+
FacetedUnitSpec,
|
| 11 |
+
FacetSpec,
|
| 12 |
+
HConcatChart,
|
| 13 |
+
HConcatSpecGenericSpec,
|
| 14 |
+
LayerChart,
|
| 15 |
+
LayerSpec,
|
| 16 |
+
NonNormalizedSpec,
|
| 17 |
+
TopLevelConcatSpec,
|
| 18 |
+
TopLevelFacetSpec,
|
| 19 |
+
TopLevelHConcatSpec,
|
| 20 |
+
TopLevelLayerSpec,
|
| 21 |
+
TopLevelUnitSpec,
|
| 22 |
+
TopLevelVConcatSpec,
|
| 23 |
+
UnitSpec,
|
| 24 |
+
UnitSpecWithFrame,
|
| 25 |
+
VConcatChart,
|
| 26 |
+
VConcatSpecGenericSpec,
|
| 27 |
+
data_transformers,
|
| 28 |
+
)
|
| 29 |
+
from altair.utils._vegafusion_data import get_inline_tables, import_vegafusion
|
| 30 |
+
from altair.utils.schemapi import Undefined
|
| 31 |
+
|
| 32 |
+
if TYPE_CHECKING:
|
| 33 |
+
import sys
|
| 34 |
+
from collections.abc import Iterable
|
| 35 |
+
|
| 36 |
+
if sys.version_info >= (3, 10):
|
| 37 |
+
from typing import TypeAlias
|
| 38 |
+
else:
|
| 39 |
+
from typing_extensions import TypeAlias
|
| 40 |
+
|
| 41 |
+
from altair.typing import ChartType
|
| 42 |
+
from altair.utils.core import DataFrameLike
|
| 43 |
+
|
| 44 |
+
Scope: TypeAlias = tuple[int, ...]
|
| 45 |
+
FacetMapping: TypeAlias = dict[tuple[str, Scope], tuple[str, Scope]]
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# For the transformed_data functionality, the chart classes in the values
|
| 49 |
+
# can be considered equivalent to the chart class in the key.
|
| 50 |
+
_chart_class_mapping = {
|
| 51 |
+
Chart: (
|
| 52 |
+
Chart,
|
| 53 |
+
TopLevelUnitSpec,
|
| 54 |
+
FacetedUnitSpec,
|
| 55 |
+
UnitSpec,
|
| 56 |
+
UnitSpecWithFrame,
|
| 57 |
+
NonNormalizedSpec,
|
| 58 |
+
),
|
| 59 |
+
LayerChart: (LayerChart, TopLevelLayerSpec, LayerSpec),
|
| 60 |
+
ConcatChart: (ConcatChart, TopLevelConcatSpec, ConcatSpecGenericSpec),
|
| 61 |
+
HConcatChart: (HConcatChart, TopLevelHConcatSpec, HConcatSpecGenericSpec),
|
| 62 |
+
VConcatChart: (VConcatChart, TopLevelVConcatSpec, VConcatSpecGenericSpec),
|
| 63 |
+
FacetChart: (FacetChart, TopLevelFacetSpec, FacetSpec),
|
| 64 |
+
}
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@overload
|
| 68 |
+
def transformed_data(
|
| 69 |
+
chart: Chart | FacetChart,
|
| 70 |
+
row_limit: int | None = None,
|
| 71 |
+
exclude: Iterable[str] | None = None,
|
| 72 |
+
) -> DataFrameLike | None: ...
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
@overload
|
| 76 |
+
def transformed_data(
|
| 77 |
+
chart: LayerChart | HConcatChart | VConcatChart | ConcatChart,
|
| 78 |
+
row_limit: int | None = None,
|
| 79 |
+
exclude: Iterable[str] | None = None,
|
| 80 |
+
) -> list[DataFrameLike]: ...
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def transformed_data(chart, row_limit=None, exclude=None):
|
| 84 |
+
"""
|
| 85 |
+
Evaluate a Chart's transforms.
|
| 86 |
+
|
| 87 |
+
Evaluate the data transforms associated with a Chart and return the
|
| 88 |
+
transformed data as one or more DataFrames
|
| 89 |
+
|
| 90 |
+
Parameters
|
| 91 |
+
----------
|
| 92 |
+
chart : Chart, FacetChart, LayerChart, HConcatChart, VConcatChart, or ConcatChart
|
| 93 |
+
Altair chart to evaluate transforms on
|
| 94 |
+
row_limit : int (optional)
|
| 95 |
+
Maximum number of rows to return for each DataFrame. None (default) for unlimited
|
| 96 |
+
exclude : iterable of str
|
| 97 |
+
Set of the names of charts to exclude
|
| 98 |
+
|
| 99 |
+
Returns
|
| 100 |
+
-------
|
| 101 |
+
DataFrame or list of DataFrames or None
|
| 102 |
+
If input chart is a Chart or Facet Chart, returns a DataFrame of the
|
| 103 |
+
transformed data. Otherwise, returns a list of DataFrames of the
|
| 104 |
+
transformed data
|
| 105 |
+
"""
|
| 106 |
+
vf = import_vegafusion()
|
| 107 |
+
# Add mark if none is specified to satisfy Vega-Lite
|
| 108 |
+
if isinstance(chart, Chart) and chart.mark == Undefined:
|
| 109 |
+
chart = chart.mark_point()
|
| 110 |
+
|
| 111 |
+
# Deep copy chart so that we can rename marks without affecting caller
|
| 112 |
+
chart = chart.copy(deep=True)
|
| 113 |
+
|
| 114 |
+
# Ensure that all views are named so that we can look them up in the
|
| 115 |
+
# resulting Vega specification
|
| 116 |
+
chart_names = name_views(chart, 0, exclude=exclude)
|
| 117 |
+
|
| 118 |
+
# Compile to Vega and extract inline DataFrames
|
| 119 |
+
with data_transformers.enable("vegafusion"):
|
| 120 |
+
vega_spec = chart.to_dict(format="vega", context={"pre_transform": False})
|
| 121 |
+
inline_datasets = get_inline_tables(vega_spec)
|
| 122 |
+
|
| 123 |
+
# Build mapping from mark names to vega datasets
|
| 124 |
+
facet_mapping = get_facet_mapping(vega_spec)
|
| 125 |
+
dataset_mapping = get_datasets_for_view_names(vega_spec, chart_names, facet_mapping)
|
| 126 |
+
|
| 127 |
+
# Build a list of vega dataset names that corresponds to the order
|
| 128 |
+
# of the chart components
|
| 129 |
+
dataset_names = []
|
| 130 |
+
for chart_name in chart_names:
|
| 131 |
+
if chart_name in dataset_mapping:
|
| 132 |
+
dataset_names.append(dataset_mapping[chart_name])
|
| 133 |
+
else:
|
| 134 |
+
msg = "Failed to locate all datasets"
|
| 135 |
+
raise ValueError(msg)
|
| 136 |
+
|
| 137 |
+
# Extract transformed datasets with VegaFusion
|
| 138 |
+
datasets, _ = vf.runtime.pre_transform_datasets(
|
| 139 |
+
vega_spec,
|
| 140 |
+
dataset_names,
|
| 141 |
+
row_limit=row_limit,
|
| 142 |
+
inline_datasets=inline_datasets,
|
| 143 |
+
)
|
| 144 |
+
|
| 145 |
+
if isinstance(chart, (Chart, FacetChart)):
|
| 146 |
+
# Return DataFrame (or None if it was excluded) if input was a simple Chart
|
| 147 |
+
if not datasets:
|
| 148 |
+
return None
|
| 149 |
+
else:
|
| 150 |
+
return datasets[0]
|
| 151 |
+
else:
|
| 152 |
+
# Otherwise return the list of DataFrames
|
| 153 |
+
return datasets
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
# The equivalent classes from _chart_class_mapping should also be added
|
| 157 |
+
# to the type hints below for `chart` as the function would also work for them.
|
| 158 |
+
# However, this was not possible so far as mypy then complains about
|
| 159 |
+
# "Overloaded function signatures 1 and 2 overlap with incompatible return types [misc]"
|
| 160 |
+
# This might be due to the complex type hierarchy of the chart classes.
|
| 161 |
+
# See also https://github.com/python/mypy/issues/5119
|
| 162 |
+
# and https://github.com/python/mypy/issues/4020 which show that mypy might not have
|
| 163 |
+
# a very consistent behavior for overloaded functions.
|
| 164 |
+
# The same error appeared when trying it with Protocols for the concat and layer charts.
|
| 165 |
+
# This function is only used internally and so we accept this inconsistency for now.
|
| 166 |
+
def name_views(
    chart: ChartType, i: int = 0, exclude: Iterable[str] | None = None
) -> list[str]:
    """
    Name unnamed chart views.

    Name unnamed charts views so that we can look them up later in
    the compiled Vega spec.

    Note: This function mutates the input chart by applying names to
    unnamed views.

    Parameters
    ----------
    chart : Chart, FacetChart, LayerChart, HConcatChart, VConcatChart, or ConcatChart
        Altair chart to apply names to
    i : int (default 0)
        Starting chart index
    exclude : iterable of str
        Names of charts to exclude

    Returns
    -------
    list of str
        List of the names of the charts and subcharts

    Raises
    ------
    ValueError
        If ``chart`` is not one of the supported chart types.
    """
    # Normalize exclude to a set for O(1) membership tests below.
    exclude = set(exclude) if exclude is not None else set()
    if isinstance(
        chart, (_chart_class_mapping[Chart], _chart_class_mapping[FacetChart])
    ):
        # Leaf case: a simple or faceted chart carries its own name.
        if chart.name not in exclude:
            if chart.name in {None, Undefined}:
                # Add name since none is specified
                chart.name = Chart._get_name()
            return [chart.name]
        else:
            return []
    else:
        # Compound case: recurse into the subcharts of the compound chart.
        subcharts: list[Any]
        if isinstance(chart, _chart_class_mapping[LayerChart]):
            subcharts = chart.layer
        elif isinstance(chart, _chart_class_mapping[HConcatChart]):
            subcharts = chart.hconcat
        elif isinstance(chart, _chart_class_mapping[VConcatChart]):
            subcharts = chart.vconcat
        elif isinstance(chart, _chart_class_mapping[ConcatChart]):
            subcharts = chart.concat
        else:
            msg = (
                "transformed_data accepts an instance of "
                "Chart, FacetChart, LayerChart, HConcatChart, VConcatChart, or ConcatChart\n"
                f"Received value of type: {type(chart)}"
            )
            raise ValueError(msg)

        # NOTE(review): `i` is threaded through the recursion but never used
        # to construct a name here — Chart._get_name() appears to generate
        # the name on its own. Confirm whether `i` is still needed.
        chart_names: list[str] = []
        for subchart in subcharts:
            for name in name_views(subchart, i=i + len(chart_names), exclude=exclude):
                chart_names.append(name)
        return chart_names
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def get_group_mark_for_scope(
    vega_spec: dict[str, Any], scope: Scope
) -> dict[str, Any] | None:
    """
    Get the group mark at a particular scope.

    Parameters
    ----------
    vega_spec : dict
        Top-level Vega specification dictionary
    scope : tuple of int
        Scope tuple. If empty, the original Vega specification is returned.
        Otherwise, the nested group mark at the scope specified is returned.

    Returns
    -------
    dict or None
        Top-level Vega spec (if scope is empty)
        or group mark (if scope is non-empty)
        or None (if group mark at scope does not exist)

    Examples
    --------
    >>> spec = {
    ...     "marks": [
    ...         {"type": "group", "marks": [{"type": "symbol"}]},
    ...         {"type": "group", "marks": [{"type": "rect"}]},
    ...     ]
    ... }
    >>> get_group_mark_for_scope(spec, (1,))
    {'type': 'group', 'marks': [{'type': 'rect'}]}
    """
    current = vega_spec
    for target_index in scope:
        # Only marks of type "group" participate in scope indexing.
        group_marks = [
            mark for mark in current.get("marks", []) if mark.get("type") == "group"
        ]
        if not 0 <= target_index < len(group_marks):
            # No group mark exists at this position in the scope path.
            return None
        current = group_marks[target_index]
    return current
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def get_datasets_for_scope(vega_spec: dict[str, Any], scope: Scope) -> list[str]:
    """
    Get the names of the datasets that are defined at a given scope.

    Parameters
    ----------
    vega_spec : dict
        Top-level Vega specification
    scope : tuple of int
        Scope tuple. If empty, the names of top-level datasets are returned
        Otherwise, the names of the datasets defined in the nested group mark
        at the specified scope are returned.

    Returns
    -------
    list of str
        List of the names of the datasets defined at the specified scope

    Examples
    --------
    >>> spec = {
    ...     "data": [{"name": "data1"}],
    ...     "marks": [
    ...         {
    ...             "type": "group",
    ...             "data": [{"name": "data2"}],
    ...             "marks": [{"type": "symbol"}],
    ...         },
    ...         {
    ...             "type": "group",
    ...             "data": [
    ...                 {"name": "data3"},
    ...                 {"name": "data4"},
    ...             ],
    ...             "marks": [{"type": "rect"}],
    ...         },
    ...     ],
    ... }

    >>> get_datasets_for_scope(spec, ())
    ['data1']

    >>> get_datasets_for_scope(spec, (0,))
    ['data2']

    >>> get_datasets_for_scope(spec, (1,))
    ['data3', 'data4']

    Returns empty when no group mark exists at scope
    >>> get_datasets_for_scope(spec, (1, 3))
    []
    """
    group = get_group_mark_for_scope(vega_spec, scope) or {}

    # Datasets declared directly on this group, in definition order.
    names = [entry["name"] for entry in group.get("data", [])]

    # A faceted group mark also implicitly defines its facet dataset.
    facet_name = group.get("from", {}).get("facet", {}).get("name", None)
    if facet_name:
        names.append(facet_name)
    return names
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
def get_definition_scope_for_data_reference(
    vega_spec: dict[str, Any], data_name: str, usage_scope: Scope
) -> Scope | None:
    """
    Return the scope that a dataset is defined at, for a given usage scope.

    Parameters
    ----------
    vega_spec: dict
        Top-level Vega specification
    data_name: str
        The name of a dataset reference
    usage_scope: tuple of int
        The scope that the dataset is referenced in

    Returns
    -------
    tuple of int
        The scope where the referenced dataset is defined,
        or None if no such dataset is found

    Examples
    --------
    >>> spec = {
    ...     "data": [{"name": "data1"}],
    ...     "marks": [
    ...         {
    ...             "type": "group",
    ...             "data": [{"name": "data2"}],
    ...             "marks": [
    ...                 {
    ...                     "type": "symbol",
    ...                     "encode": {
    ...                         "update": {
    ...                             "x": {"field": "x", "data": "data1"},
    ...                             "y": {"field": "y", "data": "data2"},
    ...                         }
    ...                     },
    ...                 }
    ...             ],
    ...         }
    ...     ],
    ... }

    data1 is referenced at scope [0] and defined at scope []
    >>> get_definition_scope_for_data_reference(spec, "data1", (0,))
    ()

    data2 is referenced at scope [0] and defined at scope [0]
    >>> get_definition_scope_for_data_reference(spec, "data2", (0,))
    (0,)

    data2 is not visible at scope [] (the top level),
    because it's defined in scope [0]
    >>> repr(get_definition_scope_for_data_reference(spec, "data2", ()))
    'None'
    """
    # Walk outward from the usage scope toward the top level; the innermost
    # enclosing scope that defines the name wins.
    candidate = usage_scope
    while True:
        if data_name in get_datasets_for_scope(vega_spec, candidate):
            return candidate
        if not candidate:
            # Reached the top level without a match.
            return None
        candidate = candidate[:-1]
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
def get_facet_mapping(group: dict[str, Any], scope: Scope = ()) -> FacetMapping:
    """
    Create mapping from facet definitions to source datasets.

    Parameters
    ----------
    group : dict
        Top-level Vega spec or nested group mark
    scope : tuple of int
        Scope of the group dictionary within a top-level Vega spec

    Returns
    -------
    dict
        Dictionary from (facet_name, facet_scope) to (dataset_name, dataset_scope)

    Examples
    --------
    >>> spec = {
    ...     "data": [{"name": "data1"}],
    ...     "marks": [
    ...         {
    ...             "type": "group",
    ...             "from": {
    ...                 "facet": {
    ...                     "name": "facet1",
    ...                     "data": "data1",
    ...                     "groupby": ["colA"],
    ...                 }
    ...             },
    ...         }
    ...     ],
    ... }
    >>> get_facet_mapping(spec)
    {('facet1', (0,)): ('data1', ())}
    """
    facet_mapping = {}
    # group_index counts only group-type marks: it must agree with the
    # indexing used by get_group_mark_for_scope.
    group_index = 0
    mark_group = get_group_mark_for_scope(group, scope) or {}
    for mark in mark_group.get("marks", []):
        if mark.get("type", None) == "group":
            # Get facet for this group
            group_scope = (*scope, group_index)
            facet = mark.get("from", {}).get("facet", None)
            if facet is not None:
                facet_name = facet.get("name", None)
                facet_data = facet.get("data", None)
                if facet_name is not None and facet_data is not None:
                    # Resolve where the facet's source dataset is defined,
                    # searching outward from the current scope.
                    definition_scope = get_definition_scope_for_data_reference(
                        group, facet_data, scope
                    )
                    if definition_scope is not None:
                        facet_mapping[facet_name, group_scope] = (
                            facet_data,
                            definition_scope,
                        )

            # Handle children recursively
            child_mapping = get_facet_mapping(group, scope=group_scope)
            facet_mapping.update(child_mapping)
            group_index += 1

    return facet_mapping
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def get_from_facet_mapping(
    scoped_dataset: tuple[str, Scope], facet_mapping: FacetMapping
) -> tuple[str, Scope]:
    """
    Apply facet mapping to a scoped dataset.

    Parameters
    ----------
    scoped_dataset : (str, tuple of int)
        A dataset name and scope tuple
    facet_mapping : dict from (str, tuple of int) to (str, tuple of int)
        The facet mapping produced by get_facet_mapping

    Returns
    -------
    (str, tuple of int)
        Dataset name and scope tuple that has been mapped as many times as possible

    Examples
    --------
    Facet mapping as produced by get_facet_mapping
    >>> facet_mapping = {
    ...     ("facet1", (0,)): ("data1", ()),
    ...     ("facet2", (0, 1)): ("facet1", (0,)),
    ... }
    >>> get_from_facet_mapping(("facet2", (0, 1)), facet_mapping)
    ('data1', ())
    """
    # Follow the facet chain until we reach a dataset that is not itself
    # the product of a facet definition.
    current = scoped_dataset
    while True:
        mapped = facet_mapping.get(current)
        if mapped is None:
            return current
        current = mapped
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
def get_datasets_for_view_names(
    group: dict[str, Any],
    vl_chart_names: list[str],
    facet_mapping: FacetMapping,
    scope: Scope = (),
) -> dict[str, tuple[str, Scope]]:
    """
    Get the Vega datasets that correspond to the provided Altair view names.

    Parameters
    ----------
    group : dict
        Top-level Vega spec or nested group mark
    vl_chart_names : list of str
        List of the Vega-Lite view names to look up
    facet_mapping : dict from (str, tuple of int) to (str, tuple of int)
        The facet mapping produced by get_facet_mapping
    scope : tuple of int
        Scope of the group dictionary within a top-level Vega spec

    Returns
    -------
    dict from str to (str, tuple of int)
        Dict from Altair view names to scoped datasets
    """
    datasets = {}
    # group_index counts only group-type marks, matching the indexing
    # convention of get_group_mark_for_scope.
    group_index = 0
    mark_group = get_group_mark_for_scope(group, scope) or {}
    for mark in mark_group.get("marks", []):
        for vl_chart_name in vl_chart_names:
            if mark.get("name", "") == f"{vl_chart_name}_cell":
                # Fall back to {} (not None) so a "_cell" group mark without
                # a from.facet entry doesn't raise AttributeError on the
                # chained .get call.
                data_name = mark.get("from", {}).get("facet", {}).get("data", None)
                scoped_data_name = (data_name, scope)
                datasets[vl_chart_name] = get_from_facet_mapping(
                    scoped_data_name, facet_mapping
                )
                break

        name = mark.get("name", "")
        if mark.get("type", "") == "group":
            # Recurse into nested group marks; outer matches take priority
            # via setdefault.
            group_data_names = get_datasets_for_view_names(
                group, vl_chart_names, facet_mapping, scope=(*scope, group_index)
            )
            for k, v in group_data_names.items():
                datasets.setdefault(k, v)
            group_index += 1
        else:
            for vl_chart_name in vl_chart_names:
                if name.startswith(vl_chart_name) and name.endswith("_marks"):
                    data_name = mark.get("from", {}).get("data", None)
                    scoped_data = get_definition_scope_for_data_reference(
                        group, data_name, scope
                    )
                    if scoped_data is not None:
                        datasets[vl_chart_name] = get_from_facet_mapping(
                            (data_name, scoped_data), facet_mapping
                        )
                    break

    return datasets
|
mgm/lib/python3.10/site-packages/altair/utils/_vegafusion_data.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import uuid
|
| 4 |
+
from importlib.metadata import version as importlib_version
|
| 5 |
+
from typing import TYPE_CHECKING, Any, Callable, Final, TypedDict, Union, overload
|
| 6 |
+
from weakref import WeakValueDictionary
|
| 7 |
+
|
| 8 |
+
from narwhals.stable.v1.dependencies import is_into_dataframe
|
| 9 |
+
from packaging.version import Version
|
| 10 |
+
|
| 11 |
+
from altair.utils._importers import import_vegafusion
|
| 12 |
+
from altair.utils.core import DataFrameLike
|
| 13 |
+
from altair.utils.data import (
|
| 14 |
+
DataType,
|
| 15 |
+
MaxRowsError,
|
| 16 |
+
SupportsGeoInterface,
|
| 17 |
+
ToValuesReturnType,
|
| 18 |
+
)
|
| 19 |
+
from altair.vegalite.data import default_data_transformer
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
import sys
|
| 23 |
+
from collections.abc import MutableMapping
|
| 24 |
+
|
| 25 |
+
from narwhals.stable.v1.typing import IntoDataFrame
|
| 26 |
+
|
| 27 |
+
from vegafusion.runtime import ChartState
|
| 28 |
+
|
| 29 |
+
if sys.version_info >= (3, 13):
|
| 30 |
+
from typing import TypeIs
|
| 31 |
+
else:
|
| 32 |
+
from typing_extensions import TypeIs
|
| 33 |
+
|
| 34 |
+
# Temporary storage for dataframes that have been extracted
|
| 35 |
+
# from charts by the vegafusion data transformer. Use a WeakValueDictionary
|
| 36 |
+
# rather than a dict so that the Python interpreter is free to garbage
|
| 37 |
+
# collect the stored DataFrames.
|
| 38 |
+
extracted_inline_tables: MutableMapping[str, DataFrameLike] = WeakValueDictionary()
|
| 39 |
+
|
| 40 |
+
# Special URL prefix that VegaFusion uses to denote that a
|
| 41 |
+
# dataset in a Vega spec corresponds to an entry in the `inline_datasets`
|
| 42 |
+
# kwarg of vf.runtime.pre_transform_spec().
|
| 43 |
+
VEGAFUSION_PREFIX: Final = "vegafusion+dataset://"
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
try:
    # Probe the installed VegaFusion version without importing the package.
    VEGAFUSION_VERSION: Version | None = Version(importlib_version("vegafusion"))
except ImportError:
    # vegafusion is an optional dependency; None signals "not installed".
    VEGAFUSION_VERSION = None
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
if VEGAFUSION_VERSION and Version("2.0.0a0") <= VEGAFUSION_VERSION:

    def is_supported_by_vf(data: Any) -> TypeIs[DataFrameLike]:
        """Return True if VegaFusion can consume ``data`` directly."""
        # Test whether VegaFusion supports the data type.
        # VegaFusion v2 additionally supports narwhals-compatible DataFrames.
        return isinstance(data, DataFrameLike) or is_into_dataframe(data)

else:

    def is_supported_by_vf(data: Any) -> TypeIs[DataFrameLike]:
        """Return True if VegaFusion can consume ``data`` directly."""
        # VegaFusion < 2.0 (or not installed): only objects implementing the
        # DataFrame interchange protocol are supported.
        return isinstance(data, DataFrameLike)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class _ToVegaFusionReturnUrlDict(TypedDict):
    """Transformer result that references an extracted inline table by URL."""

    # URL of the form VEGAFUSION_PREFIX + table_name, later resolved against
    # ``extracted_inline_tables``.
    url: str


# A vegafusion transformer invocation either returns a URL reference to an
# extracted table, or falls back to the default to-values result.
_VegaFusionReturnType = Union[_ToVegaFusionReturnUrlDict, ToValuesReturnType]
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@overload
def vegafusion_data_transformer(
    data: None = ..., max_rows: int = ...
) -> Callable[..., Any]: ...


@overload
def vegafusion_data_transformer(
    data: DataFrameLike, max_rows: int = ...
) -> ToValuesReturnType: ...


@overload
def vegafusion_data_transformer(
    data: dict | IntoDataFrame | SupportsGeoInterface, max_rows: int = ...
) -> _VegaFusionReturnType: ...


def vegafusion_data_transformer(
    data: DataType | None = None, max_rows: int = 100000
) -> Callable[..., Any] | _VegaFusionReturnType:
    """
    VegaFusion Data Transformer.

    When called with no data, returns itself (the curried form expected by
    the data-transformer plugin protocol). Otherwise, stores supported
    DataFrames in ``extracted_inline_tables`` and returns a URL reference;
    unsupported inputs fall through to the default transformer.
    """
    if data is None:
        # Curried invocation from the plugin machinery.
        return vegafusion_data_transformer

    if is_supported_by_vf(data) and not isinstance(data, SupportsGeoInterface):
        # uuid4 makes the table name unique; '-' is replaced with '_',
        # presumably so the name is a valid identifier — TODO confirm.
        table_name = f"table_{uuid.uuid4()}".replace("-", "_")
        extracted_inline_tables[table_name] = data
        return {"url": VEGAFUSION_PREFIX + table_name}
    else:
        # Use default transformer for geo interface objects
        # (e.g. a geopandas GeoDataFrame),
        # or if we don't recognize the data type.
        return default_data_transformer(data)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def get_inline_table_names(vega_spec: dict[str, Any]) -> set[str]:
    """
    Get a set of the inline datasets names in the provided Vega spec.

    Inline datasets are encoded as URLs that start with the
    vegafusion+dataset:// prefix.

    Parameters
    ----------
    vega_spec: dict
        A Vega specification dict

    Returns
    -------
    set of str
        Set of the names of the inline datasets that are referenced
        in the specification.

    Examples
    --------
    >>> spec = {
    ...     "data": [
    ...         {"name": "foo", "url": "https://path/to/file.csv"},
    ...         {"name": "bar", "url": "vegafusion+dataset://inline_dataset_123"},
    ...     ]
    ... }
    >>> get_inline_table_names(spec)
    {'inline_dataset_123'}
    """
    # Names of inline datasets declared directly on this (group) spec.
    names: set[str] = {
        entry["url"].removeprefix(VEGAFUSION_PREFIX)
        for entry in vega_spec.get("data", [])
        if entry.get("url", "").startswith(VEGAFUSION_PREFIX)
    }

    # Group marks may declare their own datasets; gather them recursively.
    for child_mark in vega_spec.get("marks", []):
        names |= get_inline_table_names(child_mark)

    return names
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def get_inline_tables(vega_spec: dict[str, Any]) -> dict[str, DataFrameLike]:
    """
    Get the inline tables referenced by a Vega specification.

    Note: This function should only be called on a Vega spec that corresponds
    to a chart that was processed by the vegafusion_data_transformer.
    Furthermore, this function may only be called once per spec because
    the returned dataframes are deleted from internal storage.

    Parameters
    ----------
    vega_spec: dict
        A Vega specification dict

    Returns
    -------
    dict from str to dataframe
        dict from inline dataset name to dataframe object
    """
    tables: dict[str, DataFrameLike] = {}
    for name in get_inline_table_names(vega_spec):
        # extracted_inline_tables is a WeakValueDictionary, so an entry can
        # vanish between a membership snapshot and the pop (garbage
        # collection), and names supplied directly by the user were never
        # stored at all. Popping under try/except handles both cases without
        # a racy two-step check.
        try:
            tables[name] = extracted_inline_tables.pop(name)
        except KeyError:
            pass
    return tables
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def compile_to_vegafusion_chart_state(
    vegalite_spec: dict[str, Any], local_tz: str
) -> ChartState:
    """
    Compile a Vega-Lite spec to a VegaFusion ChartState.

    Note: This function should only be called on a Vega-Lite spec
    that was generated with the "vegafusion" data transformer enabled.
    In particular, this spec may contain references to extracted datasets
    using vegafusion+dataset:// prefixed URLs.

    Parameters
    ----------
    vegalite_spec: dict
        A Vega-Lite spec that was generated from an Altair chart with
        the "vegafusion" data transformer enabled
    local_tz: str
        Local timezone name (e.g. 'America/New_York')

    Returns
    -------
    ChartState
        A VegaFusion ChartState object

    Raises
    ------
    ValueError
        If no Vega-Lite compiler plugin is active.
    """
    # Local import to avoid circular ImportError
    from altair import data_transformers, vegalite_compilers

    vf = import_vegafusion()

    # Compile Vega-Lite spec to Vega
    compiler = vegalite_compilers.get()
    if compiler is None:
        msg = "No active vega-lite compiler plugin found"
        raise ValueError(msg)

    vega_spec = compiler(vegalite_spec)

    # Retrieve dict of inline tables referenced by the spec
    inline_tables = get_inline_tables(vega_spec)

    # Pre-evaluate transforms in vega spec with vegafusion
    row_limit = data_transformers.options.get("max_rows", None)

    chart_state = vf.runtime.new_chart_state(
        vega_spec,
        local_tz=local_tz,
        inline_datasets=inline_tables,
        row_limit=row_limit,
    )

    # Check for a row-limit warning and convert it to MaxRowsError
    handle_row_limit_exceeded(row_limit, chart_state.get_warnings())

    return chart_state
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def compile_with_vegafusion(vegalite_spec: dict[str, Any]) -> dict[str, Any]:
    """
    Compile a Vega-Lite spec to Vega and pre-transform with VegaFusion.

    Note: This function should only be called on a Vega-Lite spec
    that was generated with the "vegafusion" data transformer enabled.
    In particular, this spec may contain references to extracted datasets
    using vegafusion+dataset:// prefixed URLs.

    Parameters
    ----------
    vegalite_spec: dict
        A Vega-Lite spec that was generated from an Altair chart with
        the "vegafusion" data transformer enabled

    Returns
    -------
    dict
        A Vega spec that has been pre-transformed by VegaFusion

    Raises
    ------
    ValueError
        If no Vega-Lite compiler plugin is active.
    """
    # Local import to avoid circular ImportError
    from altair import data_transformers, vegalite_compilers

    vf = import_vegafusion()

    # Compile Vega-Lite spec to Vega
    compiler = vegalite_compilers.get()
    if compiler is None:
        msg = "No active vega-lite compiler plugin found"
        raise ValueError(msg)

    vega_spec = compiler(vegalite_spec)

    # Retrieve dict of inline tables referenced by the spec
    inline_tables = get_inline_tables(vega_spec)

    # Pre-evaluate transforms in vega spec with vegafusion
    row_limit = data_transformers.options.get("max_rows", None)
    transformed_vega_spec, warnings = vf.runtime.pre_transform_spec(
        vega_spec,
        vf.get_local_tz(),
        inline_datasets=inline_tables,
        row_limit=row_limit,
    )

    # Check for a row-limit warning and convert it to MaxRowsError
    handle_row_limit_exceeded(row_limit, warnings)

    return transformed_vega_spec
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
def handle_row_limit_exceeded(
    row_limit: int | None, warnings: list[dict[str, Any]]
) -> None:
    """
    Raise MaxRowsError if VegaFusion reported that the row limit was exceeded.

    Parameters
    ----------
    row_limit : int or None
        The row limit that was passed to VegaFusion. Call sites pass
        ``data_transformers.options.get("max_rows", None)``, so None
        (unlimited) is a valid value.
    warnings : list of dict
        Warning records returned by VegaFusion's pre-transform APIs.

    Raises
    ------
    MaxRowsError
        If any warning has type "RowLimitExceeded".
    """
    for warning in warnings:
        if warning.get("type") == "RowLimitExceeded":
            msg = (
                "The number of dataset rows after filtering and aggregation exceeds\n"
                f"the current limit of {row_limit}. Try adding an aggregation to reduce\n"
                "the size of the dataset that must be loaded into the browser. Or, disable\n"
                "the limit by calling alt.data_transformers.disable_max_rows(). Note that\n"
                "disabling this limit may cause the browser to freeze or crash."
            )
            raise MaxRowsError(msg)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def using_vegafusion() -> bool:
    """Check whether the vegafusion data transformer is enabled."""
    # Imported here (not at module top) to avoid a circular import.
    from altair import data_transformers

    active_transformer = data_transformers.active
    return active_transformer == "vegafusion"
|
mgm/lib/python3.10/site-packages/altair/utils/compiler.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Callable
|
| 2 |
+
|
| 3 |
+
from altair.utils import PluginRegistry
|
| 4 |
+
|
| 5 |
+
# ==============================================================================
|
| 6 |
+
# Vega-Lite to Vega compiler registry
|
| 7 |
+
# ==============================================================================
|
| 8 |
+
VegaLiteCompilerType = Callable[[dict[str, Any]], dict[str, Any]]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class VegaLiteCompilerRegistry(PluginRegistry[VegaLiteCompilerType, dict[str, Any]]):
    """Plugin registry of callables that compile a Vega-Lite spec dict to a Vega spec dict."""

    pass
|
mgm/lib/python3.10/site-packages/altair/utils/core.py
ADDED
|
@@ -0,0 +1,981 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility routines."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import itertools
|
| 6 |
+
import json
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
import warnings
|
| 11 |
+
from collections.abc import Iterator, Mapping, MutableMapping
|
| 12 |
+
from copy import deepcopy
|
| 13 |
+
from itertools import groupby
|
| 14 |
+
from operator import itemgetter
|
| 15 |
+
from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, cast, overload
|
| 16 |
+
|
| 17 |
+
import jsonschema
|
| 18 |
+
import narwhals.stable.v1 as nw
|
| 19 |
+
from narwhals.stable.v1.dependencies import is_pandas_dataframe, is_polars_dataframe
|
| 20 |
+
from narwhals.stable.v1.typing import IntoDataFrame
|
| 21 |
+
|
| 22 |
+
from altair.utils.schemapi import SchemaBase, SchemaLike, Undefined
|
| 23 |
+
|
| 24 |
+
if sys.version_info >= (3, 12):
|
| 25 |
+
from typing import Protocol, TypeAliasType, runtime_checkable
|
| 26 |
+
else:
|
| 27 |
+
from typing_extensions import Protocol, TypeAliasType, runtime_checkable
|
| 28 |
+
if sys.version_info >= (3, 10):
|
| 29 |
+
from typing import Concatenate, ParamSpec
|
| 30 |
+
else:
|
| 31 |
+
from typing_extensions import Concatenate, ParamSpec
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
if TYPE_CHECKING:
|
| 35 |
+
import typing as t
|
| 36 |
+
|
| 37 |
+
import pandas as pd
|
| 38 |
+
from narwhals.stable.v1.typing import IntoExpr
|
| 39 |
+
|
| 40 |
+
from altair.utils._dfi_types import DataFrame as DfiDataFrame
|
| 41 |
+
from altair.vegalite.v5.schema._typing import StandardType_T as InferredVegaLiteType
|
| 42 |
+
|
| 43 |
+
TIntoDataFrame = TypeVar("TIntoDataFrame", bound=IntoDataFrame)
|
| 44 |
+
T = TypeVar("T")
|
| 45 |
+
P = ParamSpec("P")
|
| 46 |
+
R = TypeVar("R")
|
| 47 |
+
|
| 48 |
+
WrapsFunc = TypeAliasType("WrapsFunc", Callable[..., R], type_params=(R,))
|
| 49 |
+
WrappedFunc = TypeAliasType("WrappedFunc", Callable[P, R], type_params=(P, R))
|
| 50 |
+
# NOTE: Requires stringized form to avoid `< (3, 11)` issues
|
| 51 |
+
# See: https://github.com/vega/altair/actions/runs/10667859416/job/29567290871?pr=3565
|
| 52 |
+
WrapsMethod = TypeAliasType(
|
| 53 |
+
"WrapsMethod", "Callable[Concatenate[T, ...], R]", type_params=(T, R)
|
| 54 |
+
)
|
| 55 |
+
WrappedMethod = TypeAliasType(
|
| 56 |
+
"WrappedMethod", Callable[Concatenate[T, P], R], type_params=(T, P, R)
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
@runtime_checkable
|
| 61 |
+
class DataFrameLike(Protocol):
|
| 62 |
+
def __dataframe__(
|
| 63 |
+
self, nan_as_null: bool = False, allow_copy: bool = True
|
| 64 |
+
) -> DfiDataFrame: ...
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
TYPECODE_MAP = {
|
| 68 |
+
"ordinal": "O",
|
| 69 |
+
"nominal": "N",
|
| 70 |
+
"quantitative": "Q",
|
| 71 |
+
"temporal": "T",
|
| 72 |
+
"geojson": "G",
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
INV_TYPECODE_MAP = {v: k for k, v in TYPECODE_MAP.items()}
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
# aggregates from vega-lite version 4.6.0
|
| 79 |
+
AGGREGATES = [
|
| 80 |
+
"argmax",
|
| 81 |
+
"argmin",
|
| 82 |
+
"average",
|
| 83 |
+
"count",
|
| 84 |
+
"distinct",
|
| 85 |
+
"max",
|
| 86 |
+
"mean",
|
| 87 |
+
"median",
|
| 88 |
+
"min",
|
| 89 |
+
"missing",
|
| 90 |
+
"product",
|
| 91 |
+
"q1",
|
| 92 |
+
"q3",
|
| 93 |
+
"ci0",
|
| 94 |
+
"ci1",
|
| 95 |
+
"stderr",
|
| 96 |
+
"stdev",
|
| 97 |
+
"stdevp",
|
| 98 |
+
"sum",
|
| 99 |
+
"valid",
|
| 100 |
+
"values",
|
| 101 |
+
"variance",
|
| 102 |
+
"variancep",
|
| 103 |
+
"exponential",
|
| 104 |
+
"exponentialb",
|
| 105 |
+
]
|
| 106 |
+
|
| 107 |
+
# window aggregates from vega-lite version 4.6.0
|
| 108 |
+
WINDOW_AGGREGATES = [
|
| 109 |
+
"row_number",
|
| 110 |
+
"rank",
|
| 111 |
+
"dense_rank",
|
| 112 |
+
"percent_rank",
|
| 113 |
+
"cume_dist",
|
| 114 |
+
"ntile",
|
| 115 |
+
"lag",
|
| 116 |
+
"lead",
|
| 117 |
+
"first_value",
|
| 118 |
+
"last_value",
|
| 119 |
+
"nth_value",
|
| 120 |
+
]
|
| 121 |
+
|
| 122 |
+
# timeUnits from vega-lite version 4.17.0
|
| 123 |
+
TIMEUNITS = [
|
| 124 |
+
"year",
|
| 125 |
+
"quarter",
|
| 126 |
+
"month",
|
| 127 |
+
"week",
|
| 128 |
+
"day",
|
| 129 |
+
"dayofyear",
|
| 130 |
+
"date",
|
| 131 |
+
"hours",
|
| 132 |
+
"minutes",
|
| 133 |
+
"seconds",
|
| 134 |
+
"milliseconds",
|
| 135 |
+
"yearquarter",
|
| 136 |
+
"yearquartermonth",
|
| 137 |
+
"yearmonth",
|
| 138 |
+
"yearmonthdate",
|
| 139 |
+
"yearmonthdatehours",
|
| 140 |
+
"yearmonthdatehoursminutes",
|
| 141 |
+
"yearmonthdatehoursminutesseconds",
|
| 142 |
+
"yearweek",
|
| 143 |
+
"yearweekday",
|
| 144 |
+
"yearweekdayhours",
|
| 145 |
+
"yearweekdayhoursminutes",
|
| 146 |
+
"yearweekdayhoursminutesseconds",
|
| 147 |
+
"yeardayofyear",
|
| 148 |
+
"quartermonth",
|
| 149 |
+
"monthdate",
|
| 150 |
+
"monthdatehours",
|
| 151 |
+
"monthdatehoursminutes",
|
| 152 |
+
"monthdatehoursminutesseconds",
|
| 153 |
+
"weekday",
|
| 154 |
+
"weeksdayhours",
|
| 155 |
+
"weekdayhours",
|
| 156 |
+
"weekdayhoursminutes",
|
| 157 |
+
"weekdayhoursminutesseconds",
|
| 158 |
+
"dayhours",
|
| 159 |
+
"dayhoursminutes",
|
| 160 |
+
"dayhoursminutesseconds",
|
| 161 |
+
"hoursminutes",
|
| 162 |
+
"hoursminutesseconds",
|
| 163 |
+
"minutesseconds",
|
| 164 |
+
"secondsmilliseconds",
|
| 165 |
+
"utcyear",
|
| 166 |
+
"utcquarter",
|
| 167 |
+
"utcmonth",
|
| 168 |
+
"utcweek",
|
| 169 |
+
"utcday",
|
| 170 |
+
"utcdayofyear",
|
| 171 |
+
"utcdate",
|
| 172 |
+
"utchours",
|
| 173 |
+
"utcminutes",
|
| 174 |
+
"utcseconds",
|
| 175 |
+
"utcmilliseconds",
|
| 176 |
+
"utcyearquarter",
|
| 177 |
+
"utcyearquartermonth",
|
| 178 |
+
"utcyearmonth",
|
| 179 |
+
"utcyearmonthdate",
|
| 180 |
+
"utcyearmonthdatehours",
|
| 181 |
+
"utcyearmonthdatehoursminutes",
|
| 182 |
+
"utcyearmonthdatehoursminutesseconds",
|
| 183 |
+
"utcyearweek",
|
| 184 |
+
"utcyearweekday",
|
| 185 |
+
"utcyearweekdayhours",
|
| 186 |
+
"utcyearweekdayhoursminutes",
|
| 187 |
+
"utcyearweekdayhoursminutesseconds",
|
| 188 |
+
"utcyeardayofyear",
|
| 189 |
+
"utcquartermonth",
|
| 190 |
+
"utcmonthdate",
|
| 191 |
+
"utcmonthdatehours",
|
| 192 |
+
"utcmonthdatehoursminutes",
|
| 193 |
+
"utcmonthdatehoursminutesseconds",
|
| 194 |
+
"utcweekday",
|
| 195 |
+
"utcweekdayhours",
|
| 196 |
+
"utcweekdayhoursminutes",
|
| 197 |
+
"utcweekdayhoursminutesseconds",
|
| 198 |
+
"utcdayhours",
|
| 199 |
+
"utcdayhoursminutes",
|
| 200 |
+
"utcdayhoursminutesseconds",
|
| 201 |
+
"utchoursminutes",
|
| 202 |
+
"utchoursminutesseconds",
|
| 203 |
+
"utcminutesseconds",
|
| 204 |
+
"utcsecondsmilliseconds",
|
| 205 |
+
]
|
| 206 |
+
|
| 207 |
+
VALID_TYPECODES = list(itertools.chain(iter(TYPECODE_MAP), iter(INV_TYPECODE_MAP)))
|
| 208 |
+
|
| 209 |
+
SHORTHAND_UNITS = {
|
| 210 |
+
"field": "(?P<field>.*)",
|
| 211 |
+
"type": "(?P<type>{})".format("|".join(VALID_TYPECODES)),
|
| 212 |
+
"agg_count": "(?P<aggregate>count)",
|
| 213 |
+
"op_count": "(?P<op>count)",
|
| 214 |
+
"aggregate": "(?P<aggregate>{})".format("|".join(AGGREGATES)),
|
| 215 |
+
"window_op": "(?P<op>{})".format("|".join(AGGREGATES + WINDOW_AGGREGATES)),
|
| 216 |
+
"timeUnit": "(?P<timeUnit>{})".format("|".join(TIMEUNITS)),
|
| 217 |
+
}
|
| 218 |
+
|
| 219 |
+
SHORTHAND_KEYS: frozenset[Literal["field", "aggregate", "type", "timeUnit"]] = (
|
| 220 |
+
frozenset(("field", "aggregate", "type", "timeUnit"))
|
| 221 |
+
)
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def infer_vegalite_type_for_pandas(
|
| 225 |
+
data: Any,
|
| 226 |
+
) -> InferredVegaLiteType | tuple[InferredVegaLiteType, list[Any]]:
|
| 227 |
+
"""
|
| 228 |
+
From an array-like input, infer the correct vega typecode.
|
| 229 |
+
|
| 230 |
+
('ordinal', 'nominal', 'quantitative', or 'temporal').
|
| 231 |
+
|
| 232 |
+
Parameters
|
| 233 |
+
----------
|
| 234 |
+
data: Any
|
| 235 |
+
"""
|
| 236 |
+
# This is safe to import here, as this function is only called on pandas input.
|
| 237 |
+
from pandas.api.types import infer_dtype
|
| 238 |
+
|
| 239 |
+
typ = infer_dtype(data, skipna=False)
|
| 240 |
+
|
| 241 |
+
if typ in {
|
| 242 |
+
"floating",
|
| 243 |
+
"mixed-integer-float",
|
| 244 |
+
"integer",
|
| 245 |
+
"mixed-integer",
|
| 246 |
+
"complex",
|
| 247 |
+
}:
|
| 248 |
+
return "quantitative"
|
| 249 |
+
elif typ == "categorical" and hasattr(data, "cat") and data.cat.ordered:
|
| 250 |
+
return ("ordinal", data.cat.categories.tolist())
|
| 251 |
+
elif typ in {"string", "bytes", "categorical", "boolean", "mixed", "unicode"}:
|
| 252 |
+
return "nominal"
|
| 253 |
+
elif typ in {
|
| 254 |
+
"datetime",
|
| 255 |
+
"datetime64",
|
| 256 |
+
"timedelta",
|
| 257 |
+
"timedelta64",
|
| 258 |
+
"date",
|
| 259 |
+
"time",
|
| 260 |
+
"period",
|
| 261 |
+
}:
|
| 262 |
+
return "temporal"
|
| 263 |
+
else:
|
| 264 |
+
warnings.warn(
|
| 265 |
+
f"I don't know how to infer vegalite type from '{typ}'. "
|
| 266 |
+
"Defaulting to nominal.",
|
| 267 |
+
stacklevel=1,
|
| 268 |
+
)
|
| 269 |
+
return "nominal"
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def merge_props_geom(feat: dict[str, Any]) -> dict[str, Any]:
|
| 273 |
+
"""
|
| 274 |
+
Merge properties with geometry.
|
| 275 |
+
|
| 276 |
+
* Overwrites 'type' and 'geometry' entries if existing.
|
| 277 |
+
"""
|
| 278 |
+
geom = {k: feat[k] for k in ("type", "geometry")}
|
| 279 |
+
try:
|
| 280 |
+
feat["properties"].update(geom)
|
| 281 |
+
props_geom = feat["properties"]
|
| 282 |
+
except (AttributeError, KeyError):
|
| 283 |
+
# AttributeError when 'properties' equals None
|
| 284 |
+
# KeyError when 'properties' is non-existing
|
| 285 |
+
props_geom = geom
|
| 286 |
+
|
| 287 |
+
return props_geom
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def sanitize_geo_interface(geo: t.MutableMapping[Any, Any]) -> dict[str, Any]:
|
| 291 |
+
"""
|
| 292 |
+
Santize a geo_interface to prepare it for serialization.
|
| 293 |
+
|
| 294 |
+
* Make a copy
|
| 295 |
+
* Convert type array or _Array to list
|
| 296 |
+
* Convert tuples to lists (using json.loads/dumps)
|
| 297 |
+
* Merge properties with geometry
|
| 298 |
+
"""
|
| 299 |
+
geo = deepcopy(geo)
|
| 300 |
+
|
| 301 |
+
# convert type _Array or array to list
|
| 302 |
+
for key in geo:
|
| 303 |
+
if str(type(geo[key]).__name__).startswith(("_Array", "array")):
|
| 304 |
+
geo[key] = geo[key].tolist()
|
| 305 |
+
|
| 306 |
+
# convert (nested) tuples to lists
|
| 307 |
+
geo_dct: dict = json.loads(json.dumps(geo))
|
| 308 |
+
|
| 309 |
+
# sanitize features
|
| 310 |
+
if geo_dct["type"] == "FeatureCollection":
|
| 311 |
+
geo_dct = geo_dct["features"]
|
| 312 |
+
if len(geo_dct) > 0:
|
| 313 |
+
for idx, feat in enumerate(geo_dct):
|
| 314 |
+
geo_dct[idx] = merge_props_geom(feat)
|
| 315 |
+
elif geo_dct["type"] == "Feature":
|
| 316 |
+
geo_dct = merge_props_geom(geo_dct)
|
| 317 |
+
else:
|
| 318 |
+
geo_dct = {"type": "Feature", "geometry": geo_dct}
|
| 319 |
+
|
| 320 |
+
return geo_dct
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
def numpy_is_subtype(dtype: Any, subtype: Any) -> bool:
|
| 324 |
+
# This is only called on `numpy` inputs, so it's safe to import it here.
|
| 325 |
+
import numpy as np
|
| 326 |
+
|
| 327 |
+
try:
|
| 328 |
+
return np.issubdtype(dtype, subtype)
|
| 329 |
+
except (NotImplementedError, TypeError):
|
| 330 |
+
return False
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def sanitize_pandas_dataframe(df: pd.DataFrame) -> pd.DataFrame: # noqa: C901
|
| 334 |
+
"""
|
| 335 |
+
Sanitize a DataFrame to prepare it for serialization.
|
| 336 |
+
|
| 337 |
+
* Make a copy
|
| 338 |
+
* Convert RangeIndex columns to strings
|
| 339 |
+
* Raise ValueError if column names are not strings
|
| 340 |
+
* Raise ValueError if it has a hierarchical index.
|
| 341 |
+
* Convert categoricals to strings.
|
| 342 |
+
* Convert np.bool_ dtypes to Python bool objects
|
| 343 |
+
* Convert np.int dtypes to Python int objects
|
| 344 |
+
* Convert floats to objects and replace NaNs/infs with None.
|
| 345 |
+
* Convert DateTime dtypes into appropriate string representations
|
| 346 |
+
* Convert Nullable integers to objects and replace NaN with None
|
| 347 |
+
* Convert Nullable boolean to objects and replace NaN with None
|
| 348 |
+
* convert dedicated string column to objects and replace NaN with None
|
| 349 |
+
* Raise a ValueError for TimeDelta dtypes
|
| 350 |
+
"""
|
| 351 |
+
# This is safe to import here, as this function is only called on pandas input.
|
| 352 |
+
# NumPy is a required dependency of pandas so is also safe to import.
|
| 353 |
+
import numpy as np
|
| 354 |
+
import pandas as pd
|
| 355 |
+
|
| 356 |
+
df = df.copy()
|
| 357 |
+
|
| 358 |
+
if isinstance(df.columns, pd.RangeIndex):
|
| 359 |
+
df.columns = df.columns.astype(str)
|
| 360 |
+
|
| 361 |
+
for col_name in df.columns:
|
| 362 |
+
if not isinstance(col_name, str):
|
| 363 |
+
msg = (
|
| 364 |
+
f"Dataframe contains invalid column name: {col_name!r}. "
|
| 365 |
+
"Column names must be strings"
|
| 366 |
+
)
|
| 367 |
+
raise ValueError(msg)
|
| 368 |
+
|
| 369 |
+
if isinstance(df.index, pd.MultiIndex):
|
| 370 |
+
msg = "Hierarchical indices not supported"
|
| 371 |
+
raise ValueError(msg)
|
| 372 |
+
if isinstance(df.columns, pd.MultiIndex):
|
| 373 |
+
msg = "Hierarchical indices not supported"
|
| 374 |
+
raise ValueError(msg)
|
| 375 |
+
|
| 376 |
+
def to_list_if_array(val):
|
| 377 |
+
if isinstance(val, np.ndarray):
|
| 378 |
+
return val.tolist()
|
| 379 |
+
else:
|
| 380 |
+
return val
|
| 381 |
+
|
| 382 |
+
for dtype_item in df.dtypes.items():
|
| 383 |
+
# We know that the column names are strings from the isinstance check
|
| 384 |
+
# further above but mypy thinks it is of type Hashable and therefore does not
|
| 385 |
+
# let us assign it to the col_name variable which is already of type str.
|
| 386 |
+
col_name = cast(str, dtype_item[0])
|
| 387 |
+
dtype = dtype_item[1]
|
| 388 |
+
dtype_name = str(dtype)
|
| 389 |
+
if dtype_name == "category":
|
| 390 |
+
# Work around bug in to_json for categorical types in older versions
|
| 391 |
+
# of pandas as they do not properly convert NaN values to null in to_json.
|
| 392 |
+
# We can probably remove this part once we require pandas >= 1.0
|
| 393 |
+
col = df[col_name].astype(object)
|
| 394 |
+
df[col_name] = col.where(col.notnull(), None)
|
| 395 |
+
elif dtype_name == "string":
|
| 396 |
+
# dedicated string datatype (since 1.0)
|
| 397 |
+
# https://pandas.pydata.org/pandas-docs/version/1.0.0/whatsnew/v1.0.0.html#dedicated-string-data-type
|
| 398 |
+
col = df[col_name].astype(object)
|
| 399 |
+
df[col_name] = col.where(col.notnull(), None)
|
| 400 |
+
elif dtype_name == "bool":
|
| 401 |
+
# convert numpy bools to objects; np.bool is not JSON serializable
|
| 402 |
+
df[col_name] = df[col_name].astype(object)
|
| 403 |
+
elif dtype_name == "boolean":
|
| 404 |
+
# dedicated boolean datatype (since 1.0)
|
| 405 |
+
# https://pandas.io/docs/user_guide/boolean.html
|
| 406 |
+
col = df[col_name].astype(object)
|
| 407 |
+
df[col_name] = col.where(col.notnull(), None)
|
| 408 |
+
elif dtype_name.startswith(("datetime", "timestamp")):
|
| 409 |
+
# Convert datetimes to strings. This needs to be a full ISO string
|
| 410 |
+
# with time, which is why we cannot use ``col.astype(str)``.
|
| 411 |
+
# This is because Javascript parses date-only times in UTC, but
|
| 412 |
+
# parses full ISO-8601 dates as local time, and dates in Vega and
|
| 413 |
+
# Vega-Lite are displayed in local time by default.
|
| 414 |
+
# (see https://github.com/vega/altair/issues/1027)
|
| 415 |
+
df[col_name] = (
|
| 416 |
+
df[col_name].apply(lambda x: x.isoformat()).replace("NaT", "")
|
| 417 |
+
)
|
| 418 |
+
elif dtype_name.startswith("timedelta"):
|
| 419 |
+
msg = (
|
| 420 |
+
f'Field "{col_name}" has type "{dtype}" which is '
|
| 421 |
+
"not supported by Altair. Please convert to "
|
| 422 |
+
"either a timestamp or a numerical value."
|
| 423 |
+
""
|
| 424 |
+
)
|
| 425 |
+
raise ValueError(msg)
|
| 426 |
+
elif dtype_name.startswith("geometry"):
|
| 427 |
+
# geopandas >=0.6.1 uses the dtype geometry. Continue here
|
| 428 |
+
# otherwise it will give an error on np.issubdtype(dtype, np.integer)
|
| 429 |
+
continue
|
| 430 |
+
elif (
|
| 431 |
+
dtype_name
|
| 432 |
+
in {
|
| 433 |
+
"Int8",
|
| 434 |
+
"Int16",
|
| 435 |
+
"Int32",
|
| 436 |
+
"Int64",
|
| 437 |
+
"UInt8",
|
| 438 |
+
"UInt16",
|
| 439 |
+
"UInt32",
|
| 440 |
+
"UInt64",
|
| 441 |
+
"Float32",
|
| 442 |
+
"Float64",
|
| 443 |
+
}
|
| 444 |
+
): # nullable integer datatypes (since 24.0) and nullable float datatypes (since 1.2.0)
|
| 445 |
+
# https://pandas.pydata.org/pandas-docs/version/0.25/whatsnew/v0.24.0.html#optional-integer-na-support
|
| 446 |
+
col = df[col_name].astype(object)
|
| 447 |
+
df[col_name] = col.where(col.notnull(), None)
|
| 448 |
+
elif numpy_is_subtype(dtype, np.integer):
|
| 449 |
+
# convert integers to objects; np.int is not JSON serializable
|
| 450 |
+
df[col_name] = df[col_name].astype(object)
|
| 451 |
+
elif numpy_is_subtype(dtype, np.floating):
|
| 452 |
+
# For floats, convert to Python float: np.float is not JSON serializable
|
| 453 |
+
# Also convert NaN/inf values to null, as they are not JSON serializable
|
| 454 |
+
col = df[col_name]
|
| 455 |
+
bad_values = col.isnull() | np.isinf(col)
|
| 456 |
+
df[col_name] = col.astype(object).where(~bad_values, None)
|
| 457 |
+
elif dtype == object: # noqa: E721
|
| 458 |
+
# Convert numpy arrays saved as objects to lists
|
| 459 |
+
# Arrays are not JSON serializable
|
| 460 |
+
col = df[col_name].astype(object).apply(to_list_if_array)
|
| 461 |
+
df[col_name] = col.where(col.notnull(), None)
|
| 462 |
+
return df
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def sanitize_narwhals_dataframe(
|
| 466 |
+
data: nw.DataFrame[TIntoDataFrame],
|
| 467 |
+
) -> nw.DataFrame[TIntoDataFrame]:
|
| 468 |
+
"""Sanitize narwhals.DataFrame for JSON serialization."""
|
| 469 |
+
schema = data.schema
|
| 470 |
+
columns: list[IntoExpr] = []
|
| 471 |
+
# See https://github.com/vega/altair/issues/1027 for why this is necessary.
|
| 472 |
+
local_iso_fmt_string = "%Y-%m-%dT%H:%M:%S"
|
| 473 |
+
is_polars = is_polars_dataframe(data.to_native())
|
| 474 |
+
for name, dtype in schema.items():
|
| 475 |
+
if dtype == nw.Date and is_polars:
|
| 476 |
+
# Polars doesn't allow formatting `Date` with time directives.
|
| 477 |
+
# The date -> datetime cast is extremely fast compared with `to_string`
|
| 478 |
+
columns.append(
|
| 479 |
+
nw.col(name).cast(nw.Datetime).dt.to_string(local_iso_fmt_string)
|
| 480 |
+
)
|
| 481 |
+
elif dtype == nw.Date:
|
| 482 |
+
columns.append(nw.col(name).dt.to_string(local_iso_fmt_string))
|
| 483 |
+
elif dtype == nw.Datetime:
|
| 484 |
+
columns.append(nw.col(name).dt.to_string(f"{local_iso_fmt_string}%.f"))
|
| 485 |
+
elif dtype == nw.Duration:
|
| 486 |
+
msg = (
|
| 487 |
+
f'Field "{name}" has type "{dtype}" which is '
|
| 488 |
+
"not supported by Altair. Please convert to "
|
| 489 |
+
"either a timestamp or a numerical value."
|
| 490 |
+
""
|
| 491 |
+
)
|
| 492 |
+
raise ValueError(msg)
|
| 493 |
+
else:
|
| 494 |
+
columns.append(name)
|
| 495 |
+
return data.select(columns)
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
def to_eager_narwhals_dataframe(data: IntoDataFrame) -> nw.DataFrame[Any]:
|
| 499 |
+
"""
|
| 500 |
+
Wrap `data` in `narwhals.DataFrame`.
|
| 501 |
+
|
| 502 |
+
If `data` is not supported by Narwhals, but it is convertible
|
| 503 |
+
to a PyArrow table, then first convert to a PyArrow Table,
|
| 504 |
+
and then wrap in `narwhals.DataFrame`.
|
| 505 |
+
"""
|
| 506 |
+
data_nw = nw.from_native(data, eager_or_interchange_only=True)
|
| 507 |
+
if nw.get_level(data_nw) == "interchange":
|
| 508 |
+
# If Narwhals' support for `data`'s class is only metadata-level, then we
|
| 509 |
+
# use the interchange protocol to convert to a PyArrow Table.
|
| 510 |
+
from altair.utils.data import arrow_table_from_dfi_dataframe
|
| 511 |
+
|
| 512 |
+
pa_table = arrow_table_from_dfi_dataframe(data) # type: ignore[arg-type]
|
| 513 |
+
data_nw = nw.from_native(pa_table, eager_only=True)
|
| 514 |
+
return data_nw
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
def parse_shorthand( # noqa: C901
|
| 518 |
+
shorthand: dict[str, Any] | str,
|
| 519 |
+
data: IntoDataFrame | None = None,
|
| 520 |
+
parse_aggregates: bool = True,
|
| 521 |
+
parse_window_ops: bool = False,
|
| 522 |
+
parse_timeunits: bool = True,
|
| 523 |
+
parse_types: bool = True,
|
| 524 |
+
) -> dict[str, Any]:
|
| 525 |
+
"""
|
| 526 |
+
General tool to parse shorthand values.
|
| 527 |
+
|
| 528 |
+
These are of the form:
|
| 529 |
+
|
| 530 |
+
- "col_name"
|
| 531 |
+
- "col_name:O"
|
| 532 |
+
- "average(col_name)"
|
| 533 |
+
- "average(col_name):O"
|
| 534 |
+
|
| 535 |
+
Optionally, a dataframe may be supplied, from which the type
|
| 536 |
+
will be inferred if not specified in the shorthand.
|
| 537 |
+
|
| 538 |
+
Parameters
|
| 539 |
+
----------
|
| 540 |
+
shorthand : dict or string
|
| 541 |
+
The shorthand representation to be parsed
|
| 542 |
+
data : DataFrame, optional
|
| 543 |
+
If specified and of type DataFrame, then use these values to infer the
|
| 544 |
+
column type if not provided by the shorthand.
|
| 545 |
+
parse_aggregates : boolean
|
| 546 |
+
If True (default), then parse aggregate functions within the shorthand.
|
| 547 |
+
parse_window_ops : boolean
|
| 548 |
+
If True then parse window operations within the shorthand (default:False)
|
| 549 |
+
parse_timeunits : boolean
|
| 550 |
+
If True (default), then parse timeUnits from within the shorthand
|
| 551 |
+
parse_types : boolean
|
| 552 |
+
If True (default), then parse typecodes within the shorthand
|
| 553 |
+
|
| 554 |
+
Returns
|
| 555 |
+
-------
|
| 556 |
+
attrs : dict
|
| 557 |
+
a dictionary of attributes extracted from the shorthand
|
| 558 |
+
|
| 559 |
+
Examples
|
| 560 |
+
--------
|
| 561 |
+
>>> import pandas as pd
|
| 562 |
+
>>> data = pd.DataFrame({"foo": ["A", "B", "A", "B"], "bar": [1, 2, 3, 4]})
|
| 563 |
+
|
| 564 |
+
>>> parse_shorthand("name") == {"field": "name"}
|
| 565 |
+
True
|
| 566 |
+
|
| 567 |
+
>>> parse_shorthand("name:Q") == {"field": "name", "type": "quantitative"}
|
| 568 |
+
True
|
| 569 |
+
|
| 570 |
+
>>> parse_shorthand("average(col)") == {"aggregate": "average", "field": "col"}
|
| 571 |
+
True
|
| 572 |
+
|
| 573 |
+
>>> parse_shorthand("foo:O") == {"field": "foo", "type": "ordinal"}
|
| 574 |
+
True
|
| 575 |
+
|
| 576 |
+
>>> parse_shorthand("min(foo):Q") == {
|
| 577 |
+
... "aggregate": "min",
|
| 578 |
+
... "field": "foo",
|
| 579 |
+
... "type": "quantitative",
|
| 580 |
+
... }
|
| 581 |
+
True
|
| 582 |
+
|
| 583 |
+
>>> parse_shorthand("month(col)") == {
|
| 584 |
+
... "field": "col",
|
| 585 |
+
... "timeUnit": "month",
|
| 586 |
+
... "type": "temporal",
|
| 587 |
+
... }
|
| 588 |
+
True
|
| 589 |
+
|
| 590 |
+
>>> parse_shorthand("year(col):O") == {
|
| 591 |
+
... "field": "col",
|
| 592 |
+
... "timeUnit": "year",
|
| 593 |
+
... "type": "ordinal",
|
| 594 |
+
... }
|
| 595 |
+
True
|
| 596 |
+
|
| 597 |
+
>>> parse_shorthand("foo", data) == {"field": "foo", "type": "nominal"}
|
| 598 |
+
True
|
| 599 |
+
|
| 600 |
+
>>> parse_shorthand("bar", data) == {"field": "bar", "type": "quantitative"}
|
| 601 |
+
True
|
| 602 |
+
|
| 603 |
+
>>> parse_shorthand("bar:O", data) == {"field": "bar", "type": "ordinal"}
|
| 604 |
+
True
|
| 605 |
+
|
| 606 |
+
>>> parse_shorthand("sum(bar)", data) == {
|
| 607 |
+
... "aggregate": "sum",
|
| 608 |
+
... "field": "bar",
|
| 609 |
+
... "type": "quantitative",
|
| 610 |
+
... }
|
| 611 |
+
True
|
| 612 |
+
|
| 613 |
+
>>> parse_shorthand("count()", data) == {
|
| 614 |
+
... "aggregate": "count",
|
| 615 |
+
... "type": "quantitative",
|
| 616 |
+
... }
|
| 617 |
+
True
|
| 618 |
+
"""
|
| 619 |
+
from altair.utils.data import is_data_type
|
| 620 |
+
|
| 621 |
+
if not shorthand:
|
| 622 |
+
return {}
|
| 623 |
+
|
| 624 |
+
patterns = []
|
| 625 |
+
|
| 626 |
+
if parse_aggregates:
|
| 627 |
+
patterns.extend([r"{agg_count}\(\)"])
|
| 628 |
+
patterns.extend([r"{aggregate}\({field}\)"])
|
| 629 |
+
if parse_window_ops:
|
| 630 |
+
patterns.extend([r"{op_count}\(\)"])
|
| 631 |
+
patterns.extend([r"{window_op}\({field}\)"])
|
| 632 |
+
if parse_timeunits:
|
| 633 |
+
patterns.extend([r"{timeUnit}\({field}\)"])
|
| 634 |
+
|
| 635 |
+
patterns.extend([r"{field}"])
|
| 636 |
+
|
| 637 |
+
if parse_types:
|
| 638 |
+
patterns = list(itertools.chain(*((p + ":{type}", p) for p in patterns)))
|
| 639 |
+
|
| 640 |
+
regexps = (
|
| 641 |
+
re.compile(r"\A" + p.format(**SHORTHAND_UNITS) + r"\Z", re.DOTALL)
|
| 642 |
+
for p in patterns
|
| 643 |
+
)
|
| 644 |
+
|
| 645 |
+
# find matches depending on valid fields passed
|
| 646 |
+
if isinstance(shorthand, dict):
|
| 647 |
+
attrs = shorthand
|
| 648 |
+
else:
|
| 649 |
+
attrs = next(
|
| 650 |
+
exp.match(shorthand).groupdict() # type: ignore[union-attr]
|
| 651 |
+
for exp in regexps
|
| 652 |
+
if exp.match(shorthand) is not None
|
| 653 |
+
)
|
| 654 |
+
|
| 655 |
+
# Handle short form of the type expression
|
| 656 |
+
if "type" in attrs:
|
| 657 |
+
attrs["type"] = INV_TYPECODE_MAP.get(attrs["type"], attrs["type"])
|
| 658 |
+
|
| 659 |
+
# counts are quantitative by default
|
| 660 |
+
if attrs == {"aggregate": "count"}:
|
| 661 |
+
attrs["type"] = "quantitative"
|
| 662 |
+
|
| 663 |
+
# times are temporal by default
|
| 664 |
+
if "timeUnit" in attrs and "type" not in attrs:
|
| 665 |
+
attrs["type"] = "temporal"
|
| 666 |
+
|
| 667 |
+
# if data is specified and type is not, infer type from data
|
| 668 |
+
if "type" not in attrs and is_data_type(data):
|
| 669 |
+
unescaped_field = attrs["field"].replace("\\", "")
|
| 670 |
+
data_nw = nw.from_native(data, eager_or_interchange_only=True)
|
| 671 |
+
schema = data_nw.schema
|
| 672 |
+
if unescaped_field in schema:
|
| 673 |
+
column = data_nw[unescaped_field]
|
| 674 |
+
if schema[unescaped_field] in {
|
| 675 |
+
nw.Object,
|
| 676 |
+
nw.Unknown,
|
| 677 |
+
} and is_pandas_dataframe(data_nw.to_native()):
|
| 678 |
+
attrs["type"] = infer_vegalite_type_for_pandas(column.to_native())
|
| 679 |
+
else:
|
| 680 |
+
attrs["type"] = infer_vegalite_type_for_narwhals(column)
|
| 681 |
+
if isinstance(attrs["type"], tuple):
|
| 682 |
+
attrs["sort"] = attrs["type"][1]
|
| 683 |
+
attrs["type"] = attrs["type"][0]
|
| 684 |
+
|
| 685 |
+
# If an unescaped colon is still present, it's often due to an incorrect data type specification
|
| 686 |
+
# but could also be due to using a column name with ":" in it.
|
| 687 |
+
if (
|
| 688 |
+
"field" in attrs
|
| 689 |
+
and ":" in attrs["field"]
|
| 690 |
+
and attrs["field"][attrs["field"].rfind(":") - 1] != "\\"
|
| 691 |
+
):
|
| 692 |
+
raise ValueError(
|
| 693 |
+
'"{}" '.format(attrs["field"].split(":")[-1])
|
| 694 |
+
+ "is not one of the valid encoding data types: {}.".format(
|
| 695 |
+
", ".join(TYPECODE_MAP.values())
|
| 696 |
+
)
|
| 697 |
+
+ "\nFor more details, see https://altair-viz.github.io/user_guide/encodings/index.html#encoding-data-types. "
|
| 698 |
+
+ "If you are trying to use a column name that contains a colon, "
|
| 699 |
+
+ 'prefix it with a backslash; for example "column\\:name" instead of "column:name".'
|
| 700 |
+
)
|
| 701 |
+
return attrs
|
| 702 |
+
|
| 703 |
+
|
| 704 |
+
def infer_vegalite_type_for_narwhals(
|
| 705 |
+
column: nw.Series,
|
| 706 |
+
) -> InferredVegaLiteType | tuple[InferredVegaLiteType, list]:
|
| 707 |
+
dtype = column.dtype
|
| 708 |
+
if (
|
| 709 |
+
nw.is_ordered_categorical(column)
|
| 710 |
+
and not (categories := column.cat.get_categories()).is_empty()
|
| 711 |
+
):
|
| 712 |
+
return "ordinal", categories.to_list()
|
| 713 |
+
if dtype == nw.String or dtype == nw.Categorical or dtype == nw.Boolean: # noqa: PLR1714
|
| 714 |
+
return "nominal"
|
| 715 |
+
elif dtype.is_numeric():
|
| 716 |
+
return "quantitative"
|
| 717 |
+
elif dtype == nw.Datetime or dtype == nw.Date: # noqa: PLR1714
|
| 718 |
+
# We use `== nw.Datetime` to check for any kind of Datetime, regardless of time
|
| 719 |
+
# unit and time zone. Prefer this over `dtype in {nw.Datetime, nw.Date}`,
|
| 720 |
+
# see https://narwhals-dev.github.io/narwhals/backcompat.
|
| 721 |
+
return "temporal"
|
| 722 |
+
else:
|
| 723 |
+
msg = f"Unexpected DtypeKind: {dtype}"
|
| 724 |
+
raise ValueError(msg)
|
| 725 |
+
|
| 726 |
+
|
| 727 |
+
def use_signature(tp: Callable[P, Any], /):
    """
    Use the signature and doc of ``tp`` for the decorated callable ``cb``.

    - **Overload 1**: Decorating method
    - **Overload 2**: Decorating function

    Returns
    -------
    **Adding the annotation breaks typing**:

        Overload[Callable[[WrapsMethod[T, R]], WrappedMethod[T, P, R]], Callable[[WrapsFunc[R]], WrappedFunc[P, R]]]
    """

    @overload
    def decorate(cb: WrapsMethod[T, R], /) -> WrappedMethod[T, P, R]: ...  # pyright: ignore[reportOverlappingOverload]

    @overload
    def decorate(cb: WrapsFunc[R], /) -> WrappedFunc[P, R]: ...  # pyright: ignore[reportOverlappingOverload]

    def decorate(cb: WrapsFunc[R], /) -> WrappedMethod[T, P, R] | WrappedFunc[P, R]:
        """
        Copy ``tp``'s documentation onto ``cb``; raises when ``tp`` has no doc.

        Notes
        -----
        - Reference to ``tp`` is stored in ``cb.__wrapped__``.
        - The doc for ``cb`` will have a ``.rst`` link added, referring to ``tp``.
        """
        # When `tp` is a class, store its `__init__` — presumably so signature
        # introspection follows the constructor rather than the class object
        # itself (TODO confirm against callers of use_signature).
        cb.__wrapped__ = getattr(tp, "__init__", tp)  # type: ignore[attr-defined]

        if doc_in := tp.__doc__:
            # Keep cb's own first line (or synthesize a cross-reference to tp),
            # then append every line of tp's doc after its summary line.
            line_1 = f"{cb.__doc__ or f'Refer to :class:`{tp.__name__}`'}\n"
            cb.__doc__ = "".join((line_1, *doc_in.splitlines(keepends=True)[1:]))
            return cb
        else:
            msg = f"Found no doc for {tp!r}"
            raise AttributeError(msg)

    return decorate
|
| 767 |
+
|
| 768 |
+
|
| 769 |
+
@overload
def update_nested(
    original: t.MutableMapping[Any, Any],
    update: t.Mapping[Any, Any],
    copy: Literal[False] = ...,
) -> t.MutableMapping[Any, Any]: ...
@overload
def update_nested(
    original: t.Mapping[Any, Any],
    update: t.Mapping[Any, Any],
    copy: Literal[True],
) -> t.MutableMapping[Any, Any]: ...
def update_nested(
    original: Any,
    update: t.Mapping[Any, Any],
    copy: bool = False,
) -> t.MutableMapping[Any, Any]:
    """
    Recursively merge ``update`` into ``original``.

    Nested mappings are merged key-by-key; any non-mapping value in ``update``
    replaces the corresponding entry wholesale.

    Parameters
    ----------
    original : MutableMapping
        The mapping to update; modified in place unless ``copy`` is True.
    update : Mapping
        Nested mapping of updates to apply.
    copy : bool, default False
        When True, operate on a deep copy and leave ``original`` untouched.

    Returns
    -------
    MutableMapping
        The merged mapping (``original`` itself when ``copy`` is False).

    Examples
    --------
    >>> update_nested({"x": {"b": 2, "c": 4}}, {"x": {"b": 5, "d": 6}, "y": 40})  # doctest: +SKIP
    {'x': {'b': 5, 'c': 4, 'd': 6}, 'y': 40}
    """
    target = deepcopy(original) if copy else original
    for key, new_val in update.items():
        if not isinstance(new_val, Mapping):
            target[key] = new_val
            continue
        existing = target.get(key, {})
        if isinstance(existing, MutableMapping):
            target[key] = update_nested(existing, new_val)
        else:
            # Existing value is not a mapping: replace it outright.
            target[key] = new_val
    return target
|
| 824 |
+
|
| 825 |
+
|
| 826 |
+
def display_traceback(in_ipython: bool = True):
    """
    Render the currently-active exception's traceback.

    Uses IPython's traceback display when requested and an IPython shell is
    running; falls back to the standard library otherwise.
    """
    exc_info = sys.exc_info()

    ip = None
    if in_ipython:
        from IPython.core.getipython import get_ipython

        ip = get_ipython()

    if ip is None:
        traceback.print_exception(*exc_info)
    else:
        ip.showtraceback(exc_info)
|
| 840 |
+
|
| 841 |
+
|
| 842 |
+
# Discriminator for the three flavours of channel class per encoding name.
_ChannelType = Literal["field", "datum", "value"]
_CHANNEL_CACHE: _ChannelCache
"""Singleton `_ChannelCache` instance.

Initialized on first use.
"""
|
| 848 |
+
|
| 849 |
+
|
| 850 |
+
class _ChannelCache:
    """
    Lookup tables linking encoding-channel classes and encoding names.

    Built lazily by :meth:`from_cache` and stored in the module-level
    ``_CHANNEL_CACHE`` singleton.
    """

    # Maps each channel class to its encoding name (via `_encoding_name`).
    channel_to_name: dict[type[SchemaBase], str]
    # Inverse of the above, grouped per encoding name by channel flavour.
    name_to_channel: dict[str, dict[_ChannelType, type[SchemaBase]]]

    @classmethod
    def from_cache(cls) -> _ChannelCache:
        """Return the singleton, building it on first use."""
        global _CHANNEL_CACHE
        try:
            cached = _CHANNEL_CACHE
        except NameError:
            # First call: bypass __init__ and populate both tables directly.
            cached = cls.__new__(cls)
            cached.channel_to_name = _init_channel_to_name()  # pyright: ignore[reportAttributeAccessIssue]
            cached.name_to_channel = _invert_group_channels(cached.channel_to_name)
            _CHANNEL_CACHE = cached
        return _CHANNEL_CACHE

    def get_encoding(self, tp: type[Any], /) -> str:
        """Return the encoding name for channel class ``tp``, or raise."""
        if encoding := self.channel_to_name.get(tp):
            return encoding
        # NOTE(review): `tp` is already a class, so `type(tp).__name__` names
        # its *metaclass* — possibly `tp.__name__` was intended; confirm.
        msg = f"positional of type {type(tp).__name__!r}"
        raise NotImplementedError(msg)

    def _wrap_in_channel(self, obj: Any, encoding: str, /):
        """Best-effort coercion of ``obj`` into a channel instance for ``encoding``."""
        if isinstance(obj, SchemaBase):
            return obj
        elif isinstance(obj, str):
            # A bare string is shorthand for a field definition.
            obj = {"shorthand": obj}
        elif isinstance(obj, (list, tuple)):
            # Wrap each element individually.
            return [self._wrap_in_channel(el, encoding) for el in obj]
        elif isinstance(obj, SchemaLike):
            obj = obj.to_dict()
        if channel := self.name_to_channel.get(encoding):
            # Use the Value-flavoured class when a "value" key is present,
            # otherwise the Field-flavoured one.
            tp = channel["value" if "value" in obj else "field"]
            try:
                # Don't force validation here; some objects won't be valid until
                # they're created in the context of a chart.
                return tp.from_dict(obj, validate=False)
            except jsonschema.ValidationError:
                # our attempts at finding the correct class have failed
                return obj
        else:
            warnings.warn(f"Unrecognized encoding channel {encoding!r}", stacklevel=1)
            return obj

    def infer_encoding_types(self, kwargs: dict[str, Any], /):
        """Wrap every non-Undefined kwarg in its channel class, keyed by encoding."""
        return {
            encoding: self._wrap_in_channel(obj, encoding)
            for encoding, obj in kwargs.items()
            if obj is not Undefined
        }
|
| 900 |
+
|
| 901 |
+
|
| 902 |
+
def _init_channel_to_name():
    """
    Construct a dictionary of channel type to encoding name.

    Note
    ----
    The return type is not expressible using annotations, but is used
    internally by `mypy`/`pyright` and avoids the need for type ignores.

    Returns
    -------
    mapping: dict[type[`<subclass of FieldChannelMixin and SchemaBase>`] | type[`<subclass of ValueChannelMixin and SchemaBase>`] | type[`<subclass of DatumChannelMixin and SchemaBase>`], str]
    """
    from altair.vegalite.v5.schema import channels as ch

    mixins = ch.FieldChannelMixin, ch.ValueChannelMixin, ch.DatumChannelMixin

    # Walk the generated channels module, keeping only concrete channel classes
    # (those mixing in one of the three flavours AND subclassing SchemaBase).
    mapping = {}
    for candidate in ch.__dict__.values():
        if (
            isinstance(candidate, type)
            and issubclass(candidate, mixins)
            and issubclass(candidate, SchemaBase)
        ):
            mapping[candidate] = candidate._encoding_name
    return mapping
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def _invert_group_channels(
    m: dict[type[SchemaBase], str], /
) -> dict[str, dict[_ChannelType, type[SchemaBase]]]:
    """Grouped inverted index for `_ChannelCache.channel_to_name`."""

    def _reduce(it: Iterator[tuple[type[Any], str]]) -> Any:
        """
        Returns a 1-2 item dict, per channel.

        Never includes `datum`, as it is never utilized in `wrap_in_channel`.
        """
        item: dict[Any, type[SchemaBase]] = {}
        for tp, _ in it:
            # Classify each channel class by its name suffix ("...Datum",
            # "...Value", or plain field channel).
            name = tp.__name__
            if name.endswith("Datum"):
                continue
            elif name.endswith("Value"):
                sub_key = "value"
            else:
                sub_key = "field"
            item[sub_key] = tp
        return item

    # NOTE: itertools.groupby only groups *consecutive* items with equal keys,
    # so this assumes all classes sharing one encoding name appear adjacently
    # in `m` — TODO confirm this holds for the generated channels module.
    grouper = groupby(m.items(), itemgetter(1))
    return {k: _reduce(chans) for k, chans in grouper}
|
| 951 |
+
|
| 952 |
+
|
| 953 |
+
def infer_encoding_types(args: tuple[Any, ...], kwargs: dict[str, Any]):
    """
    Infer typed keyword arguments for args and kwargs.

    Positional channel arguments are matched to an encoding name through the
    channel cache and merged into ``kwargs`` (which is mutated in place); the
    combined mapping is then wrapped into typed channel objects.

    Parameters
    ----------
    args : Sequence
        Sequence of function args
    kwargs : MutableMapping
        Dict of function kwargs

    Returns
    -------
    kwargs : dict
        All args and kwargs in a single dict, with keys and types
        based on the channels mapping.
    """
    cache = _ChannelCache.from_cache()
    # Convert positional channels into keyword form, keyed by encoding name.
    for arg in args:
        # For list/tuple args, the first element determines the channel type.
        probe = next(iter(arg), None) if isinstance(arg, (list, tuple)) else arg
        encoding = cache.get_encoding(type(probe))
        if encoding in kwargs:
            msg = f"encoding {encoding!r} specified twice."
            raise ValueError(msg)
        kwargs[encoding] = arg

    return cache.infer_encoding_types(kwargs)
|
mgm/lib/python3.10/site-packages/altair/utils/data.py
ADDED
|
@@ -0,0 +1,442 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import hashlib
|
| 4 |
+
import json
|
| 5 |
+
import random
|
| 6 |
+
import sys
|
| 7 |
+
from collections.abc import MutableMapping, Sequence
|
| 8 |
+
from functools import partial
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import (
|
| 11 |
+
TYPE_CHECKING,
|
| 12 |
+
Any,
|
| 13 |
+
Callable,
|
| 14 |
+
Literal,
|
| 15 |
+
TypedDict,
|
| 16 |
+
TypeVar,
|
| 17 |
+
Union,
|
| 18 |
+
overload,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
import narwhals.stable.v1 as nw
|
| 22 |
+
from narwhals.stable.v1.dependencies import is_pandas_dataframe
|
| 23 |
+
from narwhals.stable.v1.typing import IntoDataFrame
|
| 24 |
+
|
| 25 |
+
from ._importers import import_pyarrow_interchange
|
| 26 |
+
from .core import (
|
| 27 |
+
DataFrameLike,
|
| 28 |
+
sanitize_geo_interface,
|
| 29 |
+
sanitize_narwhals_dataframe,
|
| 30 |
+
sanitize_pandas_dataframe,
|
| 31 |
+
to_eager_narwhals_dataframe,
|
| 32 |
+
)
|
| 33 |
+
from .plugin_registry import PluginRegistry
|
| 34 |
+
|
| 35 |
+
if sys.version_info >= (3, 13):
|
| 36 |
+
from typing import Protocol, runtime_checkable
|
| 37 |
+
else:
|
| 38 |
+
from typing_extensions import Protocol, runtime_checkable
|
| 39 |
+
if sys.version_info >= (3, 10):
|
| 40 |
+
from typing import Concatenate, ParamSpec
|
| 41 |
+
else:
|
| 42 |
+
from typing_extensions import Concatenate, ParamSpec
|
| 43 |
+
|
| 44 |
+
if TYPE_CHECKING:
|
| 45 |
+
if sys.version_info >= (3, 13):
|
| 46 |
+
from typing import TypeIs
|
| 47 |
+
else:
|
| 48 |
+
from typing_extensions import TypeIs
|
| 49 |
+
|
| 50 |
+
if sys.version_info >= (3, 10):
|
| 51 |
+
from typing import TypeAlias
|
| 52 |
+
else:
|
| 53 |
+
from typing_extensions import TypeAlias
|
| 54 |
+
import pandas as pd
|
| 55 |
+
import pyarrow as pa
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@runtime_checkable
class SupportsGeoInterface(Protocol):
    """Structural type for any object exposing a ``__geo_interface__`` mapping."""

    __geo_interface__: MutableMapping
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# Any input accepted by the data transformers: plain records dicts, native
# dataframes, geo-interface objects, or interchange-protocol frames.
DataType: TypeAlias = Union[
    dict[Any, Any], IntoDataFrame, SupportsGeoInterface, DataFrameLike
]

TDataType = TypeVar("TDataType", bound=DataType)
TIntoDataFrame = TypeVar("TIntoDataFrame", bound=IntoDataFrame)

# Shape of the Vega-Lite `data` entry produced by the transformers.
VegaLiteDataDict: TypeAlias = dict[
    str, Union[str, dict[Any, Any], list[dict[Any, Any]]]
]
ToValuesReturnType: TypeAlias = dict[str, Union[dict[Any, Any], list[dict[Any, Any]]]]
SampleReturnType = Union[IntoDataFrame, dict[str, Sequence], None]
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def is_data_type(obj: Any) -> TypeIs[DataType]:
    """True when ``obj`` is a dict, a geo-interface object, or a dataframe narwhals can wrap."""
    if isinstance(obj, (dict, SupportsGeoInterface)):
        return True
    maybe_frame = nw.from_native(obj, eager_or_interchange_only=True, pass_through=True)
    return isinstance(maybe_frame, nw.DataFrame)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
# ==============================================================================
|
| 85 |
+
# Data transformer registry
|
| 86 |
+
#
|
| 87 |
+
# A data transformer is a callable that takes a supported data type and returns
|
| 88 |
+
# a transformed dictionary version of it which is compatible with the VegaLite schema.
|
| 89 |
+
# The dict objects will be the Data portion of the VegaLite schema.
|
| 90 |
+
#
|
| 91 |
+
# Renderers only deal with the dict form of a
|
| 92 |
+
# VegaLite spec, after the Data model has been put into a schema compliant
|
| 93 |
+
# form.
|
| 94 |
+
# ==============================================================================
|
| 95 |
+
|
| 96 |
+
P = ParamSpec("P")
# NOTE: `Any` required due to the complexity of existing signatures imported in `altair.vegalite.v5.data.py`
R = TypeVar("R", VegaLiteDataDict, Any)
# Signature shared by all data transformers: (data, *extra) -> transformed data.
DataTransformerType = Callable[Concatenate[DataType, P], R]
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class DataTransformerRegistry(PluginRegistry[DataTransformerType, R]):
    """Plugin registry for data transformers, with registry-wide settings."""

    # Settings stored on the class, shared by every registry instance.
    _global_settings = {"consolidate_datasets": True}

    @property
    def consolidate_datasets(self) -> bool:
        # Whether datasets should be consolidated (see the class-level setting).
        return self._global_settings["consolidate_datasets"]

    @consolidate_datasets.setter
    def consolidate_datasets(self, value: bool) -> None:
        self._global_settings["consolidate_datasets"] = value
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# ==============================================================================
|
| 115 |
+
# Raised by `limit_rows` when a dataset exceeds the configured row threshold.
class MaxRowsError(Exception):
    """Raised when a data model has too many rows."""
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
@overload
def limit_rows(data: None = ..., max_rows: int | None = ...) -> partial: ...
@overload
def limit_rows(data: DataType, max_rows: int | None = ...) -> DataType: ...
def limit_rows(
    data: DataType | None = None, max_rows: int | None = 5000
) -> partial | DataType:
    """
    Raise MaxRowsError if the data model has more than max_rows.

    If max_rows is None, then do not perform any check.

    Called with ``data=None`` this returns a ``partial`` so the transformer can
    be registered with a preset ``max_rows``.
    """
    if data is None:
        return partial(limit_rows, max_rows=max_rows)
    check_data_type(data)

    def raise_max_rows_error():
        msg = (
            "The number of rows in your dataset is greater "
            f"than the maximum allowed ({max_rows}).\n\n"
            "Try enabling the VegaFusion data transformer which "
            "raises this limit by pre-evaluating data\n"
            "transformations in Python.\n"
            " >> import altair as alt\n"
            ' >> alt.data_transformers.enable("vegafusion")\n\n'
            "Or, see https://altair-viz.github.io/user_guide/large_datasets.html "
            "for additional information\n"
            "on how to plot large datasets."
        )
        raise MaxRowsError(msg)

    # Work out what to count rows over, depending on the data flavour.
    if isinstance(data, SupportsGeoInterface):
        if data.__geo_interface__["type"] == "FeatureCollection":
            # Each GeoJSON feature counts as one row.
            values = data.__geo_interface__["features"]
        else:
            values = data.__geo_interface__
    elif isinstance(data, dict):
        if "values" in data:
            values = data["values"]
        else:
            # e.g. a url-based data dict: nothing to count, pass through as-is.
            return data
    else:
        # Coerce native frames to an eager narwhals frame so len() is defined.
        data = to_eager_narwhals_dataframe(data)
        values = data

    if max_rows is not None and len(values) > max_rows:
        raise_max_rows_error()

    return data
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@overload
def sample(
    data: None = ..., n: int | None = ..., frac: float | None = ...
) -> partial: ...
@overload
def sample(
    data: TIntoDataFrame, n: int | None = ..., frac: float | None = ...
) -> TIntoDataFrame: ...
@overload
def sample(
    data: DataType, n: int | None = ..., frac: float | None = ...
) -> SampleReturnType: ...
def sample(
    data: DataType | None = None,
    n: int | None = None,
    frac: float | None = None,
) -> partial | SampleReturnType:
    """
    Reduce the size of the data model by sampling without replacement.

    Parameters
    ----------
    data
        Data to sample; ``None`` returns a curried ``partial`` instead.
    n
        Number of rows to keep. NOTE(review): any falsy ``n`` (including 0)
        defers to ``frac`` — confirm that n=0 falling through is intended.
    frac
        Fraction of rows to keep, used when ``n`` is falsy.
    """
    if data is None:
        return partial(sample, n=n, frac=frac)
    check_data_type(data)
    if is_pandas_dataframe(data):
        # pandas has its own sampler; delegate directly.
        return data.sample(n=n, frac=frac)
    elif isinstance(data, dict):
        if "values" in data:
            values = data["values"]
            if not n:
                if frac is None:
                    msg = "frac cannot be None if n is None and data is a dictionary"
                    raise ValueError(msg)
                n = int(frac * len(values))
            values = random.sample(values, n)
            return {"values": values}
        else:
            # Maybe this should raise an error or return something useful?
            return None
    data = nw.from_native(data, eager_only=True)
    if not n:
        if frac is None:
            msg = "frac cannot be None if n is None with this data input type"
            raise ValueError(msg)
        n = int(frac * len(data))
    # Sample row positions, then hand back the caller's native frame type.
    indices = random.sample(range(len(data)), n)
    return data[indices].to_native()
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
# File formats the text-based data transformers can emit.
_FormatType = Literal["csv", "json"]


class _FormatDict(TypedDict):
    # The Vega-Lite `format.type` entry for a url-based dataset.
    type: _FormatType


class _ToFormatReturnUrlDict(TypedDict):
    # Return shape of `to_json`/`to_csv`: a url plus its format hint.
    url: str
    format: _FormatDict
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
@overload
def to_json(
    data: None = ...,
    prefix: str = ...,
    extension: str = ...,
    filename: str = ...,
    urlpath: str = ...,
) -> partial: ...


@overload
def to_json(
    data: DataType,
    prefix: str = ...,
    extension: str = ...,
    filename: str = ...,
    urlpath: str = ...,
) -> _ToFormatReturnUrlDict: ...


def to_json(
    data: DataType | None = None,
    prefix: str = "altair-data",
    extension: str = "json",
    filename: str = "{prefix}-{hash}.{extension}",
    urlpath: str = "",
) -> partial | _ToFormatReturnUrlDict:
    """
    Data transformer that writes ``data`` to a ``.json`` file.

    Returns a url-based data model pointing at the written file; called with
    no data, returns a curried ``partial`` instead.
    """
    kwds = _to_text_kwds(prefix, extension, filename, urlpath)
    if data is None:
        return partial(to_json, **kwds)
    serialized = _data_to_json_string(data)
    return _to_text(serialized, **kwds, format=_FormatDict(type="json"))
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
@overload
def to_csv(
    data: None = ...,
    prefix: str = ...,
    extension: str = ...,
    filename: str = ...,
    urlpath: str = ...,
) -> partial: ...


@overload
def to_csv(
    data: dict | pd.DataFrame | DataFrameLike,
    prefix: str = ...,
    extension: str = ...,
    filename: str = ...,
    urlpath: str = ...,
) -> _ToFormatReturnUrlDict: ...


def to_csv(
    data: dict | pd.DataFrame | DataFrameLike | None = None,
    prefix: str = "altair-data",
    extension: str = "csv",
    filename: str = "{prefix}-{hash}.{extension}",
    urlpath: str = "",
) -> partial | _ToFormatReturnUrlDict:
    """
    Data transformer that writes ``data`` to a ``.csv`` file.

    Returns a url-based data model pointing at the written file; called with
    no data, returns a curried ``partial`` instead.
    """
    kwds = _to_text_kwds(prefix, extension, filename, urlpath)
    if data is None:
        return partial(to_csv, **kwds)
    serialized = _data_to_csv_string(data)
    return _to_text(serialized, **kwds, format=_FormatDict(type="csv"))
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
def _to_text(
    data: str,
    prefix: str,
    extension: str,
    filename: str,
    urlpath: str,
    format: _FormatDict,
) -> _ToFormatReturnUrlDict:
    """Persist ``data`` under a content-hashed filename and describe it as a url dataset."""
    fname = filename.format(
        prefix=prefix, hash=_compute_data_hash(data), extension=extension
    )
    Path(fname).write_text(data, encoding="utf-8")
    return _ToFormatReturnUrlDict({"url": str(Path(urlpath, fname)), "format": format})
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def _to_text_kwds(prefix: str, extension: str, filename: str, urlpath: str, /) -> dict[str, str]: # fmt: skip
|
| 316 |
+
return {"prefix": prefix, "extension": extension, "filename": filename, "urlpath": urlpath} # fmt: skip
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def to_values(data: DataType) -> ToValuesReturnType:
    """Replace a DataFrame by a data model with values."""
    check_data_type(data)
    # `pass_through=True` passes `data` through as-is if it is not a Narwhals object.
    data_native = nw.to_native(data, pass_through=True)
    if isinstance(data_native, SupportsGeoInterface):
        return {"values": _from_geo_interface(data_native)}
    elif is_pandas_dataframe(data_native):
        data_native = sanitize_pandas_dataframe(data_native)
        return {"values": data_native.to_dict(orient="records")}
    elif isinstance(data_native, dict):
        if "values" not in data_native:
            msg = "values expected in data dict, but not present."
            raise KeyError(msg)
        return data_native
    elif isinstance(data, nw.DataFrame):
        # Deliberately checks `data` (the narwhals wrapper), not `data_native`:
        # this branch is reached only when the input really was a nw.DataFrame.
        data = sanitize_narwhals_dataframe(data)
        return {"values": data.rows(named=True)}
    else:
        # Should never reach this state as tested by check_data_type
        msg = f"Unrecognized data type: {type(data)}"
        raise ValueError(msg)
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def check_data_type(data: DataType) -> None:
    """Raise ``TypeError`` unless ``data`` is a recognized chart-data type."""
    if is_data_type(data):
        return
    msg = f"Expected dict, DataFrame or a __geo_interface__ attribute, got: {type(data)}"
    raise TypeError(msg)
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
# ==============================================================================
|
| 350 |
+
# Private utilities
|
| 351 |
+
# ==============================================================================
|
| 352 |
+
def _compute_data_hash(data_str: str) -> str:
|
| 353 |
+
return hashlib.sha256(data_str.encode()).hexdigest()[:32]
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _from_geo_interface(data: SupportsGeoInterface | Any) -> dict[str, Any]:
    """
    Sanitize a ``__geo_interface__`` w/ pre-sanitize step for ``pandas`` if needed.

    Notes
    -----
    Split out to resolve typing issues related to:
    - Intersection types
    - ``typing.TypeGuard``
    - ``pd.DataFrame.__getattr__``
    """
    if is_pandas_dataframe(data):
        data = sanitize_pandas_dataframe(data)
    return sanitize_geo_interface(data.__geo_interface__)
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
def _data_to_json_string(data: DataType) -> str:
    """
    Serialize ``data`` to a JSON string of records.

    Raises ``NotImplementedError`` for inputs narwhals cannot wrap, and
    ``KeyError`` for dicts missing a "values" entry.
    """
    check_data_type(data)
    if isinstance(data, SupportsGeoInterface):
        return json.dumps(_from_geo_interface(data))
    if is_pandas_dataframe(data):
        sanitized = sanitize_pandas_dataframe(data)
        return sanitized.to_json(orient="records", double_precision=15)
    if isinstance(data, dict):
        if "values" not in data:
            msg = "values expected in data dict, but not present."
            raise KeyError(msg)
        return json.dumps(data["values"], sort_keys=True)
    try:
        frame = nw.from_native(data, eager_only=True)
    except TypeError as exc:
        msg = "to_json only works with data expressed as a DataFrame or as a dict"
        raise NotImplementedError(msg) from exc
    return json.dumps(sanitize_narwhals_dataframe(frame).rows(named=True))
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
def _data_to_csv_string(data: DataType) -> str:
    """
    Return a CSV string representation of the input data.

    Raises
    ------
    NotImplementedError
        For geo-interface objects (not representable as CSV; see issue 3441)
        and for inputs narwhals cannot wrap as a DataFrame.
    KeyError
        For dicts missing a "values" entry.
    ImportError
        When a dict input is given but pandas is unavailable.
    """
    check_data_type(data)
    if isinstance(data, SupportsGeoInterface):
        # Fix: the message previously interpolated
        # `type(SupportsGeoInterface).__name__` — the *metaclass* name —
        # rather than the offending data's own type.
        msg = (
            "to_csv does not yet work with data that "
            f"is of type {type(data).__name__!r}.\n"
            "See https://github.com/vega/altair/issues/3441"
        )
        raise NotImplementedError(msg)
    elif is_pandas_dataframe(data):
        data = sanitize_pandas_dataframe(data)
        return data.to_csv(index=False)
    elif isinstance(data, dict):
        if "values" not in data:
            msg = "values expected in data dict, but not present"
            raise KeyError(msg)
        try:
            import pandas as pd
        except ImportError as exc:
            msg = "pandas is required to convert a dict to a CSV string"
            raise ImportError(msg) from exc
        return pd.DataFrame.from_dict(data["values"]).to_csv(index=False)
    try:
        data_nw = nw.from_native(data, eager_only=True)
    except TypeError as exc:
        msg = "to_csv only works with data expressed as a DataFrame or as a dict"
        raise NotImplementedError(msg) from exc
    return data_nw.write_csv()
|
| 423 |
+
|
| 424 |
+
|
| 425 |
+
def arrow_table_from_dfi_dataframe(dfi_df: DataFrameLike) -> pa.Table:
    """Convert a DataFrame Interchange Protocol compatible object to an Arrow Table."""
    import pyarrow as pa

    # First check if the dataframe object has a method to convert to arrow.
    # Give this preference over the pyarrow from_dataframe function since the object
    # has more control over the conversion, and may have broader compatibility.
    # This is the case for Polars, which supports Date32 columns in direct conversion
    # while pyarrow does not yet support this type in from_dataframe
    for convert_method_name in ("arrow", "to_arrow", "to_arrow_table", "to_pyarrow"):
        convert_method = getattr(dfi_df, convert_method_name, None)
        if callable(convert_method):
            result = convert_method()
            # Only accept the result if it really is an Arrow table.
            if isinstance(result, pa.Table):
                return result

    # Fall back to pyarrow's generic interchange-protocol conversion.
    pi = import_pyarrow_interchange()
    return pi.from_dataframe(dfi_df)
|
mgm/lib/python3.10/site-packages/altair/utils/deprecation.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
import threading
|
| 5 |
+
import warnings
|
| 6 |
+
from typing import TYPE_CHECKING, Literal
|
| 7 |
+
|
| 8 |
+
if sys.version_info >= (3, 13):
|
| 9 |
+
from warnings import deprecated as _deprecated
|
| 10 |
+
else:
|
| 11 |
+
from typing_extensions import deprecated as _deprecated
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
if sys.version_info >= (3, 11):
|
| 16 |
+
from typing import LiteralString
|
| 17 |
+
else:
|
| 18 |
+
from typing_extensions import LiteralString
|
| 19 |
+
|
| 20 |
+
# Public API of this module; keep sorted alphabetically.
__all__ = [
    "AltairDeprecationWarning",
    "deprecated",
    "deprecated_static_only",
    "deprecated_warn",
]
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# Warning category for all altair deprecations; subclassing DeprecationWarning
# keeps Python's default warning-filter behaviour.
class AltairDeprecationWarning(DeprecationWarning): ...
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _format_message(
|
| 32 |
+
version: LiteralString,
|
| 33 |
+
alternative: LiteralString | None,
|
| 34 |
+
message: LiteralString | None,
|
| 35 |
+
/,
|
| 36 |
+
) -> LiteralString:
|
| 37 |
+
output = f"\nDeprecated since `altair={version}`."
|
| 38 |
+
if alternative:
|
| 39 |
+
output = f"{output} Use {alternative} instead."
|
| 40 |
+
return f"{output}\n{message}" if message else output
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
# NOTE: Annotating the return type breaks `pyright` detecting [reportDeprecated]
|
| 44 |
+
# NOTE: `LiteralString` requirement is introduced by stubs
|
| 45 |
+
def deprecated(
    *,
    version: LiteralString,
    alternative: LiteralString | None = None,
    message: LiteralString | None = None,
    category: type[AltairDeprecationWarning] | None = AltairDeprecationWarning,
    stacklevel: int = 1,
):  # te.deprecated
    """
    Mark a class, function or overload as deprecated.

    Static type checkers flag every usage of the decorated object; at runtime
    a warning of *category* is emitted unless *category* is ``None``.

    Parameters
    ----------
    version
        ``altair`` version in which the deprecation first appeared.
    alternative
        Replacement class/method/function to suggest.
    message
        Extra text appended after the version/alternative notice.
    category
        Warning class emitted at runtime; ``None`` disables the runtime
        warning. Static type checker behavior is unaffected.
    stacklevel
        Frame the runtime warning is attributed to; ``1`` (the default)
        points at the direct caller of the deprecated object. Static type
        checker behavior is unaffected.

    References
    ----------
    [PEP 702](https://peps.python.org/pep-0702/)
    """
    # Delegate to warnings.deprecated / typing_extensions.deprecated with the
    # standard altair notice text.
    return _deprecated(
        _format_message(version, alternative, message),
        category=category,
        stacklevel=stacklevel,
    )
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def deprecated_warn(
    message: LiteralString,
    *,
    version: LiteralString,
    alternative: LiteralString | None = None,
    category: type[AltairDeprecationWarning] = AltairDeprecationWarning,
    stacklevel: int = 2,
    action: Literal["once"] | None = None,
) -> None:
    """
    Warn that the current code path is deprecated.

    Reserve this for non-trivial cases *only*; prefer ``@deprecated``, which
    static type checkers recognize.

    Parameters
    ----------
    message
        Explanation of the deprecated behaviour.

        .. note::
            Unlike ``@deprecated``, this is *not* optional.

    version
        ``altair`` version in which the deprecation first appeared.
    alternative
        Replacement argument/method/function to suggest.
    category
        The runtime warning type emitted.
    stacklevel
        How far up the call stack the warning is attributed; ``2`` points at
        the caller of the code invoking ``deprecated_warn()``.
    action
        ``"once"`` deduplicates identical messages per session; ``None``
        warns on every call.

    References
    ----------
    [warnings.warn](https://docs.python.org/3/library/warnings.html#warnings.warn)
    """
    msg = _format_message(version, alternative, message)
    if action == "once":
        _warn_once(msg, category=category, stacklevel=stacklevel)
        return
    if action is not None:
        raise NotImplementedError(action)
    warnings.warn(msg, category=category, stacklevel=stacklevel)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
# Re-export of the raw decorator for static-only deprecation notices (no
# altair-formatted message, typically paired with ``category=None``).
deprecated_static_only = _deprecated
"""
Using this decorator **exactly as described**, ensures ``message`` is displayed to a static type checker.

**BE CAREFUL USING THIS**.

See screenshots in `comment`_ for motivation.

Every use should look like::

    @deprecated_static_only(
        "Deprecated since `altair=5.5.0`. Use altair.other instead.",
        category=None,
    )
    def old_function(*args): ...

If a runtime warning is desired, use `@alt.utils.deprecated` instead.

Parameters
----------
message : LiteralString
    - **Not** a variable
    - **Not** use placeholders
    - **Not** use concatenation
    - **Do not use anything that could be considered dynamic**

category : None
    You **need** to explicitly pass ``None``

.. _comment:
    https://github.com/vega/altair/pull/3618#issuecomment-2423991968
---
"""
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class _WarningsMonitor:
|
| 168 |
+
def __init__(self) -> None:
|
| 169 |
+
self._warned: dict[LiteralString, Literal[True]] = {}
|
| 170 |
+
self._lock = threading.Lock()
|
| 171 |
+
|
| 172 |
+
def __contains__(self, key: LiteralString, /) -> bool:
|
| 173 |
+
with self._lock:
|
| 174 |
+
return key in self._warned
|
| 175 |
+
|
| 176 |
+
def hit(self, key: LiteralString, /) -> None:
|
| 177 |
+
with self._lock:
|
| 178 |
+
self._warned[key] = True
|
| 179 |
+
|
| 180 |
+
def clear(self) -> None:
|
| 181 |
+
with self._lock:
|
| 182 |
+
self._warned.clear()
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
# Module-level singleton backing the ``action="once"`` deduplication.
_warnings_monitor = _WarningsMonitor()
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def _warn_once(
    msg: LiteralString, /, *, category: type[AltairDeprecationWarning], stacklevel: int
) -> None:
    """
    Emit *msg* as a deprecation warning at most once per interpreter session.

    Parameters
    ----------
    msg
        Fully formatted warning text; also serves as the deduplication key.
    category
        The warning class to emit.
    stacklevel
        Caller-supplied stacklevel; bumped by one to skip this helper frame.
    """
    # Fix: dropped the redundant ``global`` declaration -- the monitor is only
    # used via method calls, never rebound -- and flattened else-after-return.
    if msg in _warnings_monitor:
        return
    _warnings_monitor.hit(msg)
    warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
|
mgm/lib/python3.10/site-packages/altair/utils/display.py
ADDED
|
@@ -0,0 +1,232 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import pkgutil
|
| 5 |
+
import textwrap
|
| 6 |
+
import uuid
|
| 7 |
+
from typing import TYPE_CHECKING, Any, Callable, Union
|
| 8 |
+
|
| 9 |
+
from ._vegafusion_data import compile_with_vegafusion, using_vegafusion
|
| 10 |
+
from .mimebundle import spec_to_mimebundle
|
| 11 |
+
from .plugin_registry import PluginEnabler, PluginRegistry
|
| 12 |
+
from .schemapi import validate_jsonschema
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
import sys
|
| 16 |
+
|
| 17 |
+
if sys.version_info >= (3, 10):
|
| 18 |
+
from typing import TypeAlias
|
| 19 |
+
else:
|
| 20 |
+
from typing_extensions import TypeAlias
|
| 21 |
+
|
| 22 |
+
# ==============================================================================
# Renderer registry
# ==============================================================================
# MimeBundleType needs to be the same as what are acceptable return values
# for _repr_mimebundle_,
# see https://ipython.readthedocs.io/en/stable/config/integrating.html#MyObject._repr_mimebundle_
MimeBundleDataType: TypeAlias = dict[str, Any]
MimeBundleMetaDataType: TypeAlias = dict[str, Any]
# Either bare data, or a (data, metadata) pair -- both accepted by IPython.
MimeBundleType: TypeAlias = Union[
    MimeBundleDataType, tuple[MimeBundleDataType, MimeBundleMetaDataType]
]
# A renderer maps a chart spec (plus options) to a MIME bundle.
RendererType: TypeAlias = Callable[..., MimeBundleType]
# Subtype of MimeBundleType as more specific in the values of the dictionaries

DefaultRendererReturnType: TypeAlias = tuple[
    dict[str, Union[str, dict[str, Any]]], dict[str, dict[str, Any]]
]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class RendererRegistry(PluginRegistry[RendererType, MimeBundleType]):
    """Registry of renderer plugins that turn chart specs into MIME bundles."""

    # Human-readable hints shown when a named entrypoint fails to load.
    entrypoint_err_messages = {
        "notebook": textwrap.dedent(
            """
            To use the 'notebook' renderer, you must install the vega package
            and the associated Jupyter extension.
            See https://altair-viz.github.io/getting_started/installation.html
            for more information.
            """
        ),
    }

    def set_embed_options(
        self,
        defaultStyle: bool | str | None = None,
        renderer: str | None = None,
        width: int | None = None,
        height: int | None = None,
        padding: int | None = None,
        scaleFactor: float | None = None,
        actions: bool | dict[str, bool] | None = None,
        format_locale: str | dict | None = None,
        time_format_locale: str | dict | None = None,
        **kwargs,
    ) -> PluginEnabler:
        """
        Set options for embeddings of Vega & Vega-Lite charts.

        Options are fully documented at https://github.com/vega/vega-embed.
        Similar to the `enable()` method, this can be used as either
        a persistent global switch, or as a temporary local setting using
        a context manager (i.e. a `with` statement).

        Parameters
        ----------
        defaultStyle : bool or string
            Specify a default stylesheet for embed actions.
        renderer : string
            The renderer to use for the view. One of "canvas" (default) or "svg"
        width : integer
            The view width in pixels
        height : integer
            The view height in pixels
        padding : integer
            The view padding in pixels
        scaleFactor : number
            The number by which to multiply the width and height (default 1)
            of an exported PNG or SVG image.
        actions : bool or dict
            Determines if action links ("Export as PNG/SVG", "View Source",
            "View Vega" (only for Vega-Lite), "Open in Vega Editor") are
            included with the embedded view. If the value is true, all action
            links will be shown and none if the value is false. This property
            can take a key-value mapping object that maps keys (export, source,
            compiled, editor) to boolean values for determining if
            each action link should be shown.
        format_locale : str or dict
            d3-format locale name or dictionary. Defaults to "en-US" for United States English.
            See https://github.com/d3/d3-format/tree/main/locale for available names and example
            definitions.
        time_format_locale : str or dict
            d3-time-format locale name or dictionary. Defaults to "en-US" for United States English.
            See https://github.com/d3/d3-time-format/tree/main/locale for available names and example
            definitions.
        **kwargs :
            Additional options are passed directly to embed options.
        """
        # Collect only explicitly-provided options (None means "unset") so that
        # absent parameters do not clobber anything the caller put in kwargs.
        options: dict[str, bool | str | float | dict[str, bool] | None] = {
            "defaultStyle": defaultStyle,
            "renderer": renderer,
            "width": width,
            "height": height,
            "padding": padding,
            "scaleFactor": scaleFactor,
            "actions": actions,
            "formatLocale": format_locale,
            "timeFormatLocale": time_format_locale,
        }
        kwargs.update({key: val for key, val in options.items() if val is not None})
        # Re-enable the current renderer (None = keep active one) with the
        # merged embed options applied.
        return self.enable(None, embed_options=kwargs)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
# ==============================================================================
|
| 124 |
+
# VegaLite v1/v2 renderer logic
|
| 125 |
+
# ==============================================================================
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class Displayable:
    """
    A base display class for VegaLite v1/v2.

    This class takes a VegaLite v1/v2 spec and does the following:

    1. Optionally validates the spec against a schema.
    2. Uses the RendererPlugin to grab a renderer and call it when the
       IPython/Jupyter display method (_repr_mimebundle_) is called.

    The spec passed to this class must be fully schema compliant and already
    have the data portion of the spec fully processed and ready to serialize.
    In practice, this means, the data portion of the spec should have been passed
    through appropriate data model transformers.
    """

    # Registry consulted by _repr_mimebundle_; subclasses assign a real one.
    renderers: RendererRegistry | None = None
    # (package, resource) pair handed to pkgutil.get_data to locate the schema.
    schema_path = ("altair", "")

    def __init__(self, spec: dict[str, Any], validate: bool = False) -> None:
        self.spec = spec
        self.validate = validate
        # NOTE(review): ``validate`` is stored but the call below runs
        # unconditionally -- confirm whether validation was meant to be
        # gated on ``self.validate``.
        self._validate()

    def _validate(self) -> None:
        """Validate the spec against the schema."""
        data = pkgutil.get_data(*self.schema_path)
        assert data is not None
        schema_dict: dict[str, Any] = json.loads(data.decode("utf-8"))
        validate_jsonschema(
            self.spec,
            schema_dict,
        )

    def _repr_mimebundle_(
        self, include: Any = None, exclude: Any = None
    ) -> MimeBundleType:
        """Return a MIME bundle for display in Jupyter frontends."""
        if self.renderers is not None:
            renderer_func = self.renderers.get()
            assert renderer_func is not None
            return renderer_func(self.spec)
        else:
            # No registry configured: render nothing rather than raising.
            return {}
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def default_renderer_base(
    spec: dict[str, Any], mime_type: str, str_repr: str, **options
) -> DefaultRendererReturnType:
    """
    A default renderer for Vega or VegaLite that works for modern frontends.

    This renderer works with modern frontends (JupyterLab, nteract) that know
    how to render the custom VegaLite MIME type listed above.

    Returns a ``(bundle, metadata)`` pair; *options*, when given, are placed
    in *metadata* under the (possibly swapped) mime type.
    """
    # Local import to avoid circular ImportError
    from altair.vegalite.v5.display import VEGA_MIME_TYPE, VEGALITE_MIME_TYPE

    assert isinstance(spec, dict)
    bundle: dict[str, str | dict] = {}
    metadata: dict[str, dict[str, Any]] = {}

    if using_vegafusion():
        # VegaFusion compiles Vega-Lite down to Vega.
        spec = compile_with_vegafusion(spec)

        # Swap mimetype from Vega-Lite to Vega.
        # If mimetype was JSON, leave it alone
        if mime_type == VEGALITE_MIME_TYPE:
            mime_type = VEGA_MIME_TYPE

    bundle[mime_type] = spec
    # Plain-text fallback for frontends that cannot render the chart.
    bundle["text/plain"] = str_repr
    if options:
        metadata[mime_type] = options
    return bundle, metadata
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def json_renderer_base(
    spec: dict[str, Any], str_repr: str, **options
) -> DefaultRendererReturnType:
    """
    Render a chart spec under the ``application/json`` MIME type.

    JupyterLab/nteract display this bundle as a collapsible JSON tree.
    """
    # Delegate to the generic renderer with a fixed JSON mimetype.
    json_mime = "application/json"
    return default_renderer_base(
        spec, mime_type=json_mime, str_repr=str_repr, **options
    )
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class HTMLRenderer:
    """Render charts as HTML, generating a unique output div on every call."""

    def __init__(self, output_div: str = "altair-viz-{}", **kwargs) -> None:
        # ``output_div`` is a format template; the placeholder receives a
        # fresh hex token each time ``self.output_div`` is read.
        self._output_div = output_div
        self.kwargs = kwargs

    @property
    def output_div(self) -> str:
        # New uuid per access so repeated renders never collide in one page.
        return self._output_div.format(uuid.uuid4().hex)

    def __call__(self, spec: dict[str, Any], **metadata) -> dict[str, str]:
        render_kwargs = dict(self.kwargs)
        render_kwargs.update(**metadata, output_div=self.output_div)
        return spec_to_mimebundle(spec, format="html", **render_kwargs)
|
mgm/lib/python3.10/site-packages/altair/utils/html.py
ADDED
|
@@ -0,0 +1,411 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
from typing import Any, Literal
|
| 5 |
+
|
| 6 |
+
import jinja2
|
| 7 |
+
|
| 8 |
+
from altair.utils._importers import import_vl_convert, vl_version_for_vl_convert
|
| 9 |
+
|
| 10 |
+
TemplateName = Literal["standard", "universal", "inline", "olli"]
|
| 11 |
+
RenderMode = Literal["vega", "vega-lite"]
|
| 12 |
+
|
| 13 |
+
HTML_TEMPLATE = jinja2.Template(
|
| 14 |
+
"""
|
| 15 |
+
{%- if fullhtml -%}
|
| 16 |
+
<!DOCTYPE html>
|
| 17 |
+
<html>
|
| 18 |
+
<head>
|
| 19 |
+
<meta charset="UTF-8">
|
| 20 |
+
{%- endif %}
|
| 21 |
+
<style>
|
| 22 |
+
#{{ output_div }}.vega-embed {
|
| 23 |
+
width: 100%;
|
| 24 |
+
display: flex;
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
#{{ output_div }}.vega-embed details,
|
| 28 |
+
#{{ output_div }}.vega-embed details summary {
|
| 29 |
+
position: relative;
|
| 30 |
+
}
|
| 31 |
+
</style>
|
| 32 |
+
{%- if not requirejs %}
|
| 33 |
+
<script type="text/javascript" src="{{ base_url }}/vega@{{ vega_version }}"></script>
|
| 34 |
+
{%- if mode == 'vega-lite' %}
|
| 35 |
+
<script type="text/javascript" src="{{ base_url }}/vega-lite@{{ vegalite_version }}"></script>
|
| 36 |
+
{%- endif %}
|
| 37 |
+
<script type="text/javascript" src="{{ base_url }}/vega-embed@{{ vegaembed_version }}"></script>
|
| 38 |
+
{%- endif %}
|
| 39 |
+
{%- if fullhtml %}
|
| 40 |
+
{%- if requirejs %}
|
| 41 |
+
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js"></script>
|
| 42 |
+
<script>
|
| 43 |
+
requirejs.config({
|
| 44 |
+
"paths": {
|
| 45 |
+
"vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
|
| 46 |
+
"vega-lib": "{{ base_url }}/vega-lib?noext",
|
| 47 |
+
"vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
|
| 48 |
+
"vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
|
| 49 |
+
}
|
| 50 |
+
});
|
| 51 |
+
</script>
|
| 52 |
+
{%- endif %}
|
| 53 |
+
</head>
|
| 54 |
+
<body>
|
| 55 |
+
{%- endif %}
|
| 56 |
+
<div id="{{ output_div }}"></div>
|
| 57 |
+
<script>
|
| 58 |
+
{%- if requirejs and not fullhtml %}
|
| 59 |
+
requirejs.config({
|
| 60 |
+
"paths": {
|
| 61 |
+
"vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
|
| 62 |
+
"vega-lib": "{{ base_url }}/vega-lib?noext",
|
| 63 |
+
"vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
|
| 64 |
+
"vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
|
| 65 |
+
}
|
| 66 |
+
});
|
| 67 |
+
{% endif %}
|
| 68 |
+
{% if requirejs -%}
|
| 69 |
+
require(['vega-embed'],
|
| 70 |
+
{%- else -%}
|
| 71 |
+
(
|
| 72 |
+
{%- endif -%}
|
| 73 |
+
function(vegaEmbed) {
|
| 74 |
+
var spec = {{ spec }};
|
| 75 |
+
var embedOpt = {{ embed_options }};
|
| 76 |
+
|
| 77 |
+
function showError(el, error){
|
| 78 |
+
el.innerHTML = ('<div style="color:red;">'
|
| 79 |
+
+ '<p>JavaScript Error: ' + error.message + '</p>'
|
| 80 |
+
+ "<p>This usually means there's a typo in your chart specification. "
|
| 81 |
+
+ "See the javascript console for the full traceback.</p>"
|
| 82 |
+
+ '</div>');
|
| 83 |
+
throw error;
|
| 84 |
+
}
|
| 85 |
+
const el = document.getElementById('{{ output_div }}');
|
| 86 |
+
vegaEmbed("#{{ output_div }}", spec, embedOpt)
|
| 87 |
+
.catch(error => showError(el, error));
|
| 88 |
+
}){% if not requirejs %}(vegaEmbed){% endif %};
|
| 89 |
+
|
| 90 |
+
</script>
|
| 91 |
+
{%- if fullhtml %}
|
| 92 |
+
</body>
|
| 93 |
+
</html>
|
| 94 |
+
{%- endif %}
|
| 95 |
+
"""
|
| 96 |
+
)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
HTML_TEMPLATE_UNIVERSAL = jinja2.Template(
|
| 100 |
+
"""
|
| 101 |
+
<style>
|
| 102 |
+
#{{ output_div }}.vega-embed {
|
| 103 |
+
width: 100%;
|
| 104 |
+
display: flex;
|
| 105 |
+
}
|
| 106 |
+
|
| 107 |
+
#{{ output_div }}.vega-embed details,
|
| 108 |
+
#{{ output_div }}.vega-embed details summary {
|
| 109 |
+
position: relative;
|
| 110 |
+
}
|
| 111 |
+
</style>
|
| 112 |
+
<div id="{{ output_div }}"></div>
|
| 113 |
+
<script type="text/javascript">
|
| 114 |
+
var VEGA_DEBUG = (typeof VEGA_DEBUG == "undefined") ? {} : VEGA_DEBUG;
|
| 115 |
+
(function(spec, embedOpt){
|
| 116 |
+
let outputDiv = document.currentScript.previousElementSibling;
|
| 117 |
+
if (outputDiv.id !== "{{ output_div }}") {
|
| 118 |
+
outputDiv = document.getElementById("{{ output_div }}");
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
const paths = {
|
| 122 |
+
"vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
|
| 123 |
+
"vega-lib": "{{ base_url }}/vega-lib?noext",
|
| 124 |
+
"vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
|
| 125 |
+
"vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
|
| 126 |
+
};
|
| 127 |
+
|
| 128 |
+
function maybeLoadScript(lib, version) {
|
| 129 |
+
var key = `${lib.replace("-", "")}_version`;
|
| 130 |
+
return (VEGA_DEBUG[key] == version) ?
|
| 131 |
+
Promise.resolve(paths[lib]) :
|
| 132 |
+
new Promise(function(resolve, reject) {
|
| 133 |
+
var s = document.createElement('script');
|
| 134 |
+
document.getElementsByTagName("head")[0].appendChild(s);
|
| 135 |
+
s.async = true;
|
| 136 |
+
s.onload = () => {
|
| 137 |
+
VEGA_DEBUG[key] = version;
|
| 138 |
+
return resolve(paths[lib]);
|
| 139 |
+
};
|
| 140 |
+
s.onerror = () => reject(`Error loading script: ${paths[lib]}`);
|
| 141 |
+
s.src = paths[lib];
|
| 142 |
+
});
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
function showError(err) {
|
| 146 |
+
outputDiv.innerHTML = `<div class="error" style="color:red;">${err}</div>`;
|
| 147 |
+
throw err;
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
function displayChart(vegaEmbed) {
|
| 151 |
+
vegaEmbed(outputDiv, spec, embedOpt)
|
| 152 |
+
.catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));
|
| 153 |
+
}
|
| 154 |
+
|
| 155 |
+
if(typeof define === "function" && define.amd) {
|
| 156 |
+
requirejs.config({paths});
|
| 157 |
+
let deps = ["vega-embed"];
|
| 158 |
+
require(deps, displayChart, err => showError(`Error loading script: ${err.message}`));
|
| 159 |
+
} else {
|
| 160 |
+
maybeLoadScript("vega", "{{vega_version}}")
|
| 161 |
+
.then(() => maybeLoadScript("vega-lite", "{{vegalite_version}}"))
|
| 162 |
+
.then(() => maybeLoadScript("vega-embed", "{{vegaembed_version}}"))
|
| 163 |
+
.catch(showError)
|
| 164 |
+
.then(() => displayChart(vegaEmbed));
|
| 165 |
+
}
|
| 166 |
+
})({{ spec }}, {{ embed_options }});
|
| 167 |
+
</script>
|
| 168 |
+
"""
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
# This is like the HTML_TEMPLATE template, but includes vega javascript inline
|
| 173 |
+
# so that the resulting file is not dependent on external resources. This was
|
| 174 |
+
# ported over from altair_saver.
|
| 175 |
+
#
|
| 176 |
+
# implies requirejs=False and full_html=True
|
| 177 |
+
INLINE_HTML_TEMPLATE = jinja2.Template(
|
| 178 |
+
"""\
|
| 179 |
+
<!DOCTYPE html>
|
| 180 |
+
<html>
|
| 181 |
+
<head>
|
| 182 |
+
<meta charset="UTF-8">
|
| 183 |
+
<style>
|
| 184 |
+
#{{ output_div }}.vega-embed {
|
| 185 |
+
width: 100%;
|
| 186 |
+
display: flex;
|
| 187 |
+
}
|
| 188 |
+
|
| 189 |
+
#{{ output_div }}.vega-embed details,
|
| 190 |
+
#{{ output_div }}.vega-embed details summary {
|
| 191 |
+
position: relative;
|
| 192 |
+
}
|
| 193 |
+
</style>
|
| 194 |
+
<script type="text/javascript">
|
| 195 |
+
// vega-embed.js bundle with Vega-Lite version v{{ vegalite_version }}
|
| 196 |
+
{{ vegaembed_script }}
|
| 197 |
+
</script>
|
| 198 |
+
</head>
|
| 199 |
+
<body>
|
| 200 |
+
<div class="vega-visualization" id="{{ output_div }}"></div>
|
| 201 |
+
<script type="text/javascript">
|
| 202 |
+
const spec = {{ spec }};
|
| 203 |
+
const embedOpt = {{ embed_options }};
|
| 204 |
+
vegaEmbed('#{{ output_div }}', spec, embedOpt).catch(console.error);
|
| 205 |
+
</script>
|
| 206 |
+
</body>
|
| 207 |
+
</html>
|
| 208 |
+
"""
|
| 209 |
+
)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
HTML_TEMPLATE_OLLI = jinja2.Template(
|
| 213 |
+
"""
|
| 214 |
+
<style>
|
| 215 |
+
#{{ output_div }}.vega-embed {
|
| 216 |
+
width: 100%;
|
| 217 |
+
display: flex;
|
| 218 |
+
}
|
| 219 |
+
|
| 220 |
+
#{{ output_div }}.vega-embed details,
|
| 221 |
+
#{{ output_div }}.vega-embed details summary {
|
| 222 |
+
position: relative;
|
| 223 |
+
}
|
| 224 |
+
</style>
|
| 225 |
+
<div id="{{ output_div }}"></div>
|
| 226 |
+
<script type="text/javascript">
|
| 227 |
+
var VEGA_DEBUG = (typeof VEGA_DEBUG == "undefined") ? {} : VEGA_DEBUG;
|
| 228 |
+
(function(spec, embedOpt){
|
| 229 |
+
let outputDiv = document.currentScript.previousElementSibling;
|
| 230 |
+
if (outputDiv.id !== "{{ output_div }}") {
|
| 231 |
+
outputDiv = document.getElementById("{{ output_div }}");
|
| 232 |
+
}
|
| 233 |
+
const olliDiv = document.createElement("div");
|
| 234 |
+
const vegaDiv = document.createElement("div");
|
| 235 |
+
outputDiv.appendChild(vegaDiv);
|
| 236 |
+
outputDiv.appendChild(olliDiv);
|
| 237 |
+
outputDiv = vegaDiv;
|
| 238 |
+
|
| 239 |
+
const paths = {
|
| 240 |
+
"vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
|
| 241 |
+
"vega-lib": "{{ base_url }}/vega-lib?noext",
|
| 242 |
+
"vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
|
| 243 |
+
"vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
|
| 244 |
+
"olli": "{{ base_url }}/olli@{{ olli_version }}?noext",
|
| 245 |
+
"olli-adapters": "{{ base_url }}/olli-adapters@{{ olli_adapters_version }}?noext",
|
| 246 |
+
};
|
| 247 |
+
|
| 248 |
+
function maybeLoadScript(lib, version) {
|
| 249 |
+
var key = `${lib.replace("-", "")}_version`;
|
| 250 |
+
return (VEGA_DEBUG[key] == version) ?
|
| 251 |
+
Promise.resolve(paths[lib]) :
|
| 252 |
+
new Promise(function(resolve, reject) {
|
| 253 |
+
var s = document.createElement('script');
|
| 254 |
+
document.getElementsByTagName("head")[0].appendChild(s);
|
| 255 |
+
s.async = true;
|
| 256 |
+
s.onload = () => {
|
| 257 |
+
VEGA_DEBUG[key] = version;
|
| 258 |
+
return resolve(paths[lib]);
|
| 259 |
+
};
|
| 260 |
+
s.onerror = () => reject(`Error loading script: ${paths[lib]}`);
|
| 261 |
+
s.src = paths[lib];
|
| 262 |
+
});
|
| 263 |
+
}
|
| 264 |
+
|
| 265 |
+
function showError(err) {
|
| 266 |
+
outputDiv.innerHTML = `<div class="error" style="color:red;">${err}</div>`;
|
| 267 |
+
throw err;
|
| 268 |
+
}
|
| 269 |
+
|
| 270 |
+
function displayChart(vegaEmbed, olli, olliAdapters) {
|
| 271 |
+
vegaEmbed(outputDiv, spec, embedOpt)
|
| 272 |
+
.catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));
|
| 273 |
+
olliAdapters.VegaLiteAdapter(spec).then(olliVisSpec => {
|
| 274 |
+
const olliFunc = typeof olli === 'function' ? olli : olli.olli;
|
| 275 |
+
const olliRender = olliFunc(olliVisSpec);
|
| 276 |
+
olliDiv.append(olliRender);
|
| 277 |
+
});
|
| 278 |
+
}
|
| 279 |
+
|
| 280 |
+
if(typeof define === "function" && define.amd) {
|
| 281 |
+
requirejs.config({paths});
|
| 282 |
+
let deps = ["vega-embed", "olli", "olli-adapters"];
|
| 283 |
+
require(deps, displayChart, err => showError(`Error loading script: ${err.message}`));
|
| 284 |
+
} else {
|
| 285 |
+
maybeLoadScript("vega", "{{vega_version}}")
|
| 286 |
+
.then(() => maybeLoadScript("vega-lite", "{{vegalite_version}}"))
|
| 287 |
+
.then(() => maybeLoadScript("vega-embed", "{{vegaembed_version}}"))
|
| 288 |
+
.then(() => maybeLoadScript("olli", "{{olli_version}}"))
|
| 289 |
+
.then(() => maybeLoadScript("olli-adapters", "{{olli_adapters_version}}"))
|
| 290 |
+
.catch(showError)
|
| 291 |
+
.then(() => displayChart(vegaEmbed, olli, OlliAdapters));
|
| 292 |
+
}
|
| 293 |
+
})({{ spec }}, {{ embed_options }});
|
| 294 |
+
</script>
|
| 295 |
+
"""
|
| 296 |
+
)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
TEMPLATES: dict[TemplateName, jinja2.Template] = {
|
| 300 |
+
"standard": HTML_TEMPLATE,
|
| 301 |
+
"universal": HTML_TEMPLATE_UNIVERSAL,
|
| 302 |
+
"inline": INLINE_HTML_TEMPLATE,
|
| 303 |
+
"olli": HTML_TEMPLATE_OLLI,
|
| 304 |
+
}
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def spec_to_html(
|
| 308 |
+
spec: dict[str, Any],
|
| 309 |
+
mode: RenderMode,
|
| 310 |
+
vega_version: str | None,
|
| 311 |
+
vegaembed_version: str | None,
|
| 312 |
+
vegalite_version: str | None = None,
|
| 313 |
+
base_url: str = "https://cdn.jsdelivr.net/npm",
|
| 314 |
+
output_div: str = "vis",
|
| 315 |
+
embed_options: dict[str, Any] | None = None,
|
| 316 |
+
json_kwds: dict[str, Any] | None = None,
|
| 317 |
+
fullhtml: bool = True,
|
| 318 |
+
requirejs: bool = False,
|
| 319 |
+
template: jinja2.Template | TemplateName = "standard",
|
| 320 |
+
) -> str:
|
| 321 |
+
"""
|
| 322 |
+
Embed a Vega/Vega-Lite spec into an HTML page.
|
| 323 |
+
|
| 324 |
+
Parameters
|
| 325 |
+
----------
|
| 326 |
+
spec : dict
|
| 327 |
+
a dictionary representing a vega-lite plot spec.
|
| 328 |
+
mode : string {'vega' | 'vega-lite'}
|
| 329 |
+
The rendering mode. This value is overridden by embed_options['mode'],
|
| 330 |
+
if it is present.
|
| 331 |
+
vega_version : string
|
| 332 |
+
For html output, the version of vega.js to use.
|
| 333 |
+
vegalite_version : string
|
| 334 |
+
For html output, the version of vegalite.js to use.
|
| 335 |
+
vegaembed_version : string
|
| 336 |
+
For html output, the version of vegaembed.js to use.
|
| 337 |
+
base_url : string (optional)
|
| 338 |
+
The base url from which to load the javascript libraries.
|
| 339 |
+
output_div : string (optional)
|
| 340 |
+
The id of the div element where the plot will be shown.
|
| 341 |
+
embed_options : dict (optional)
|
| 342 |
+
Dictionary of options to pass to the vega-embed script. Default
|
| 343 |
+
entry is {'mode': mode}.
|
| 344 |
+
json_kwds : dict (optional)
|
| 345 |
+
Dictionary of keywords to pass to json.dumps().
|
| 346 |
+
fullhtml : boolean (optional)
|
| 347 |
+
If True (default) then return a full html page. If False, then return
|
| 348 |
+
an HTML snippet that can be embedded into an HTML page.
|
| 349 |
+
requirejs : boolean (optional)
|
| 350 |
+
If False (default) then load libraries from base_url using <script>
|
| 351 |
+
tags. If True, then load libraries using requirejs
|
| 352 |
+
template : jinja2.Template or string (optional)
|
| 353 |
+
Specify the template to use (default = 'standard'). If template is a
|
| 354 |
+
string, it must be one of {'universal', 'standard', 'inline'}. Otherwise, it
|
| 355 |
+
can be a jinja2.Template object containing a custom template.
|
| 356 |
+
|
| 357 |
+
Returns
|
| 358 |
+
-------
|
| 359 |
+
output : string
|
| 360 |
+
an HTML string for rendering the chart.
|
| 361 |
+
"""
|
| 362 |
+
embed_options = embed_options or {}
|
| 363 |
+
json_kwds = json_kwds or {}
|
| 364 |
+
|
| 365 |
+
mode = embed_options.setdefault("mode", mode)
|
| 366 |
+
|
| 367 |
+
if mode not in {"vega", "vega-lite"}:
|
| 368 |
+
msg = "mode must be either 'vega' or 'vega-lite'"
|
| 369 |
+
raise ValueError(msg)
|
| 370 |
+
|
| 371 |
+
if vega_version is None:
|
| 372 |
+
msg = "must specify vega_version"
|
| 373 |
+
raise ValueError(msg)
|
| 374 |
+
|
| 375 |
+
if vegaembed_version is None:
|
| 376 |
+
msg = "must specify vegaembed_version"
|
| 377 |
+
raise ValueError(msg)
|
| 378 |
+
|
| 379 |
+
if mode == "vega-lite" and vegalite_version is None:
|
| 380 |
+
msg = "must specify vega-lite version for mode='vega-lite'"
|
| 381 |
+
raise ValueError(msg)
|
| 382 |
+
|
| 383 |
+
render_kwargs = {}
|
| 384 |
+
if template == "inline":
|
| 385 |
+
vlc = import_vl_convert()
|
| 386 |
+
vl_version = vl_version_for_vl_convert()
|
| 387 |
+
render_kwargs["vegaembed_script"] = vlc.javascript_bundle(vl_version=vl_version)
|
| 388 |
+
elif template == "olli":
|
| 389 |
+
OLLI_VERSION = "2"
|
| 390 |
+
OLLI_ADAPTERS_VERSION = "2"
|
| 391 |
+
render_kwargs["olli_version"] = OLLI_VERSION
|
| 392 |
+
render_kwargs["olli_adapters_version"] = OLLI_ADAPTERS_VERSION
|
| 393 |
+
|
| 394 |
+
jinja_template = TEMPLATES.get(template, template) # type: ignore[arg-type]
|
| 395 |
+
if not hasattr(jinja_template, "render"):
|
| 396 |
+
msg = f"Invalid template: {jinja_template}"
|
| 397 |
+
raise ValueError(msg)
|
| 398 |
+
|
| 399 |
+
return jinja_template.render(
|
| 400 |
+
spec=json.dumps(spec, **json_kwds),
|
| 401 |
+
embed_options=json.dumps(embed_options),
|
| 402 |
+
mode=mode,
|
| 403 |
+
vega_version=vega_version,
|
| 404 |
+
vegalite_version=vegalite_version,
|
| 405 |
+
vegaembed_version=vegaembed_version,
|
| 406 |
+
base_url=base_url,
|
| 407 |
+
output_div=output_div,
|
| 408 |
+
fullhtml=fullhtml,
|
| 409 |
+
requirejs=requirejs,
|
| 410 |
+
**render_kwargs,
|
| 411 |
+
)
|
mgm/lib/python3.10/site-packages/altair/utils/mimebundle.py
ADDED
|
@@ -0,0 +1,377 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import struct
|
| 4 |
+
from typing import TYPE_CHECKING, Any, Literal, cast, overload
|
| 5 |
+
|
| 6 |
+
from ._importers import import_vl_convert, vl_version_for_vl_convert
|
| 7 |
+
from .html import spec_to_html
|
| 8 |
+
|
| 9 |
+
if TYPE_CHECKING:
|
| 10 |
+
import sys
|
| 11 |
+
|
| 12 |
+
if sys.version_info >= (3, 10):
|
| 13 |
+
from typing import TypeAlias
|
| 14 |
+
else:
|
| 15 |
+
from typing_extensions import TypeAlias
|
| 16 |
+
|
| 17 |
+
MimeBundleFormat: TypeAlias = Literal[
|
| 18 |
+
"html", "json", "png", "svg", "pdf", "vega", "vega-lite"
|
| 19 |
+
]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@overload
|
| 23 |
+
def spec_to_mimebundle(
|
| 24 |
+
spec: dict[str, Any],
|
| 25 |
+
format: Literal["json", "vega-lite"],
|
| 26 |
+
mode: Literal["vega-lite"] | None = ...,
|
| 27 |
+
vega_version: str | None = ...,
|
| 28 |
+
vegaembed_version: str | None = ...,
|
| 29 |
+
vegalite_version: str | None = ...,
|
| 30 |
+
embed_options: dict[str, Any] | None = ...,
|
| 31 |
+
engine: Literal["vl-convert"] | None = ...,
|
| 32 |
+
**kwargs,
|
| 33 |
+
) -> dict[str, dict[str, Any]]: ...
|
| 34 |
+
@overload
|
| 35 |
+
def spec_to_mimebundle(
|
| 36 |
+
spec: dict[str, Any],
|
| 37 |
+
format: Literal["html"],
|
| 38 |
+
mode: Literal["vega-lite"] | None = ...,
|
| 39 |
+
vega_version: str | None = ...,
|
| 40 |
+
vegaembed_version: str | None = ...,
|
| 41 |
+
vegalite_version: str | None = ...,
|
| 42 |
+
embed_options: dict[str, Any] | None = ...,
|
| 43 |
+
engine: Literal["vl-convert"] | None = ...,
|
| 44 |
+
**kwargs,
|
| 45 |
+
) -> dict[str, str]: ...
|
| 46 |
+
@overload
|
| 47 |
+
def spec_to_mimebundle(
|
| 48 |
+
spec: dict[str, Any],
|
| 49 |
+
format: Literal["pdf", "svg", "vega"],
|
| 50 |
+
mode: Literal["vega-lite"] | None = ...,
|
| 51 |
+
vega_version: str | None = ...,
|
| 52 |
+
vegaembed_version: str | None = ...,
|
| 53 |
+
vegalite_version: str | None = ...,
|
| 54 |
+
embed_options: dict[str, Any] | None = ...,
|
| 55 |
+
engine: Literal["vl-convert"] | None = ...,
|
| 56 |
+
**kwargs,
|
| 57 |
+
) -> dict[str, Any]: ...
|
| 58 |
+
@overload
|
| 59 |
+
def spec_to_mimebundle(
|
| 60 |
+
spec: dict[str, Any],
|
| 61 |
+
format: Literal["png"],
|
| 62 |
+
mode: Literal["vega-lite"] | None = ...,
|
| 63 |
+
vega_version: str | None = ...,
|
| 64 |
+
vegaembed_version: str | None = ...,
|
| 65 |
+
vegalite_version: str | None = ...,
|
| 66 |
+
embed_options: dict[str, Any] | None = ...,
|
| 67 |
+
engine: Literal["vl-convert"] | None = ...,
|
| 68 |
+
**kwargs,
|
| 69 |
+
) -> tuple[dict[str, Any], dict[str, Any]]: ...
|
| 70 |
+
def spec_to_mimebundle(
|
| 71 |
+
spec: dict[str, Any],
|
| 72 |
+
format: MimeBundleFormat,
|
| 73 |
+
mode: Literal["vega-lite"] | None = None,
|
| 74 |
+
vega_version: str | None = None,
|
| 75 |
+
vegaembed_version: str | None = None,
|
| 76 |
+
vegalite_version: str | None = None,
|
| 77 |
+
embed_options: dict[str, Any] | None = None,
|
| 78 |
+
engine: Literal["vl-convert"] | None = None,
|
| 79 |
+
**kwargs,
|
| 80 |
+
) -> dict[str, Any] | tuple[dict[str, Any], dict[str, Any]]:
|
| 81 |
+
"""
|
| 82 |
+
Convert a vega-lite specification to a mimebundle.
|
| 83 |
+
|
| 84 |
+
The mimebundle type is controlled by the ``format`` argument, which can be
|
| 85 |
+
one of the following ['html', 'json', 'png', 'svg', 'pdf', 'vega', 'vega-lite']
|
| 86 |
+
|
| 87 |
+
Parameters
|
| 88 |
+
----------
|
| 89 |
+
spec : dict
|
| 90 |
+
a dictionary representing a vega-lite plot spec
|
| 91 |
+
format : string {'html', 'json', 'png', 'svg', 'pdf', 'vega', 'vega-lite'}
|
| 92 |
+
the file format to be saved.
|
| 93 |
+
mode : string {'vega-lite'}
|
| 94 |
+
The rendering mode.
|
| 95 |
+
vega_version : string
|
| 96 |
+
The version of vega.js to use
|
| 97 |
+
vegaembed_version : string
|
| 98 |
+
The version of vegaembed.js to use
|
| 99 |
+
vegalite_version : string
|
| 100 |
+
The version of vegalite.js to use. Only required if mode=='vega-lite'
|
| 101 |
+
embed_options : dict (optional)
|
| 102 |
+
The vegaEmbed options dictionary. Defaults to the embed options set with
|
| 103 |
+
alt.renderers.set_embed_options().
|
| 104 |
+
(See https://github.com/vega/vega-embed for details)
|
| 105 |
+
engine: string {'vl-convert'}
|
| 106 |
+
the conversion engine to use for 'png', 'svg', 'pdf', and 'vega' formats
|
| 107 |
+
**kwargs :
|
| 108 |
+
Additional arguments will be passed to the generating function
|
| 109 |
+
|
| 110 |
+
Returns
|
| 111 |
+
-------
|
| 112 |
+
output : dict
|
| 113 |
+
a mime-bundle representing the image
|
| 114 |
+
|
| 115 |
+
Note
|
| 116 |
+
----
|
| 117 |
+
The png, svg, pdf, and vega outputs require the vl-convert package
|
| 118 |
+
"""
|
| 119 |
+
# Local import to avoid circular ImportError
|
| 120 |
+
from altair import renderers
|
| 121 |
+
from altair.utils.display import compile_with_vegafusion, using_vegafusion
|
| 122 |
+
|
| 123 |
+
if mode != "vega-lite":
|
| 124 |
+
msg = "mode must be 'vega-lite'"
|
| 125 |
+
raise ValueError(msg)
|
| 126 |
+
|
| 127 |
+
internal_mode: Literal["vega-lite", "vega"] = mode
|
| 128 |
+
if using_vegafusion():
|
| 129 |
+
spec = compile_with_vegafusion(spec)
|
| 130 |
+
internal_mode = "vega"
|
| 131 |
+
|
| 132 |
+
# Default to the embed options set by alt.renderers.set_embed_options
|
| 133 |
+
if embed_options is None:
|
| 134 |
+
final_embed_options = renderers.options.get("embed_options", {})
|
| 135 |
+
else:
|
| 136 |
+
final_embed_options = embed_options
|
| 137 |
+
|
| 138 |
+
embed_options = preprocess_embed_options(final_embed_options)
|
| 139 |
+
|
| 140 |
+
if format in {"png", "svg", "pdf", "vega"}:
|
| 141 |
+
return _spec_to_mimebundle_with_engine(
|
| 142 |
+
spec,
|
| 143 |
+
cast(Literal["png", "svg", "pdf", "vega"], format),
|
| 144 |
+
internal_mode,
|
| 145 |
+
engine=engine,
|
| 146 |
+
format_locale=embed_options.get("formatLocale", None),
|
| 147 |
+
time_format_locale=embed_options.get("timeFormatLocale", None),
|
| 148 |
+
**kwargs,
|
| 149 |
+
)
|
| 150 |
+
elif format == "html":
|
| 151 |
+
html = spec_to_html(
|
| 152 |
+
spec,
|
| 153 |
+
mode=internal_mode,
|
| 154 |
+
vega_version=vega_version,
|
| 155 |
+
vegaembed_version=vegaembed_version,
|
| 156 |
+
vegalite_version=vegalite_version,
|
| 157 |
+
embed_options=embed_options,
|
| 158 |
+
**kwargs,
|
| 159 |
+
)
|
| 160 |
+
return {"text/html": html}
|
| 161 |
+
elif format == "vega-lite":
|
| 162 |
+
if vegalite_version is None:
|
| 163 |
+
msg = "Must specify vegalite_version"
|
| 164 |
+
raise ValueError(msg)
|
| 165 |
+
return {f"application/vnd.vegalite.v{vegalite_version[0]}+json": spec}
|
| 166 |
+
elif format == "json":
|
| 167 |
+
return {"application/json": spec}
|
| 168 |
+
else:
|
| 169 |
+
msg = (
|
| 170 |
+
"format must be one of "
|
| 171 |
+
"['html', 'json', 'png', 'svg', 'pdf', 'vega', 'vega-lite']"
|
| 172 |
+
)
|
| 173 |
+
raise ValueError(msg)
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def _spec_to_mimebundle_with_engine(
|
| 177 |
+
spec: dict,
|
| 178 |
+
format: Literal["png", "svg", "pdf", "vega"],
|
| 179 |
+
mode: Literal["vega-lite", "vega"],
|
| 180 |
+
format_locale: str | dict | None = None,
|
| 181 |
+
time_format_locale: str | dict | None = None,
|
| 182 |
+
**kwargs,
|
| 183 |
+
) -> Any:
|
| 184 |
+
"""
|
| 185 |
+
Helper for Vega-Lite to mimebundle conversions that require an engine.
|
| 186 |
+
|
| 187 |
+
Parameters
|
| 188 |
+
----------
|
| 189 |
+
spec : dict
|
| 190 |
+
a dictionary representing a vega-lite plot spec
|
| 191 |
+
format : string {'png', 'svg', 'pdf', 'vega'}
|
| 192 |
+
the format of the mimebundle to be returned
|
| 193 |
+
mode : string {'vega-lite', 'vega'}
|
| 194 |
+
The rendering mode.
|
| 195 |
+
engine: string {'vl-convert'}
|
| 196 |
+
the conversion engine to use
|
| 197 |
+
format_locale : str or dict
|
| 198 |
+
d3-format locale name or dictionary. Defaults to "en-US" for United States English.
|
| 199 |
+
See https://github.com/d3/d3-format/tree/main/locale for available names and example
|
| 200 |
+
definitions.
|
| 201 |
+
time_format_locale : str or dict
|
| 202 |
+
d3-time-format locale name or dictionary. Defaults to "en-US" for United States English.
|
| 203 |
+
See https://github.com/d3/d3-time-format/tree/main/locale for available names and example
|
| 204 |
+
definitions.
|
| 205 |
+
**kwargs :
|
| 206 |
+
Additional arguments will be passed to the conversion function
|
| 207 |
+
"""
|
| 208 |
+
# Normalize the engine string (if any) by lower casing
|
| 209 |
+
# and removing underscores and hyphens
|
| 210 |
+
engine = kwargs.pop("engine", None)
|
| 211 |
+
normalized_engine = _validate_normalize_engine(engine, format)
|
| 212 |
+
|
| 213 |
+
if normalized_engine == "vlconvert":
|
| 214 |
+
vlc = import_vl_convert()
|
| 215 |
+
vl_version = vl_version_for_vl_convert()
|
| 216 |
+
if format == "vega":
|
| 217 |
+
if mode == "vega":
|
| 218 |
+
vg = spec
|
| 219 |
+
else:
|
| 220 |
+
vg = vlc.vegalite_to_vega(spec, vl_version=vl_version)
|
| 221 |
+
return {"application/vnd.vega.v5+json": vg}
|
| 222 |
+
elif format == "svg":
|
| 223 |
+
if mode == "vega":
|
| 224 |
+
svg = vlc.vega_to_svg(
|
| 225 |
+
spec,
|
| 226 |
+
format_locale=format_locale,
|
| 227 |
+
time_format_locale=time_format_locale,
|
| 228 |
+
)
|
| 229 |
+
else:
|
| 230 |
+
svg = vlc.vegalite_to_svg(
|
| 231 |
+
spec,
|
| 232 |
+
vl_version=vl_version,
|
| 233 |
+
format_locale=format_locale,
|
| 234 |
+
time_format_locale=time_format_locale,
|
| 235 |
+
)
|
| 236 |
+
return {"image/svg+xml": svg}
|
| 237 |
+
elif format == "png":
|
| 238 |
+
scale = kwargs.get("scale_factor", 1)
|
| 239 |
+
# The default ppi for a PNG file is 72
|
| 240 |
+
default_ppi = 72
|
| 241 |
+
ppi = kwargs.get("ppi", default_ppi)
|
| 242 |
+
if mode == "vega":
|
| 243 |
+
png = vlc.vega_to_png(
|
| 244 |
+
spec,
|
| 245 |
+
scale=scale,
|
| 246 |
+
ppi=ppi,
|
| 247 |
+
format_locale=format_locale,
|
| 248 |
+
time_format_locale=time_format_locale,
|
| 249 |
+
)
|
| 250 |
+
else:
|
| 251 |
+
png = vlc.vegalite_to_png(
|
| 252 |
+
spec,
|
| 253 |
+
vl_version=vl_version,
|
| 254 |
+
scale=scale,
|
| 255 |
+
ppi=ppi,
|
| 256 |
+
format_locale=format_locale,
|
| 257 |
+
time_format_locale=time_format_locale,
|
| 258 |
+
)
|
| 259 |
+
factor = ppi / default_ppi
|
| 260 |
+
w, h = _pngxy(png)
|
| 261 |
+
return {"image/png": png}, {
|
| 262 |
+
"image/png": {"width": w / factor, "height": h / factor}
|
| 263 |
+
}
|
| 264 |
+
elif format == "pdf":
|
| 265 |
+
scale = kwargs.get("scale_factor", 1)
|
| 266 |
+
if mode == "vega":
|
| 267 |
+
pdf = vlc.vega_to_pdf(
|
| 268 |
+
spec,
|
| 269 |
+
scale=scale,
|
| 270 |
+
format_locale=format_locale,
|
| 271 |
+
time_format_locale=time_format_locale,
|
| 272 |
+
)
|
| 273 |
+
else:
|
| 274 |
+
pdf = vlc.vegalite_to_pdf(
|
| 275 |
+
spec,
|
| 276 |
+
vl_version=vl_version,
|
| 277 |
+
scale=scale,
|
| 278 |
+
format_locale=format_locale,
|
| 279 |
+
time_format_locale=time_format_locale,
|
| 280 |
+
)
|
| 281 |
+
return {"application/pdf": pdf}
|
| 282 |
+
else:
|
| 283 |
+
# This should be validated above
|
| 284 |
+
# but raise exception for the sake of future development
|
| 285 |
+
msg = f"Unexpected format {format!r}"
|
| 286 |
+
raise ValueError(msg)
|
| 287 |
+
else:
|
| 288 |
+
# This should be validated above
|
| 289 |
+
# but raise exception for the sake of future development
|
| 290 |
+
msg = f"Unexpected normalized_engine {normalized_engine!r}"
|
| 291 |
+
raise ValueError(msg)
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def _validate_normalize_engine(
|
| 295 |
+
engine: Literal["vl-convert"] | None,
|
| 296 |
+
format: Literal["png", "svg", "pdf", "vega"],
|
| 297 |
+
) -> str:
|
| 298 |
+
"""
|
| 299 |
+
Helper to validate and normalize the user-provided engine.
|
| 300 |
+
|
| 301 |
+
engine : {None, 'vl-convert'}
|
| 302 |
+
the user-provided engine string
|
| 303 |
+
format : string {'png', 'svg', 'pdf', 'vega'}
|
| 304 |
+
the format of the mimebundle to be returned
|
| 305 |
+
"""
|
| 306 |
+
# Try to import vl_convert
|
| 307 |
+
try:
|
| 308 |
+
vlc = import_vl_convert()
|
| 309 |
+
except ImportError:
|
| 310 |
+
vlc = None
|
| 311 |
+
|
| 312 |
+
# Normalize engine string by lower casing and removing underscores and hyphens
|
| 313 |
+
normalized_engine = (
|
| 314 |
+
None if engine is None else engine.lower().replace("-", "").replace("_", "")
|
| 315 |
+
)
|
| 316 |
+
|
| 317 |
+
# Validate or infer default value of normalized_engine
|
| 318 |
+
if normalized_engine == "vlconvert":
|
| 319 |
+
if vlc is None:
|
| 320 |
+
msg = "The 'vl-convert' conversion engine requires the vl-convert-python package"
|
| 321 |
+
raise ValueError(msg)
|
| 322 |
+
elif normalized_engine is None:
|
| 323 |
+
if vlc is not None:
|
| 324 |
+
normalized_engine = "vlconvert"
|
| 325 |
+
else:
|
| 326 |
+
msg = (
|
| 327 |
+
f"Saving charts in {format!r} format requires the vl-convert-python package: "
|
| 328 |
+
"see https://altair-viz.github.io/user_guide/saving_charts.html#png-svg-and-pdf-format"
|
| 329 |
+
)
|
| 330 |
+
raise ValueError(msg)
|
| 331 |
+
else:
|
| 332 |
+
msg = f"Invalid conversion engine {engine!r}. Expected vl-convert"
|
| 333 |
+
raise ValueError(msg)
|
| 334 |
+
return normalized_engine
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
def _pngxy(data):
|
| 338 |
+
"""
|
| 339 |
+
read the (width, height) from a PNG header.
|
| 340 |
+
|
| 341 |
+
Taken from IPython.display
|
| 342 |
+
"""
|
| 343 |
+
ihdr = data.index(b"IHDR")
|
| 344 |
+
# next 8 bytes are width/height
|
| 345 |
+
return struct.unpack(">ii", data[ihdr + 4 : ihdr + 12])
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def preprocess_embed_options(embed_options: dict) -> dict:
|
| 349 |
+
"""
|
| 350 |
+
Preprocess embed options to a form compatible with Vega Embed.
|
| 351 |
+
|
| 352 |
+
Parameters
|
| 353 |
+
----------
|
| 354 |
+
embed_options : dict
|
| 355 |
+
The embed options dictionary to preprocess.
|
| 356 |
+
|
| 357 |
+
Returns
|
| 358 |
+
-------
|
| 359 |
+
embed_opts : dict
|
| 360 |
+
The preprocessed embed options dictionary.
|
| 361 |
+
"""
|
| 362 |
+
embed_options = (embed_options or {}).copy()
|
| 363 |
+
|
| 364 |
+
# Convert locale strings to objects compatible with Vega Embed using vl-convert
|
| 365 |
+
format_locale = embed_options.get("formatLocale", None)
|
| 366 |
+
if isinstance(format_locale, str):
|
| 367 |
+
vlc = import_vl_convert()
|
| 368 |
+
embed_options["formatLocale"] = vlc.get_format_locale(format_locale)
|
| 369 |
+
|
| 370 |
+
time_format_locale = embed_options.get("timeFormatLocale", None)
|
| 371 |
+
if isinstance(time_format_locale, str):
|
| 372 |
+
vlc = import_vl_convert()
|
| 373 |
+
embed_options["timeFormatLocale"] = vlc.get_time_format_locale(
|
| 374 |
+
time_format_locale
|
| 375 |
+
)
|
| 376 |
+
|
| 377 |
+
return embed_options
|
mgm/lib/python3.10/site-packages/altair/utils/plugin_registry.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
from functools import partial
|
| 5 |
+
from importlib.metadata import entry_points
|
| 6 |
+
from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, cast
|
| 7 |
+
|
| 8 |
+
from altair.utils.deprecation import deprecated_warn
|
| 9 |
+
|
| 10 |
+
if sys.version_info >= (3, 13):
|
| 11 |
+
from typing import TypeIs
|
| 12 |
+
else:
|
| 13 |
+
from typing_extensions import TypeIs
|
| 14 |
+
if sys.version_info >= (3, 12):
|
| 15 |
+
from typing import TypeAliasType
|
| 16 |
+
else:
|
| 17 |
+
from typing_extensions import TypeAliasType
|
| 18 |
+
|
| 19 |
+
if TYPE_CHECKING:
|
| 20 |
+
from types import TracebackType
|
| 21 |
+
|
| 22 |
+
T = TypeVar("T")
|
| 23 |
+
R = TypeVar("R")
|
| 24 |
+
Plugin = TypeAliasType("Plugin", Callable[..., R], type_params=(R,))
|
| 25 |
+
PluginT = TypeVar("PluginT", bound=Plugin[Any])
|
| 26 |
+
IsPlugin = Callable[[object], TypeIs[Plugin[Any]]]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _is_type(tp: type[T], /) -> Callable[[object], TypeIs[type[T]]]:
|
| 30 |
+
"""
|
| 31 |
+
Converts a type to guard function.
|
| 32 |
+
|
| 33 |
+
Added for compatibility with original `PluginRegistry` default.
|
| 34 |
+
"""
|
| 35 |
+
|
| 36 |
+
def func(obj: object, /) -> TypeIs[type[T]]:
|
| 37 |
+
return isinstance(obj, tp)
|
| 38 |
+
|
| 39 |
+
return func
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class NoSuchEntryPoint(Exception):
|
| 43 |
+
def __init__(self, group, name):
|
| 44 |
+
self.group = group
|
| 45 |
+
self.name = name
|
| 46 |
+
|
| 47 |
+
def __str__(self):
|
| 48 |
+
return f"No {self.name!r} entry point found in group {self.group!r}"
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class PluginEnabler(Generic[PluginT, R]):
|
| 52 |
+
"""
|
| 53 |
+
Context manager for enabling plugins.
|
| 54 |
+
|
| 55 |
+
This object lets you use enable() as a context manager to
|
| 56 |
+
temporarily enable a given plugin::
|
| 57 |
+
|
| 58 |
+
with plugins.enable("name"):
|
| 59 |
+
do_something() # 'name' plugin temporarily enabled
|
| 60 |
+
# plugins back to original state
|
| 61 |
+
"""
|
| 62 |
+
|
| 63 |
+
def __init__(
|
| 64 |
+
self, registry: PluginRegistry[PluginT, R], name: str, **options: Any
|
| 65 |
+
) -> None:
|
| 66 |
+
self.registry: PluginRegistry[PluginT, R] = registry
|
| 67 |
+
self.name: str = name
|
| 68 |
+
self.options: dict[str, Any] = options
|
| 69 |
+
self.original_state: dict[str, Any] = registry._get_state()
|
| 70 |
+
self.registry._enable(name, **options)
|
| 71 |
+
|
| 72 |
+
def __enter__(self) -> PluginEnabler[PluginT, R]:
|
| 73 |
+
return self
|
| 74 |
+
|
| 75 |
+
def __exit__(self, typ: type, value: Exception, traceback: TracebackType) -> None:
|
| 76 |
+
self.registry._set_state(self.original_state)
|
| 77 |
+
|
| 78 |
+
def __repr__(self) -> str:
|
| 79 |
+
return f"{type(self.registry).__name__}.enable({self.name!r})"
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class PluginRegistry(Generic[PluginT, R]):
|
| 83 |
+
"""
|
| 84 |
+
A registry for plugins.
|
| 85 |
+
|
| 86 |
+
This is a plugin registry that allows plugins to be loaded/registered
|
| 87 |
+
in two ways:
|
| 88 |
+
|
| 89 |
+
1. Through an explicit call to ``.register(name, value)``.
|
| 90 |
+
2. By looking for other Python packages that are installed and provide
|
| 91 |
+
a setuptools entry point group.
|
| 92 |
+
|
| 93 |
+
When you create an instance of this class, provide the name of the
|
| 94 |
+
entry point group to use::
|
| 95 |
+
|
| 96 |
+
reg = PluginRegister("my_entrypoint_group")
|
| 97 |
+
|
| 98 |
+
"""
|
| 99 |
+
|
| 100 |
+
# this is a mapping of name to error message to allow custom error messages
|
| 101 |
+
# in case an entrypoint is not found
|
| 102 |
+
entrypoint_err_messages: dict[str, str] = {}
|
| 103 |
+
|
| 104 |
+
# global settings is a key-value mapping of settings that are stored globally
|
| 105 |
+
# in the registry rather than passed to the plugins
|
| 106 |
+
_global_settings: dict[str, Any] = {}
|
| 107 |
+
|
| 108 |
+
def __init__(
|
| 109 |
+
self, entry_point_group: str = "", plugin_type: IsPlugin = callable
|
| 110 |
+
) -> None:
|
| 111 |
+
"""
|
| 112 |
+
Create a PluginRegistry for a named entry point group.
|
| 113 |
+
|
| 114 |
+
Parameters
|
| 115 |
+
----------
|
| 116 |
+
entry_point_group: str
|
| 117 |
+
The name of the entry point group.
|
| 118 |
+
plugin_type
|
| 119 |
+
A type narrowing function that will optionally be used for runtime
|
| 120 |
+
type checking loaded plugins.
|
| 121 |
+
|
| 122 |
+
References
|
| 123 |
+
----------
|
| 124 |
+
https://typing.readthedocs.io/en/latest/spec/narrowing.html
|
| 125 |
+
"""
|
| 126 |
+
self.entry_point_group: str = entry_point_group
|
| 127 |
+
self.plugin_type: IsPlugin
|
| 128 |
+
if plugin_type is not callable and isinstance(plugin_type, type):
|
| 129 |
+
msg: Any = (
|
| 130 |
+
f"Pass a callable `TypeIs` function to `plugin_type` instead.\n"
|
| 131 |
+
f"{type(self).__name__!r}(plugin_type)\n\n"
|
| 132 |
+
f"See also:\n"
|
| 133 |
+
f"https://typing.readthedocs.io/en/latest/spec/narrowing.html\n"
|
| 134 |
+
f"https://docs.astral.sh/ruff/rules/assert/"
|
| 135 |
+
)
|
| 136 |
+
deprecated_warn(msg, version="5.4.0")
|
| 137 |
+
self.plugin_type = cast(IsPlugin, _is_type(plugin_type))
|
| 138 |
+
else:
|
| 139 |
+
self.plugin_type = plugin_type
|
| 140 |
+
self._active: Plugin[R] | None = None
|
| 141 |
+
self._active_name: str = ""
|
| 142 |
+
self._plugins: dict[str, PluginT] = {}
|
| 143 |
+
self._options: dict[str, Any] = {}
|
| 144 |
+
self._global_settings: dict[str, Any] = self.__class__._global_settings.copy()
|
| 145 |
+
|
| 146 |
+
def register(self, name: str, value: PluginT | None) -> PluginT | None:
|
| 147 |
+
"""
|
| 148 |
+
Register a plugin by name and value.
|
| 149 |
+
|
| 150 |
+
This method is used for explicit registration of a plugin and shouldn't be
|
| 151 |
+
used to manage entry point managed plugins, which are auto-loaded.
|
| 152 |
+
|
| 153 |
+
Parameters
|
| 154 |
+
----------
|
| 155 |
+
name: str
|
| 156 |
+
The name of the plugin.
|
| 157 |
+
value: PluginType or None
|
| 158 |
+
The actual plugin object to register or None to unregister that plugin.
|
| 159 |
+
|
| 160 |
+
Returns
|
| 161 |
+
-------
|
| 162 |
+
plugin: PluginType or None
|
| 163 |
+
The plugin that was registered or unregistered.
|
| 164 |
+
"""
|
| 165 |
+
if value is None:
|
| 166 |
+
return self._plugins.pop(name, None)
|
| 167 |
+
elif self.plugin_type(value):
|
| 168 |
+
self._plugins[name] = value
|
| 169 |
+
return value
|
| 170 |
+
else:
|
| 171 |
+
msg = f"{type(value).__name__!r} is not compatible with {type(self).__name__!r}"
|
| 172 |
+
raise TypeError(msg)
|
| 173 |
+
|
| 174 |
+
def names(self) -> list[str]:
|
| 175 |
+
"""List the names of the registered and entry points plugins."""
|
| 176 |
+
exts = list(self._plugins.keys())
|
| 177 |
+
e_points = importlib_metadata_get(self.entry_point_group)
|
| 178 |
+
more_exts = [ep.name for ep in e_points]
|
| 179 |
+
exts.extend(more_exts)
|
| 180 |
+
return sorted(set(exts))
|
| 181 |
+
|
| 182 |
+
def _get_state(self) -> dict[str, Any]:
|
| 183 |
+
"""Return a dictionary representing the current state of the registry."""
|
| 184 |
+
return {
|
| 185 |
+
"_active": self._active,
|
| 186 |
+
"_active_name": self._active_name,
|
| 187 |
+
"_plugins": self._plugins.copy(),
|
| 188 |
+
"_options": self._options.copy(),
|
| 189 |
+
"_global_settings": self._global_settings.copy(),
|
| 190 |
+
}
|
| 191 |
+
|
| 192 |
+
def _set_state(self, state: dict[str, Any]) -> None:
|
| 193 |
+
"""Reset the state of the registry."""
|
| 194 |
+
assert set(state.keys()) == {
|
| 195 |
+
"_active",
|
| 196 |
+
"_active_name",
|
| 197 |
+
"_plugins",
|
| 198 |
+
"_options",
|
| 199 |
+
"_global_settings",
|
| 200 |
+
}
|
| 201 |
+
for key, val in state.items():
|
| 202 |
+
setattr(self, key, val)
|
| 203 |
+
|
| 204 |
+
def _enable(self, name: str, **options) -> None:
|
| 205 |
+
if name not in self._plugins:
|
| 206 |
+
try:
|
| 207 |
+
(ep,) = (
|
| 208 |
+
ep
|
| 209 |
+
for ep in importlib_metadata_get(self.entry_point_group)
|
| 210 |
+
if ep.name == name
|
| 211 |
+
)
|
| 212 |
+
except ValueError as err:
|
| 213 |
+
if name in self.entrypoint_err_messages:
|
| 214 |
+
raise ValueError(self.entrypoint_err_messages[name]) from err
|
| 215 |
+
else:
|
| 216 |
+
raise NoSuchEntryPoint(self.entry_point_group, name) from err
|
| 217 |
+
value = cast(PluginT, ep.load())
|
| 218 |
+
self.register(name, value)
|
| 219 |
+
self._active_name = name
|
| 220 |
+
self._active = self._plugins[name]
|
| 221 |
+
for key in set(options.keys()) & set(self._global_settings.keys()):
|
| 222 |
+
self._global_settings[key] = options.pop(key)
|
| 223 |
+
self._options = options
|
| 224 |
+
|
| 225 |
+
def enable(
|
| 226 |
+
self, name: str | None = None, **options: Any
|
| 227 |
+
) -> PluginEnabler[PluginT, R]:
|
| 228 |
+
"""
|
| 229 |
+
Enable a plugin by name.
|
| 230 |
+
|
| 231 |
+
This can be either called directly, or used as a context manager.
|
| 232 |
+
|
| 233 |
+
Parameters
|
| 234 |
+
----------
|
| 235 |
+
name : string (optional)
|
| 236 |
+
The name of the plugin to enable. If not specified, then use the
|
| 237 |
+
current active name.
|
| 238 |
+
**options :
|
| 239 |
+
Any additional parameters will be passed to the plugin as keyword
|
| 240 |
+
arguments
|
| 241 |
+
|
| 242 |
+
Returns
|
| 243 |
+
-------
|
| 244 |
+
PluginEnabler:
|
| 245 |
+
An object that allows enable() to be used as a context manager
|
| 246 |
+
"""
|
| 247 |
+
if name is None:
|
| 248 |
+
name = self.active
|
| 249 |
+
return PluginEnabler(self, name, **options)
|
| 250 |
+
|
| 251 |
+
    @property
    def active(self) -> str:
        """Return the name of the currently active plugin ("" when none has been enabled)."""
        return self._active_name
|
| 255 |
+
|
| 256 |
+
    @property
    def options(self) -> dict[str, Any]:
        """Return the current options dictionary (a live reference, not a copy)."""
        return self._options
|
| 260 |
+
|
| 261 |
+
def get(self) -> partial[R] | Plugin[R] | None:
|
| 262 |
+
"""Return the currently active plugin."""
|
| 263 |
+
if (func := self._active) and self.plugin_type(func):
|
| 264 |
+
return partial(func, **self._options) if self._options else func
|
| 265 |
+
elif self._active is not None:
|
| 266 |
+
msg = (
|
| 267 |
+
f"{type(self).__name__!r} requires all plugins to be callable objects, "
|
| 268 |
+
f"but {type(self._active).__name__!r} is not callable."
|
| 269 |
+
)
|
| 270 |
+
raise TypeError(msg)
|
| 271 |
+
elif TYPE_CHECKING:
|
| 272 |
+
# NOTE: The `None` return is implicit, but `mypy` isn't satisfied
|
| 273 |
+
# - `ruff` will factor out explicit `None` return
|
| 274 |
+
# - `pyright` has no issue
|
| 275 |
+
raise NotImplementedError
|
| 276 |
+
|
| 277 |
+
def __repr__(self) -> str:
|
| 278 |
+
return f"{type(self).__name__}(active={self.active!r}, registered={self.names()!r})"
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def importlib_metadata_get(group):
    """Return all entry points belonging to ``group``."""
    eps = entry_points()
    # 'select' was introduced in Python 3.10 while 'get' was deprecated.
    # Feature-detect via hasattr instead of checking the Python version so the
    # importlib_metadata backport (with its own deprecation cycle) also works.
    if hasattr(eps, "select"):
        return eps.select(group=group)  # pyright: ignore
    return eps.get(group, [])
|
mgm/lib/python3.10/site-packages/altair/utils/save.py
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import pathlib
|
| 5 |
+
import warnings
|
| 6 |
+
from typing import IO, TYPE_CHECKING, Any, Literal
|
| 7 |
+
|
| 8 |
+
from altair.utils._vegafusion_data import using_vegafusion
|
| 9 |
+
from altair.utils.deprecation import deprecated_warn
|
| 10 |
+
from altair.vegalite.v5.data import data_transformers
|
| 11 |
+
|
| 12 |
+
from .mimebundle import spec_to_mimebundle
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
from pathlib import Path
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def write_file_or_filename(
    fp: str | Path | IO,
    content: str | bytes,
    mode: str = "w",
    encoding: str | None = None,
) -> None:
    """Write ``content`` to ``fp``, which may be a path string, a Path, or a file-like object."""
    if not isinstance(fp, (str, pathlib.Path)):
        # Already an open file-like object: delegate directly.
        fp.write(content)
        return
    with pathlib.Path(fp).open(mode=mode, encoding=encoding) as out:
        out.write(content)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def set_inspect_format_argument(
    format: str | None, fp: str | Path | IO, inline: bool
) -> str:
    """Resolve the save format, inferring it from the file suffix when not given."""
    if format is None:
        if not isinstance(fp, (str, pathlib.Path)):
            # A file-like object carries no filename to infer from.
            msg = (
                "must specify file format: "
                "['png', 'svg', 'pdf', 'html', 'json', 'vega']"
            )
            raise ValueError(msg)
        format = pathlib.Path(fp).suffix.lstrip(".")

    if inline and format != "html":
        # `inline` only affects HTML output; warn that it is a no-op here.
        warnings.warn("inline argument ignored for non HTML formats.", stacklevel=1)

    return format
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def set_inspect_mode_argument(
    mode: Literal["vega-lite"] | None,
    embed_options: dict[str, Any],
    spec: dict[str, Any],
    vegalite_version: str | None,
) -> Literal["vega-lite"]:
    """Resolve and validate the rendering mode; only 'vega-lite' is supported."""
    if mode is None:
        if "mode" in embed_options:
            mode = embed_options["mode"]
        elif "$schema" in spec:
            # e.g. ".../schema/vega-lite/v5.json" -> "vega-lite"
            mode = spec["$schema"].split("/")[-2]
        else:
            mode = "vega-lite"

    if mode != "vega-lite":
        msg = f"mode must be 'vega-lite', not '{mode}'"
        raise ValueError(msg)

    # Reaching here implies mode == "vega-lite", which needs a schema version.
    if vegalite_version is None:
        msg = "must specify vega-lite version"
        raise ValueError(msg)

    return mode
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def save(
    chart,
    fp: str | Path | IO,
    vega_version: str | None,
    vegaembed_version: str | None,
    format: Literal["json", "html", "png", "svg", "pdf"] | None = None,
    mode: Literal["vega-lite"] | None = None,
    vegalite_version: str | None = None,
    embed_options: dict | None = None,
    json_kwds: dict | None = None,
    scale_factor: float = 1,
    engine: Literal["vl-convert"] | None = None,
    inline: bool = False,
    **kwargs,
) -> None:
    """
    Save a chart to file in a variety of formats.

    Supported formats are [json, html, png, svg, pdf]

    Parameters
    ----------
    chart : alt.Chart
        the chart instance to save
    fp : string filename, pathlib.Path or file-like object
        file to which to write the chart.
    format : string (optional)
        the format to write: one of ['json', 'html', 'png', 'svg', 'pdf'].
        If not specified, the format will be determined from the filename.
    mode : string (optional)
        Must be 'vega-lite'. If not specified, then infer the mode from
        the '$schema' property of the spec, or the ``opt`` dictionary.
        If it's not specified in either of those places, then use 'vega-lite'.
    vega_version : string (optional)
        For html output, the version of vega.js to use
    vegalite_version : string (optional)
        For html output, the version of vegalite.js to use
    vegaembed_version : string (optional)
        For html output, the version of vegaembed.js to use
    embed_options : dict (optional)
        The vegaEmbed options dictionary. Default is {}
        (See https://github.com/vega/vega-embed for details)
    json_kwds : dict (optional)
        Additional keyword arguments are passed to the output method
        associated with the specified format.
    scale_factor : float (optional)
        scale_factor to use to change size/resolution of png or svg output
    engine: string {'vl-convert'}
        the conversion engine to use for 'png', 'svg', and 'pdf' formats
    inline: bool (optional)
        If False (default), the required JavaScript libraries are loaded
        from a CDN location in the resulting html file.
        If True, the required JavaScript libraries are inlined into the resulting
        html file so that it will work without an internet connection.
        The vl-convert-python package is required if True.
    **kwargs :
        additional kwargs passed to spec_to_mimebundle.
    """
    # `webdriver` was only meaningful for the removed altair_saver backend;
    # accept-and-warn so old call sites keep working during the deprecation.
    if _ := kwargs.pop("webdriver", None):
        deprecated_warn(
            "The webdriver argument is not relevant for the new vl-convert engine which replaced altair_saver. "
            "The argument will be removed in a future release.",
            version="5.0.0",
        )

    json_kwds = json_kwds or {}
    encoding = kwargs.get("encoding", "utf-8")
    # May raise ValueError when the format is neither given nor inferable.
    format = set_inspect_format_argument(format, fp, inline)  # type: ignore[assignment]

    # Defined as a closure so the chart-to-dict conversion happens inside the
    # data-transformer context selected below.
    def perform_save() -> None:
        spec = chart.to_dict(context={"pre_transform": False})

        inner_mode = set_inspect_mode_argument(
            mode, embed_options or {}, spec, vegalite_version
        )

        if format == "json":
            json_spec = json.dumps(spec, **json_kwds)
            write_file_or_filename(fp, json_spec, mode="w", encoding=encoding)
        elif format == "html":
            if inline:
                # The "inline" HTML template embeds the JS libraries in the file.
                kwargs["template"] = "inline"
            mb_html = spec_to_mimebundle(
                spec=spec,
                format=format,
                mode=inner_mode,
                vega_version=vega_version,
                vegalite_version=vegalite_version,
                vegaembed_version=vegaembed_version,
                embed_options=embed_options,
                json_kwds=json_kwds,
                **kwargs,
            )
            write_file_or_filename(
                fp, mb_html["text/html"], mode="w", encoding=encoding
            )
        elif format == "png":
            mb_png = spec_to_mimebundle(
                spec=spec,
                format=format,
                mode=inner_mode,
                vega_version=vega_version,
                vegalite_version=vegalite_version,
                vegaembed_version=vegaembed_version,
                embed_options=embed_options,
                scale_factor=scale_factor,
                engine=engine,
                **kwargs,
            )
            # PNG bundles are (bundle, metadata) pairs; binary write, no encoding.
            write_file_or_filename(fp, mb_png[0]["image/png"], mode="wb")
        elif format in {"svg", "pdf", "vega"}:
            mb_any = spec_to_mimebundle(
                spec=spec,
                format=format,
                mode=inner_mode,
                vega_version=vega_version,
                vegalite_version=vegalite_version,
                vegaembed_version=vegaembed_version,
                embed_options=embed_options,
                scale_factor=scale_factor,
                engine=engine,
                **kwargs,
            )
            if format == "pdf":
                write_file_or_filename(fp, mb_any["application/pdf"], mode="wb")
            else:
                write_file_or_filename(
                    fp, mb_any["image/svg+xml"], mode="w", encoding=encoding
                )
        else:
            msg = f"Unsupported format: '{format}'"
            raise ValueError(msg)

    if using_vegafusion():
        # When the vegafusion data transformer is enabled, transforms will be
        # evaluated during save and the resulting data will be included in the
        # vega specification that is saved.
        with data_transformers.disable_max_rows():
            perform_save()
    else:
        # Temporarily turn off any data transformers so that all data is inlined
        # when calling chart.to_dict. This is relevant for vl-convert which cannot access
        # local json files which could be created by a json data transformer. Furthermore,
        # we don't exit the with statement until this function completed due to the issue
        # described at https://github.com/vega/vl-convert/issues/31
        with data_transformers.enable("default"), data_transformers.disable_max_rows():
            perform_save()
|
mgm/lib/python3.10/site-packages/altair/utils/schemapi.py
ADDED
|
@@ -0,0 +1,1616 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The contents of this file are automatically written by
|
| 2 |
+
# tools/generate_schema_wrapper.py. Do not modify directly.
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import contextlib
|
| 6 |
+
import copy
|
| 7 |
+
import datetime as dt
|
| 8 |
+
import inspect
|
| 9 |
+
import json
|
| 10 |
+
import sys
|
| 11 |
+
import textwrap
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
from collections.abc import Iterable, Iterator, Mapping, Sequence
|
| 14 |
+
from functools import partial
|
| 15 |
+
from importlib.metadata import version as importlib_version
|
| 16 |
+
from itertools import chain, zip_longest
|
| 17 |
+
from math import ceil
|
| 18 |
+
from typing import (
|
| 19 |
+
TYPE_CHECKING,
|
| 20 |
+
Any,
|
| 21 |
+
Final,
|
| 22 |
+
Generic,
|
| 23 |
+
Literal,
|
| 24 |
+
TypeVar,
|
| 25 |
+
Union,
|
| 26 |
+
cast,
|
| 27 |
+
overload,
|
| 28 |
+
)
|
| 29 |
+
from typing_extensions import TypeAlias
|
| 30 |
+
|
| 31 |
+
import jsonschema
|
| 32 |
+
import jsonschema.exceptions
|
| 33 |
+
import jsonschema.validators
|
| 34 |
+
import narwhals.stable.v1 as nw
|
| 35 |
+
from packaging.version import Version
|
| 36 |
+
|
| 37 |
+
# This leads to circular imports with the vegalite module. Currently, this works
|
| 38 |
+
# but be aware that when you access it in this script, the vegalite module might
|
| 39 |
+
# not yet be fully instantiated in case your code is being executed during import time
|
| 40 |
+
from altair import vegalite
|
| 41 |
+
|
| 42 |
+
if sys.version_info >= (3, 12):
|
| 43 |
+
from typing import Protocol, TypeAliasType, runtime_checkable
|
| 44 |
+
else:
|
| 45 |
+
from typing_extensions import Protocol, TypeAliasType, runtime_checkable
|
| 46 |
+
|
| 47 |
+
if TYPE_CHECKING:
|
| 48 |
+
from types import ModuleType
|
| 49 |
+
from typing import ClassVar
|
| 50 |
+
|
| 51 |
+
from referencing import Registry
|
| 52 |
+
|
| 53 |
+
from altair.typing import ChartType
|
| 54 |
+
|
| 55 |
+
if sys.version_info >= (3, 13):
|
| 56 |
+
from typing import TypeIs
|
| 57 |
+
else:
|
| 58 |
+
from typing_extensions import TypeIs
|
| 59 |
+
|
| 60 |
+
if sys.version_info >= (3, 11):
|
| 61 |
+
from typing import Never, Self
|
| 62 |
+
else:
|
| 63 |
+
from typing_extensions import Never, Self
|
| 64 |
+
_OptionalModule: TypeAlias = "ModuleType | None"
|
| 65 |
+
|
| 66 |
+
ValidationErrorList: TypeAlias = list[jsonschema.exceptions.ValidationError]
|
| 67 |
+
GroupedValidationErrors: TypeAlias = dict[str, ValidationErrorList]
|
| 68 |
+
|
| 69 |
+
# This URI is arbitrary and could be anything else. It just cannot be an empty
|
| 70 |
+
# string as we need to reference the schema registered in
|
| 71 |
+
# the referencing.Registry.
|
| 72 |
+
_VEGA_LITE_ROOT_URI: Final = "urn:vega-lite-schema"
|
| 73 |
+
|
| 74 |
+
# Ideally, jsonschema specification would be parsed from the current Vega-Lite
|
| 75 |
+
# schema instead of being hardcoded here as a default value.
|
| 76 |
+
# However, due to circular imports between this module and the altair.vegalite
|
| 77 |
+
# modules, this information is not yet available at this point as altair.vegalite
|
| 78 |
+
# is only partially loaded. The draft version which is used is unlikely to
|
| 79 |
+
# change often so it's ok to keep this. There is also a test which validates
|
| 80 |
+
# that this value is always the same as in the Vega-Lite schema.
|
| 81 |
+
_DEFAULT_JSON_SCHEMA_DRAFT_URL: Final = "http://json-schema.org/draft-07/schema#"
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
# If DEBUG_MODE is True, then schema objects are converted to dict and
|
| 85 |
+
# validated at creation time. This slows things down, particularly for
|
| 86 |
+
# larger specs, but leads to much more useful tracebacks for the user.
|
| 87 |
+
# Individual schema classes can override this by setting the
|
| 88 |
+
# class-level _class_is_valid_at_instantiation attribute to False
|
| 89 |
+
DEBUG_MODE: bool = True
|
| 90 |
+
|
| 91 |
+
jsonschema_version_str = importlib_version("jsonschema")
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def enable_debug_mode() -> None:
    """Turn on module-wide DEBUG_MODE (schema objects validated at instantiation)."""
    global DEBUG_MODE
    DEBUG_MODE = True
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def disable_debug_mode() -> None:
    """Turn off module-wide DEBUG_MODE (skips validation at instantiation)."""
    global DEBUG_MODE
    DEBUG_MODE = False
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@contextlib.contextmanager
def debug_mode(arg: bool) -> Iterator[None]:
    """Temporarily set DEBUG_MODE to ``arg``, restoring the previous value on exit."""
    global DEBUG_MODE
    original = DEBUG_MODE
    DEBUG_MODE = arg
    try:
        yield
    finally:
        # Restore even when the managed block raised.
        DEBUG_MODE = original
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
# With raise_error=True (the default), validation failure raises, so the
# function never returns normally on error paths -> typed as Never.
@overload
def validate_jsonschema(
    spec: Any,
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = ...,
    *,
    raise_error: Literal[True] = ...,
) -> Never: ...


# With raise_error=False, the representative ValidationError (or None for a
# valid spec) is returned instead of being raised.
@overload
def validate_jsonschema(
    spec: Any,
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = ...,
    *,
    raise_error: Literal[False],
) -> jsonschema.exceptions.ValidationError | None: ...
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def validate_jsonschema(
    spec,
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = None,
    *,
    raise_error: bool = True,
) -> jsonschema.exceptions.ValidationError | None:
    """
    Validate ``spec`` against ``schema`` in the context of ``rootschema``.

    Any validation errors are deduplicated and narrowed down to the most
    relevant ones, then either raised or returned depending on ``raise_error``.
    """
    errors = _get_errors_from_spec(spec, schema, rootschema=rootschema)
    if not errors:
        return None

    grouped = _deduplicate_errors(
        _subset_to_most_specific_json_paths(
            _group_errors_by_json_path(_get_leaves_of_error_tree(errors))
        )
    )
    # Nothing special about the first error; we just need one to raise/return.
    representative: Any = next(iter(grouped.values()))[0]
    # Attach the full grouping so SchemaValidationError can later build a more
    # helpful message.  Setting an ad-hoc attribute is not type-clean, but the
    # downstream code relies on it; a refactor to return-only would be better.
    representative._all_errors = grouped
    if raise_error:
        raise representative
    return representative
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def _get_errors_from_spec(
    spec: dict[str, Any],
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = None,
) -> ValidationErrorList:
    """
    Uses the relevant jsonschema validator to validate the passed in spec against the schema using the rootschema to resolve references.

    The schema and rootschema themselves are not validated but instead considered as valid.

    Parameters
    ----------
    spec
        The chart specification to validate.
    schema
        The (sub)schema to validate against.
    rootschema
        The full schema used to resolve ``$ref`` references; when ``None``,
        ``schema`` itself is used where a root is needed.

    Returns
    -------
    The flat list of errors produced by ``validator.iter_errors``.
    """
    # We don't use jsonschema.validate as this would validate the schema itself.
    # Instead, we pass the schema directly to the validator class. This is done for
    # two reasons: The schema comes from Vega-Lite and is not based on the user
    # input, therefore there is no need to validate it in the first place. Furthermore,
    # the "uri-reference" format checker fails for some of the references as URIs in
    # "$ref" are not encoded,
    # e.g. '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef,
    # (Gradient|string|null)>' would be a valid $ref in a Vega-Lite schema but
    # it is not a valid URI reference due to the characters such as '<'.

    # Pick the validator class matching the JSON Schema draft that the
    # (root)schema declares via "$schema".
    json_schema_draft_url = _get_json_schema_draft_url(rootschema or schema)
    validator_cls = jsonschema.validators.validator_for(
        {"$schema": json_schema_draft_url}
    )
    validator_kwargs: dict[str, Any] = {}
    if hasattr(validator_cls, "FORMAT_CHECKER"):
        validator_kwargs["format_checker"] = validator_cls.FORMAT_CHECKER

    # jsonschema>=4.18 uses the `referencing` library for reference resolution;
    # older versions use the (now deprecated) RefResolver.
    if _use_referencing_library():
        schema = _prepare_references_in_schema(schema)
        validator_kwargs["registry"] = _get_referencing_registry(
            rootschema or schema, json_schema_draft_url
        )

    else:
        # No resolver is necessary if the schema is already the full schema
        validator_kwargs["resolver"] = (
            jsonschema.RefResolver.from_schema(rootschema)
            if rootschema is not None
            else None
        )

    validator = validator_cls(schema, **validator_kwargs)
    errors = list(validator.iter_errors(spec))
    return errors
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def _get_json_schema_draft_url(schema: dict[str, Any]) -> str:
    """Return the JSON Schema draft URI declared by *schema*, falling back to the package default."""
    try:
        return schema["$schema"]
    except KeyError:
        return _DEFAULT_JSON_SCHEMA_DRAFT_URL
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def _use_referencing_library() -> bool:
    """In version 4.18.0, the jsonschema package deprecated RefResolver in favor of the referencing library."""
    installed = Version(jsonschema_version_str)
    return installed >= Version("4.18")
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def _prepare_references_in_schema(schema: dict[str, Any]) -> dict[str, Any]:
    """Return a deep copy of *schema* in which every ``$ref`` value is prefixed with ``_VEGA_LITE_ROOT_URI``."""
    # Work on a deep copy so that the $ref values of the original schema stay
    # untouched; the input may still be referenced by an Altair class
    # `_schema` attribute.
    schema = copy.deepcopy(schema)

    def _rewrite(node: dict[str, Any]) -> dict[str, Any]:
        # $ref values can only be nested inside json objects (Python dicts)
        # and json arrays (Python lists), as `node` comes from the Vega-Lite
        # json schema — those are the only containers we need to walk.
        for key, value in node.items():
            if key == "$ref":
                node[key] = _VEGA_LITE_ROOT_URI + node[key]
            elif isinstance(value, dict):
                node[key] = _rewrite(value)
            elif isinstance(value, list):
                node[key] = [
                    _rewrite(item) if isinstance(item, dict) else item
                    for item in value
                ]
        return node

    return _rewrite(schema)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
# We do not annotate the return value here as the referencing library is not always
# available and this function is only executed in those cases.
def _get_referencing_registry(
    rootschema: dict[str, Any], json_schema_draft_url: str | None = None
) -> Registry:
    """Build a ``referencing`` Registry that exposes *rootschema* under ``_VEGA_LITE_ROOT_URI``."""
    # Referencing is a dependency of newer jsonschema versions, starting with the
    # version that is specified in _use_referencing_library and we therefore
    # can expect that it is installed if the function returns True.
    # We ignore 'import' mypy errors which happen when the referencing library
    # is not installed. That's ok as in these cases this function is not called.
    # We also have to ignore 'unused-ignore' errors as mypy raises those in case
    # referencing is installed.
    import referencing  # type: ignore[import,unused-ignore]
    import referencing.jsonschema  # type: ignore[import,unused-ignore]

    if json_schema_draft_url is None:
        json_schema_draft_url = _get_json_schema_draft_url(rootschema)

    # Select the JSON Schema specification matching the declared draft, wrap
    # the root schema as a resource, and register it under the synthetic URI
    # that _prepare_references_in_schema prefixes onto every $ref.
    specification = referencing.jsonschema.specification_with(json_schema_draft_url)
    resource = specification.create_resource(rootschema)
    return referencing.Registry().with_resource(
        uri=_VEGA_LITE_ROOT_URI, resource=resource
    )
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
def _json_path(err: jsonschema.exceptions.ValidationError) -> str:
|
| 290 |
+
"""
|
| 291 |
+
Drop in replacement for the .json_path property of the jsonschema ValidationError class.
|
| 292 |
+
|
| 293 |
+
This is not available as property for ValidationError with jsonschema<4.0.1.
|
| 294 |
+
|
| 295 |
+
More info, see https://github.com/vega/altair/issues/3038.
|
| 296 |
+
"""
|
| 297 |
+
path = "$"
|
| 298 |
+
for elem in err.absolute_path:
|
| 299 |
+
if isinstance(elem, int):
|
| 300 |
+
path += "[" + str(elem) + "]"
|
| 301 |
+
else:
|
| 302 |
+
path += "." + elem
|
| 303 |
+
return path
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def _group_errors_by_json_path(
|
| 307 |
+
errors: ValidationErrorList,
|
| 308 |
+
) -> GroupedValidationErrors:
|
| 309 |
+
"""
|
| 310 |
+
Groups errors by the `json_path` attribute of the jsonschema ValidationError class.
|
| 311 |
+
|
| 312 |
+
This attribute contains the path to the offending element within
|
| 313 |
+
a chart specification and can therefore be considered as an identifier of an
|
| 314 |
+
'issue' in the chart that needs to be fixed.
|
| 315 |
+
"""
|
| 316 |
+
errors_by_json_path = defaultdict(list)
|
| 317 |
+
for err in errors:
|
| 318 |
+
err_key = getattr(err, "json_path", _json_path(err))
|
| 319 |
+
errors_by_json_path[err_key].append(err)
|
| 320 |
+
return dict(errors_by_json_path)
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
def _get_leaves_of_error_tree(
|
| 324 |
+
errors: ValidationErrorList,
|
| 325 |
+
) -> ValidationErrorList:
|
| 326 |
+
"""
|
| 327 |
+
For each error in `errors`, it traverses down the "error tree" that is generated by the jsonschema library to find and return all "leaf" errors.
|
| 328 |
+
|
| 329 |
+
These are errors which have no further errors that caused it and so they are the most specific errors
|
| 330 |
+
with the most specific error messages.
|
| 331 |
+
"""
|
| 332 |
+
leaves: ValidationErrorList = []
|
| 333 |
+
for err in errors:
|
| 334 |
+
if err.context:
|
| 335 |
+
# This means that the error `err` was caused by errors in subschemas.
|
| 336 |
+
# The list of errors from the subschemas are available in the property
|
| 337 |
+
# `context`.
|
| 338 |
+
leaves.extend(_get_leaves_of_error_tree(err.context))
|
| 339 |
+
else:
|
| 340 |
+
leaves.append(err)
|
| 341 |
+
return leaves
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def _subset_to_most_specific_json_paths(
    errors_by_json_path: GroupedValidationErrors,
) -> GroupedValidationErrors:
    """
    Remove key (json path), value (errors) pairs where the json path is fully contained in another json path.

    For example if `errors_by_json_path` has two keys, `$.encoding.X` and
    `$.encoding.X.tooltip`, then the first one is removed and only the second
    one is returned, under the assumption that more specific json paths give
    more helpful error messages to the user.
    """
    all_paths = list(errors_by_json_path)
    return {
        path: errs
        for path, errs in errors_by_json_path.items()
        if not _contained_at_start_of_one_of_other_values(path, all_paths)
    }
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def _contained_at_start_of_one_of_other_values(x: str, values: Sequence[str]) -> bool:
|
| 365 |
+
# Does not count as "contained at start of other value" if the values are
|
| 366 |
+
# the same. These cases should be handled separately
|
| 367 |
+
return any(value.startswith(x) for value in values if x != value)
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
def _deduplicate_errors(
    grouped_errors: GroupedValidationErrors,
) -> GroupedValidationErrors:
    """
    Some errors have very similar error messages or are just in general not helpful for a user.

    This function removes as many of these cases as possible and
    can be extended over time to handle new cases that come up.

    Parameters
    ----------
    grouped_errors
        Errors grouped by json path, as produced by `_group_errors_by_json_path`.

    Returns
    -------
    The same grouping with each group's error list deduplicated.
    """
    grouped_errors_deduplicated: GroupedValidationErrors = {}
    for json_path, element_errors in grouped_errors.items():
        errors_by_validator = _group_errors_by_validator(element_errors)

        # Validators with a bespoke deduplication strategy; all other
        # validators only get the generic message-based deduplication below.
        deduplication_functions = {
            "enum": _deduplicate_enum_errors,
            "additionalProperties": _deduplicate_additional_properties_errors,
        }
        deduplicated_errors: ValidationErrorList = []
        for validator, errors in errors_by_validator.items():
            deduplication_func = deduplication_functions.get(validator)
            if deduplication_func is not None:
                errors = deduplication_func(errors)
            deduplicated_errors.extend(_deduplicate_by_message(errors))

        # Removes any ValidationError "'value' is a required property" as these
        # errors are unlikely to be the relevant ones for the user. They come from
        # validation against a schema definition where the output of `alt.value`
        # would be valid. However, if a user uses `alt.value`, the `value` keyword
        # is included automatically from that function and so it's unlikely
        # that this was what the user intended if the keyword is not present
        # in the first place.
        deduplicated_errors = [
            err for err in deduplicated_errors if not _is_required_value_error(err)
        ]

        grouped_errors_deduplicated[json_path] = deduplicated_errors
    return grouped_errors_deduplicated
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
def _is_required_value_error(err: jsonschema.exceptions.ValidationError) -> bool:
|
| 410 |
+
return err.validator == "required" and err.validator_value == ["value"]
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
def _group_errors_by_validator(errors: ValidationErrorList) -> GroupedValidationErrors:
|
| 414 |
+
"""
|
| 415 |
+
Groups the errors by the json schema "validator" that casued the error.
|
| 416 |
+
|
| 417 |
+
For example if the error is that a value is not one of an enumeration in the json schema
|
| 418 |
+
then the "validator" is `"enum"`, if the error is due to an unknown property that
|
| 419 |
+
was set although no additional properties are allowed then "validator" is
|
| 420 |
+
`"additionalProperties`, etc.
|
| 421 |
+
"""
|
| 422 |
+
errors_by_validator: defaultdict[str, ValidationErrorList] = defaultdict(list)
|
| 423 |
+
for err in errors:
|
| 424 |
+
# Ignore mypy error as err.validator as it wrongly sees err.validator
|
| 425 |
+
# as of type Optional[Validator] instead of str which it is according
|
| 426 |
+
# to the documentation and all tested cases
|
| 427 |
+
errors_by_validator[err.validator].append(err) # type: ignore[index]
|
| 428 |
+
return dict(errors_by_validator)
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def _deduplicate_enum_errors(errors: ValidationErrorList) -> ValidationErrorList:
    """
    Deduplicate enum errors by removing the errors where the allowed values are a subset of another error.

    For example, if one error has `validator_value` (i.e. accepted values)
    ["A", "B"] and another has ["A", "B", "C"], the first one is removed and
    only the error with ["A", "B", "C"] is kept.
    """
    if len(errors) <= 1:
        return errors
    # Values (and therefore `validator_value`) of an enum are always arrays,
    # see https://json-schema.org/understanding-json-schema/reference/generic.html#enumerated-values
    # which is why joining them is safe.
    joined = [",".join(err.validator_value) for err in errors]  # type: ignore
    kept: ValidationErrorList = []
    for joined_values, err in zip(joined, errors):
        if not _contained_at_start_of_one_of_other_values(joined_values, joined):
            kept.append(err)
    return kept
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
def _deduplicate_additional_properties_errors(
|
| 453 |
+
errors: ValidationErrorList,
|
| 454 |
+
) -> ValidationErrorList:
|
| 455 |
+
"""
|
| 456 |
+
If there are multiple additional property errors it usually means that the offending element was validated against multiple schemas and its parent is a common anyOf validator.
|
| 457 |
+
|
| 458 |
+
The error messages produced from these cases are usually
|
| 459 |
+
very similar and we just take the shortest one. For example,
|
| 460 |
+
the following 3 errors are raised for the `unknown` channel option in
|
| 461 |
+
`alt.X("variety", unknown=2)`:
|
| 462 |
+
- "Additional properties are not allowed ('unknown' was unexpected)"
|
| 463 |
+
- "Additional properties are not allowed ('field', 'unknown' were unexpected)"
|
| 464 |
+
- "Additional properties are not allowed ('field', 'type', 'unknown' were unexpected)".
|
| 465 |
+
"""
|
| 466 |
+
if len(errors) > 1:
|
| 467 |
+
# Test if all parent errors are the same anyOf error and only do
|
| 468 |
+
# the prioritization in these cases. Can't think of a chart spec where this
|
| 469 |
+
# would not be the case but still allow for it below to not break anything.
|
| 470 |
+
parent = errors[0].parent
|
| 471 |
+
if (
|
| 472 |
+
parent is not None
|
| 473 |
+
and parent.validator == "anyOf"
|
| 474 |
+
# Use [1:] as don't have to check for first error as it was used
|
| 475 |
+
# above to define `parent`
|
| 476 |
+
and all(err.parent is parent for err in errors[1:])
|
| 477 |
+
):
|
| 478 |
+
errors = [min(errors, key=lambda x: len(x.message))]
|
| 479 |
+
return errors
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def _deduplicate_by_message(errors: ValidationErrorList) -> ValidationErrorList:
|
| 483 |
+
"""Deduplicate errors by message. This keeps the original order in case it was chosen intentionally."""
|
| 484 |
+
return list({e.message: e for e in errors}.values())
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def _subclasses(cls: type[Any]) -> Iterator[type[Any]]:
|
| 488 |
+
"""Breadth-first sequence of all classes which inherit from cls."""
|
| 489 |
+
seen = set()
|
| 490 |
+
current_set = {cls}
|
| 491 |
+
while current_set:
|
| 492 |
+
seen |= current_set
|
| 493 |
+
current_set = set.union(*(set(cls.__subclasses__()) for cls in current_set))
|
| 494 |
+
for cls in current_set - seen:
|
| 495 |
+
yield cls
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
def _from_array_like(obj: Iterable[Any], /) -> list[Any]:
    """Convert *obj* into a plain Python list."""
    try:
        # `strict=True` makes `nw.from_native` raise TypeError for inputs it
        # cannot wrap as a series, in which case we fall back to plain
        # iteration below.
        return nw.from_native(obj, strict=True, series_only=True).to_list()
    except TypeError:
        return list(obj)
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def _from_date_datetime(obj: dt.date | dt.datetime, /) -> dict[str, Any]:
|
| 507 |
+
"""
|
| 508 |
+
Parse native `datetime.(date|datetime)` into a `DateTime`_ schema.
|
| 509 |
+
|
| 510 |
+
.. _DateTime:
|
| 511 |
+
https://vega.github.io/vega-lite/docs/datetime.html
|
| 512 |
+
"""
|
| 513 |
+
result: dict[str, Any] = {"year": obj.year, "month": obj.month, "date": obj.day}
|
| 514 |
+
if isinstance(obj, dt.datetime):
|
| 515 |
+
if obj.time() != dt.time.min:
|
| 516 |
+
us = obj.microsecond
|
| 517 |
+
ms = us if us == 0 else us // 1_000
|
| 518 |
+
result.update(
|
| 519 |
+
hours=obj.hour, minutes=obj.minute, seconds=obj.second, milliseconds=ms
|
| 520 |
+
)
|
| 521 |
+
if tzinfo := obj.tzinfo:
|
| 522 |
+
if tzinfo is dt.timezone.utc:
|
| 523 |
+
result["utc"] = True
|
| 524 |
+
else:
|
| 525 |
+
msg = (
|
| 526 |
+
f"Unsupported timezone {tzinfo!r}.\n"
|
| 527 |
+
"Only `'UTC'` or naive (local) datetimes are permitted.\n"
|
| 528 |
+
"See https://altair-viz.github.io/user_guide/generated/core/altair.DateTime.html"
|
| 529 |
+
)
|
| 530 |
+
raise TypeError(msg)
|
| 531 |
+
return result
|
| 532 |
+
|
| 533 |
+
|
| 534 |
+
def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) -> Any:  # noqa: C901
    """
    Convert an object to a dict representation.

    Parameters
    ----------
    obj
        The object to convert; dispatched on its runtime type below.
    context
        Passed through to ``SchemaBase.to_dict`` for nested schema objects.
    np_opt, pd_opt
        Presumably the optionally-imported numpy/pandas modules, or ``None``
        when unavailable (inferred from the `np.ndarray` / `pd_opt.Timestamp`
        usage below — TODO confirm at the call sites).
    """
    if np_opt is not None:
        np = np_opt
        if isinstance(obj, np.ndarray):
            # Convert element-wise so nested schema objects are handled too.
            return [_todict(v, context, np_opt, pd_opt) for v in obj]
        elif isinstance(obj, np.number):
            return float(obj)
        elif isinstance(obj, np.datetime64):
            result = str(obj)
            if "T" not in result:
                # See https://github.com/vega/altair/issues/1027 for why this is necessary.
                result += "T00:00:00"
            return result
    if isinstance(obj, SchemaBase):
        # Validation happens at the top level, not for every nested object.
        return obj.to_dict(validate=False, context=context)
    elif isinstance(obj, (list, tuple)):
        return [_todict(v, context, np_opt, pd_opt) for v in obj]
    elif isinstance(obj, dict):
        # Undefined marks parameters that were never set; drop them entirely.
        return {
            k: _todict(v, context, np_opt, pd_opt)
            for k, v in obj.items()
            if v is not Undefined
        }
    elif isinstance(obj, SchemaLike):
        return obj.to_dict()
    elif pd_opt is not None and isinstance(obj, pd_opt.Timestamp):
        return pd_opt.Timestamp(obj).isoformat()
    elif _is_iterable(obj, exclude=(str, bytes)):
        # Generic iterables (e.g. series-like objects) become lists first.
        return _todict(_from_array_like(obj), context, np_opt, pd_opt)
    elif isinstance(obj, dt.date):
        return _from_date_datetime(obj)
    else:
        return obj
|
| 568 |
+
|
| 569 |
+
|
| 570 |
+
def _resolve_references(
    schema: dict[str, Any], rootschema: dict[str, Any] | None = None
) -> dict[str, Any]:
    """
    Resolve schema references until there is no $ref anymore in the top-level of the dictionary.

    Uses the ``referencing`` library on jsonschema>=4.18 and the deprecated
    ``jsonschema.RefResolver`` otherwise; ``rootschema`` (or ``schema`` itself
    as a fallback) provides the definitions that ``$ref`` values point into.
    """
    if _use_referencing_library():
        registry = _get_referencing_registry(rootschema or schema)
        # Using a different variable name to show that this is not the
        # jsonschema.RefResolver but instead a Resolver from the referencing
        # library
        referencing_resolver = registry.resolver()
        while "$ref" in schema:
            # _VEGA_LITE_ROOT_URI is the URI the root schema was registered
            # under in the registry.
            schema = referencing_resolver.lookup(
                _VEGA_LITE_ROOT_URI + schema["$ref"]
            ).contents
    else:
        resolver = jsonschema.RefResolver.from_schema(rootschema or schema)
        while "$ref" in schema:
            with resolver.resolving(schema["$ref"]) as resolved:
                schema = resolved
    return schema
|
| 590 |
+
|
| 591 |
+
|
| 592 |
+
class SchemaValidationError(jsonschema.ValidationError):
    def __init__(self, obj: SchemaBase, err: jsonschema.ValidationError) -> None:
        """
        A wrapper for ``jsonschema.ValidationError`` with friendlier traceback.

        Parameters
        ----------
        obj
            The instance that failed ``self.validate(...)``.
        err
            The original ``ValidationError``.

        Notes
        -----
        We do not raise `from err` as else the resulting traceback is very long
        as it contains part of the Vega-Lite schema.

        It would also first show the less helpful `ValidationError` instead of
        the more user friendly `SchemaValidationError`.
        """
        super().__init__(**err._contents())
        # The object whose validation failed; used as a fallback when no more
        # specific Altair class can be derived from the error path.
        self.obj = obj
        # All deduplicated errors grouped by json path, as attached by
        # `validate_jsonschema`; fall back to a single-entry mapping when the
        # original error carries no `_all_errors` attribute.
        self._errors: GroupedValidationErrors = getattr(
            err, "_all_errors", {getattr(err, "json_path", _json_path(err)): [err]}
        )
        # This is the message from err
        self._original_message = self.message
        self.message = self._get_message()

    def __str__(self) -> str:
        return self.message

    def _get_message(self) -> str:
        """Assemble the final user-facing message from the grouped errors."""

        def indent_second_line_onwards(message: str, indent: int = 4) -> str:
            # Indent every non-empty line except the first so multi-line
            # messages line up under their "Error N:" prefix.
            modified_lines: list[str] = []
            for idx, line in enumerate(message.split("\n")):
                if idx > 0 and len(line) > 0:
                    line = " " * indent + line
                modified_lines.append(line)
            return "\n".join(modified_lines)

        error_messages: list[str] = []
        # Only show a maximum of 3 errors as else the final message returned by this
        # method could get very long.
        for errors in list(self._errors.values())[:3]:
            error_messages.append(self._get_message_for_errors_group(errors))

        message = ""
        if len(error_messages) > 1:
            error_messages = [
                indent_second_line_onwards(f"Error {error_id}: {m}")
                for error_id, m in enumerate(error_messages, start=1)
            ]
            message += "Multiple errors were found.\n\n"
        message += "\n\n".join(error_messages)
        return message

    def _get_message_for_errors_group(
        self,
        errors: ValidationErrorList,
    ) -> str:
        """Format one group of errors (sharing a json path) into a message."""
        if errors[0].validator == "additionalProperties":
            # During development, we only found cases where an additionalProperties
            # error was raised if that was the only error for the offending instance
            # as identifiable by the json path. Therefore, we just check here the first
            # error. However, other constellations might exist in which case
            # this should be adapted so that other error messages are shown as well.
            message = self._get_additional_properties_error_message(errors[0])
        else:
            message = self._get_default_error_message(errors=errors)

        return message.strip()

    def _get_additional_properties_error_message(
        self,
        error: jsonschema.exceptions.ValidationError,
    ) -> str:
        """Output all existing parameters when an unknown parameter is specified."""
        altair_cls = self._get_altair_class_for_error(error)
        param_dict_keys = inspect.signature(altair_cls).parameters.keys()
        param_names_table = self._format_params_as_table(param_dict_keys)

        # Error messages for these errors look like this:
        # "Additional properties are not allowed ('unknown' was unexpected)"
        # Line below extracts "unknown" from this string
        parameter_name = error.message.split("('")[-1].split("'")[0]
        message = f"""\
`{altair_cls.__name__}` has no parameter named '{parameter_name}'

Existing parameter names are:
{param_names_table}
See the help for `{altair_cls.__name__}` to read the full description of these parameters"""
        return message

    def _get_altair_class_for_error(
        self, error: jsonschema.exceptions.ValidationError
    ) -> type[SchemaBase]:
        """
        Try to get the lowest class possible in the chart hierarchy so it can be displayed in the error message.

        This should lead to more informative error messages pointing the user closer to the source of the issue.
        """
        # Walk the error path from the most specific element outwards and try
        # to match a path element to a class in the `vegalite` namespace.
        for prop_name in reversed(error.absolute_path):
            # Check if str as e.g. first item can be a 0
            if isinstance(prop_name, str):
                potential_class_name = prop_name[0].upper() + prop_name[1:]
                cls = getattr(vegalite, potential_class_name, None)
                if cls is not None:
                    break
        else:
            # Did not find a suitable class based on traversing the path so we fall
            # back on the class of the top-level object which created
            # the SchemaValidationError
            cls = self.obj.__class__
        return cls

    @staticmethod
    def _format_params_as_table(param_dict_keys: Iterable[str]) -> str:
        """Format param names into a table so that they are easier to read."""
        param_names: tuple[str, ...]
        name_lengths: tuple[int, ...]
        # NOTE(review): this unpacking raises if every key is filtered out
        # ("kwds"/"self" only) — presumably callers always pass at least one
        # real parameter name; confirm before reusing elsewhere.
        param_names, name_lengths = zip(
            *[
                (name, len(name))
                for name in param_dict_keys
                if name not in {"kwds", "self"}
            ]
        )
        # Worst case scenario with the same longest param name in the same
        # row for all columns
        max_name_length = max(name_lengths)
        max_column_width = 80
        # Output a square table if not too big (since it is easier to read)
        num_param_names = len(param_names)
        square_columns = int(ceil(num_param_names**0.5))
        columns = min(max_column_width // max_name_length, square_columns)

        # Compute roughly equal column heights to evenly divide the param names
        def split_into_equal_parts(n: int, p: int) -> list[int]:
            return [n // p + 1] * (n % p) + [n // p] * (p - n % p)

        column_heights = split_into_equal_parts(num_param_names, columns)

        # Section the param names into columns and compute their widths
        param_names_columns: list[tuple[str, ...]] = []
        column_max_widths: list[int] = []
        last_end_idx: int = 0
        for ch in column_heights:
            param_names_columns.append(param_names[last_end_idx : last_end_idx + ch])
            column_max_widths.append(
                max(len(param_name) for param_name in param_names_columns[-1])
            )
            last_end_idx = ch + last_end_idx

        # Transpose the param name columns into rows to facilitate looping
        param_names_rows: list[tuple[str, ...]] = []
        for li in zip_longest(*param_names_columns, fillvalue=""):
            param_names_rows.append(li)
        # Build the table as a string by iterating over and formatting the rows
        param_names_table: str = ""
        for param_names_row in param_names_rows:
            for num, param_name in enumerate(param_names_row):
                # Set column width based on the longest param in the column
                max_name_length_column = column_max_widths[num]
                column_pad = 3
                param_names_table += "{:<{}}".format(
                    param_name, max_name_length_column + column_pad
                )
                # Insert newlines and spacing after the last element in each row
                if num == (len(param_names_row) - 1):
                    param_names_table += "\n"
        return param_names_table

    def _get_default_error_message(
        self,
        errors: ValidationErrorList,
    ) -> str:
        """Build a summary of the invalid value and bullet points of valid alternatives."""
        bullet_points: list[str] = []
        errors_by_validator = _group_errors_by_validator(errors)
        if "enum" in errors_by_validator:
            for error in errors_by_validator["enum"]:
                bullet_points.append(f"one of {error.validator_value}")

        if "type" in errors_by_validator:
            types = [f"'{err.validator_value}'" for err in errors_by_validator["type"]]
            point = "of type "
            if len(types) == 1:
                point += types[0]
            elif len(types) == 2:
                point += f"{types[0]} or {types[1]}"
            else:
                point += ", ".join(types[:-1]) + f", or {types[-1]}"
            bullet_points.append(point)

        # It should not matter which error is specifically used as they are all
        # about the same offending instance (i.e. invalid value), so we can just
        # take the first one
        error = errors[0]
        # Add a summary line when parameters are passed an invalid value
        # For example: "'asdf' is an invalid value for `stack`
        message = f"'{error.instance}' is an invalid value"
        if error.absolute_path:
            message += f" for `{error.absolute_path[-1]}`"

        # Add bullet points
        if len(bullet_points) == 0:
            message += ".\n\n"
        elif len(bullet_points) == 1:
            message += f". Valid values are {bullet_points[0]}.\n\n"
        else:
            # We don't use .capitalize below to make the first letter uppercase
            # as that makes the rest of the message lowercase
            bullet_points = [point[0].upper() + point[1:] for point in bullet_points]
            message += ". Valid values are:\n\n"
            message += "\n".join([f"- {point}" for point in bullet_points])
            message += "\n\n"

        # Add unformatted messages of any remaining errors which were not
        # considered so far. This is not expected to be used but more exists
        # as a fallback for cases which were not known during development.
        it = (
            "\n".join(e.message for e in errors)
            for validator, errors in errors_by_validator.items()
            if validator not in {"enum", "type"}
        )
        message += "".join(it)
        return message
|
| 819 |
+
|
| 820 |
+
|
| 821 |
+
# Covariant type variable constrained to the three JSON Schema "type" values
# that SchemaLike declarations below can carry.
_JSON_VT_co = TypeVar(
    "_JSON_VT_co",
    Literal["string"],
    Literal["object"],
    Literal["array"],
    covariant=True,
)
"""
One of a subset of JSON Schema `primitive types`_:

    ["string", "object", "array"]

.. _primitive types:
    https://json-schema.org/draft-07/json-schema-validation#rfc.section.6.1.1
"""

_TypeMap = TypeAliasType(
    "_TypeMap", Mapping[Literal["type"], _JSON_VT_co], type_params=(_JSON_VT_co,)
)
"""
A single item JSON Schema using the `type`_ keyword.

This may represent **one of**:

    {"type": "string"}
    {"type": "object"}
    {"type": "array"}

.. _type:
    https://json-schema.org/understanding-json-schema/reference/type
"""

# NOTE: Type checkers want opposing things:
# - `mypy` : Covariant type variable "_JSON_VT_co" used in protocol where invariant one is expected [misc]
# - `pyright`: Type variable "_JSON_VT_co" used in generic protocol "SchemaLike" should be covariant [reportInvalidTypeVarUse]
# Siding with `pyright` as this is consistent with https://github.com/python/typeshed/blob/9e506eb5e8fc2823db8c60ad561b1145ff114947/stdlib/typing.pyi#L690
|
| 857 |
+
|
| 858 |
+
|
| 859 |
+
@runtime_checkable
class SchemaLike(Generic[_JSON_VT_co], Protocol):  # type: ignore[misc]
    """
    Represents ``altair`` classes which *may* not derive ``SchemaBase``.

    Attributes
    ----------
    _schema
        A single item JSON Schema using the `type`_ keyword.

    Notes
    -----
    Should be kept tightly defined to the **minimum** requirements for:
        - Converting into a form that can be validated by `jsonschema`_.
        - Avoiding calling ``.to_dict()`` on a class external to ``altair``.
    - ``_schema`` is more accurately described as a ``ClassVar``
        - See `discussion`_ for blocking issue.

    .. _jsonschema:
        https://github.com/python-jsonschema/jsonschema
    .. _type:
        https://json-schema.org/understanding-json-schema/reference/type
    .. _discussion:
        https://github.com/python/typing/discussions/1424
    """

    _schema: _TypeMap[_JSON_VT_co]

    # Minimal structural requirement: anything convertible to a dict spec.
    def to_dict(self, *args, **kwds) -> Any: ...
|
| 888 |
+
|
| 889 |
+
|
| 890 |
+
@runtime_checkable
class ConditionLike(SchemaLike[Literal["object"]], Protocol):
    """
    Represents the wrapped state of a conditional encoding or property.

    Attributes
    ----------
    condition
        One or more (predicate, statement) pairs which each form a condition.

    Notes
    -----
    - Can be extended with additional conditions.
    - *Does not* define a default value, but can be finalized with one.
    """

    condition: Any
    # Conditions always serialize to a JSON object.
    _schema: _TypeMap[Literal["object"]] = {"type": "object"}
|
| 908 |
+
|
| 909 |
+
|
| 910 |
+
class UndefinedType:
    """A singleton object for marking undefined parameters."""

    # Cached singleton instance; name-mangled to this class.
    __instance = None

    def __new__(cls, *args, **kwargs) -> Self:
        # Create the instance only on first use; afterwards every call
        # returns the same cached object.
        cached = cls.__instance
        if not isinstance(cached, cls):
            cached = object.__new__(cls, *args, **kwargs)
            cls.__instance = cached
        return cached

    def __repr__(self) -> str:
        return "Undefined"
|
| 922 |
+
|
| 923 |
+
|
| 924 |
+
# The module-level singleton used throughout altair to mean "not specified".
Undefined = UndefinedType()
T = TypeVar("T")
Optional: TypeAlias = Union[T, UndefinedType]
"""One of ``T`` specified type(s), or the ``Undefined`` singleton.

Examples
--------
The parameters ``short``, ``long`` accept the same range of types::

    # ruff: noqa: UP006, UP007
    from altair.typing import Optional

    def func_1(
        short: Optional[str | bool | float | dict[str, Any] | SchemaBase] = Undefined,
        long: Union[
            str, bool, float, Dict[str, Any], SchemaBase, UndefinedType
        ] = Undefined,
    ): ...

This is distinct from `typing.Optional <https://typing.readthedocs.io/en/latest/spec/historical.html#union-and-optional>`__.

``altair.typing.Optional`` treats ``None`` like any other type::

    # ruff: noqa: UP006, UP007
    from altair.typing import Optional

    def func_2(
        short: Optional[str | float | dict[str, Any] | None | SchemaBase] = Undefined,
        long: Union[
            str, float, Dict[str, Any], None, SchemaBase, UndefinedType
        ] = Undefined,
    ): ...
"""
|
| 957 |
+
|
| 958 |
+
|
| 959 |
+
def is_undefined(obj: Any) -> TypeIs[UndefinedType]:
    """
    Type-safe singleton check for `UndefinedType`.

    Notes
    -----
    - Using `obj is Undefined` does not narrow from `UndefinedType` in a union.
        - Due to the assumption that other `UndefinedType`'s could exist.
        - Current [typing spec advises](https://typing.readthedocs.io/en/latest/spec/concepts.html#support-for-singleton-types-in-unions) using an `Enum`.
    - Otherwise, requires an explicit guard to inform the type checker.
    """
    # Identity check against the module-level singleton.
    return obj is Undefined
|
| 971 |
+
|
| 972 |
+
|
| 973 |
+
@overload
def _shallow_copy(obj: _CopyImpl) -> _CopyImpl: ...
@overload
def _shallow_copy(obj: Any) -> Any: ...
def _shallow_copy(obj: _CopyImpl | Any) -> _CopyImpl | Any:
    """Copy ``SchemaBase``/``list``/``dict`` one level deep; return anything else by reference."""
    if isinstance(obj, SchemaBase):
        # SchemaBase provides its own non-recursive clone.
        return obj.copy(deep=False)
    return obj.copy() if isinstance(obj, (list, dict)) else obj
|
| 984 |
+
|
| 985 |
+
|
| 986 |
+
@overload
def _deep_copy(obj: _CopyImpl, by_ref: set[str]) -> _CopyImpl: ...
@overload
def _deep_copy(obj: Any, by_ref: set[str]) -> Any: ...
def _deep_copy(obj: _CopyImpl | Any, by_ref: set[str]) -> _CopyImpl | Any:
    """Recursively copy ``obj``, storing values under keys named in ``by_ref`` by reference."""
    # Bind `by_ref` once so all recursive calls share the same exclusion set.
    copy = partial(_deep_copy, by_ref=by_ref)
    if isinstance(obj, SchemaBase):
        if copier := getattr(obj, "__deepcopy__", None):
            # NOTE(review): `copier` is a bound method, so `obj` is passed as the
            # (memo) argument of `__deepcopy__` here — confirm this is intended.
            with debug_mode(False):
                return copier(obj)
        args = (copy(arg) for arg in obj._args)
        kwds = {k: (copy(v) if k not in by_ref else v) for k, v in obj._kwds.items()}
        # Validation is disabled while rebuilding to avoid redundant schema checks.
        with debug_mode(False):
            return obj.__class__(*args, **kwds)
    elif isinstance(obj, list):
        return [copy(v) for v in obj]
    elif isinstance(obj, dict):
        return {k: (copy(v) if k not in by_ref else v) for k, v in obj.items()}
    else:
        # Everything else (scalars, tuples, third-party objects) by reference.
        return obj
|
| 1006 |
+
|
| 1007 |
+
|
| 1008 |
+
class SchemaBase:
    """
    Base class for schema wrappers.

    Each derived class should set the _schema class attribute (and optionally
    the _rootschema class attribute) which is used for validation.
    """

    # JSON Schema fragment instances are validated against; subclasses must set it.
    _schema: ClassVar[dict[str, Any] | Any] = None
    # Root schema used to resolve `$ref`s; falls back to `_schema` when None.
    _rootschema: ClassVar[dict[str, Any] | None] = None
    # When True (and DEBUG_MODE is on), instances self-validate in `__init__`.
    _class_is_valid_at_instantiation: ClassVar[bool] = True

    def __init__(self, *args: Any, **kwds: Any) -> None:
        # Two valid options for initialization, which should be handled by
        # derived classes:
        # - a single arg with no kwds, for, e.g. {'type': 'string'}
        # - zero args with zero or more kwds for {'type': 'object'}
        if self._schema is None:
            msg = (
                f"Cannot instantiate object of type {self.__class__}: "
                "_schema class attribute is not defined."
                ""
            )
            raise ValueError(msg)

        if kwds:
            assert len(args) == 0
        else:
            assert len(args) in {0, 1}

        # use object.__setattr__ because we override setattr below.
        object.__setattr__(self, "_args", args)
        object.__setattr__(self, "_kwds", kwds)

        if DEBUG_MODE and self._class_is_valid_at_instantiation:
            self.to_dict(validate=True)

    def copy(
        self, deep: bool | Iterable[Any] = True, ignore: list[str] | None = None
    ) -> Self:
        """
        Return a copy of the object.

        Parameters
        ----------
        deep : boolean or list, optional
            If True (default) then return a deep copy of all dict, list, and
            SchemaBase objects within the object structure.
            If False, then only copy the top object.
            If a list or iterable, then only copy the listed attributes.
        ignore : list, optional
            A list of keys for which the contents should not be copied, but
            only stored by reference.
        """
        if deep is True:
            return cast("Self", _deep_copy(self, set(ignore) if ignore else set()))
        with debug_mode(False):
            copy = self.__class__(*self._args, **self._kwds)
        if _is_iterable(deep):
            # `deep` is a list of attribute names: copy only those, one level deep.
            for attr in deep:
                copy[attr] = _shallow_copy(copy._get(attr))
        return copy

    def _get(self, attr, default=Undefined):
        """Get an attribute, returning default if not present."""
        # NOTE: the parameter name is deliberately reused as the local result.
        attr = self._kwds.get(attr, Undefined)
        if attr is Undefined:
            attr = default
        return attr

    def __getattr__(self, attr):
        # reminder: getattr is called after the normal lookups
        if attr == "_kwds":
            raise AttributeError()
        if attr in self._kwds:
            return self._kwds[attr]
        else:
            try:
                _getattr = super().__getattr__  # pyright: ignore[reportAttributeAccessIssue]
            except AttributeError:
                _getattr = super().__getattribute__
            return _getattr(attr)

    def __setattr__(self, item, val) -> None:
        # All attribute writes land in the keyword dict (serialized by to_dict).
        self._kwds[item] = val

    def __getitem__(self, item):
        return self._kwds[item]

    def __setitem__(self, item, val) -> None:
        self._kwds[item] = val

    def __repr__(self) -> str:
        name = type(self).__name__
        if kwds := self._kwds:
            it = (f"{k}: {v!r}" for k, v in sorted(kwds.items()) if v is not Undefined)
            args = ",\n".join(it).replace("\n", "\n  ")
            LB, RB = "{", "}"
            return f"{name}({LB}\n  {args}\n{RB})"
        else:
            # NOTE(review): assumes at least one positional arg when kwds is
            # empty; an instance with neither would raise IndexError here.
            return f"{name}({self._args[0]!r})"

    def __eq__(self, other: Any) -> bool:
        # NOTE: defining __eq__ without __hash__ makes instances unhashable.
        return (
            type(self) is type(other)
            and self._args == other._args
            and self._kwds == other._kwds
        )

    def to_dict(
        self,
        validate: bool = True,
        *,
        ignore: list[str] | None = None,
        context: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """
        Return a dictionary representation of the object.

        Parameters
        ----------
        validate : bool, optional
            If True (default), then validate the result against the schema.
        ignore : list[str], optional
            A list of keys to ignore.
        context : dict[str, Any], optional
            A context dictionary.

        Raises
        ------
        SchemaValidationError :
            If ``validate`` and the result does not conform to the schema.

        Notes
        -----
        - ``ignore``, ``context`` are usually not needed to be specified as a user.
        - *Technical*: ``ignore`` will **not** be passed to child :meth:`.to_dict()`.
        """
        context = context or {}
        ignore = ignore or []
        # Only modules already imported are used for isinstance-based conversion.
        opts = _get_optional_modules(np_opt="numpy", pd_opt="pandas")

        if self._args and not self._kwds:
            kwds = self._args[0]
        elif not self._args:
            kwds = self._kwds.copy()
            exclude = {*ignore, "shorthand"}
            if parsed := context.pop("parsed_shorthand", None):
                kwds = _replace_parsed_shorthand(parsed, kwds)
            kwds = {k: v for k, v in kwds.items() if k not in exclude}
            # A bare string mark is normalized to its object form.
            if (mark := kwds.get("mark")) and isinstance(mark, str):
                kwds["mark"] = {"type": mark}
        else:
            msg = f"{type(self)} instance has both a value and properties : cannot serialize to dict"
            raise ValueError(msg)
        result = _todict(kwds, context=context, **opts)
        if validate:
            # NOTE: Don't raise `from err`, see `SchemaValidationError` doc
            try:
                self.validate(result)
            except jsonschema.ValidationError as err:
                raise SchemaValidationError(self, err) from None
        return result

    def to_json(
        self,
        validate: bool = True,
        indent: int | str | None = 2,
        sort_keys: bool = True,
        *,
        ignore: list[str] | None = None,
        context: dict[str, Any] | None = None,
        **kwargs,
    ) -> str:
        """
        Emit the JSON representation for this object as a string.

        Parameters
        ----------
        validate : bool, optional
            If True (default), then validate the result against the schema.
        indent : int, optional
            The number of spaces of indentation to use. The default is 2.
        sort_keys : bool, optional
            If True (default), sort keys in the output.
        ignore : list[str], optional
            A list of keys to ignore.
        context : dict[str, Any], optional
            A context dictionary.
        **kwargs
            Additional keyword arguments are passed to ``json.dumps()``

        Raises
        ------
        SchemaValidationError :
            If ``validate`` and the result does not conform to the schema.

        Notes
        -----
        - ``ignore``, ``context`` are usually not needed to be specified as a user.
        - *Technical*: ``ignore`` will **not** be passed to child :meth:`.to_dict()`.
        """
        if ignore is None:
            ignore = []
        if context is None:
            context = {}
        dct = self.to_dict(validate=validate, ignore=ignore, context=context)
        return json.dumps(dct, indent=indent, sort_keys=sort_keys, **kwargs)

    @classmethod
    def _default_wrapper_classes(cls) -> Iterator[type[SchemaBase]]:
        """Return the set of classes used within cls.from_dict()."""
        return _subclasses(SchemaBase)

    @classmethod
    def from_dict(
        cls: type[TSchemaBase], dct: dict[str, Any], validate: bool = True
    ) -> TSchemaBase:
        """
        Construct class from a dictionary representation.

        Parameters
        ----------
        dct : dictionary
            The dict from which to construct the class
        validate : boolean
            If True (default), then validate the input against the schema.

        Raises
        ------
        jsonschema.ValidationError :
            If ``validate`` and ``dct`` does not conform to the schema
        """
        if validate:
            cls.validate(dct)
        converter = _FromDict(cls._default_wrapper_classes())
        return converter.from_dict(dct, cls)

    @classmethod
    def from_json(
        cls,
        json_string: str,
        validate: bool = True,
        **kwargs: Any,
        # Type hints for this method would get rather complicated
        # if we want to provide a more specific return type
    ) -> ChartType:
        """
        Instantiate the object from a valid JSON string.

        Parameters
        ----------
        json_string : string
            The string containing a valid JSON chart specification.
        validate : boolean
            If True (default), then validate the input against the schema.
        **kwargs :
            Additional keyword arguments are passed to json.loads

        Returns
        -------
        chart : Chart object
            The altair Chart object built from the specification.
        """
        dct: dict[str, Any] = json.loads(json_string, **kwargs)
        return cls.from_dict(dct, validate=validate)  # type: ignore[return-value]

    @classmethod
    def validate(
        cls, instance: dict[str, Any], schema: dict[str, Any] | None = None
    ) -> None:
        """Validate the instance against the class schema in the context of the rootschema."""
        if schema is None:
            schema = cls._schema
            # For the benefit of mypy
            assert schema is not None
        validate_jsonschema(instance, schema, rootschema=cls._rootschema or cls._schema)

    @classmethod
    def resolve_references(cls, schema: dict[str, Any] | None = None) -> dict[str, Any]:
        """Resolve references in the context of this object's schema or root schema."""
        schema_to_pass = schema or cls._schema
        # For the benefit of mypy
        assert schema_to_pass is not None
        return _resolve_references(
            schema=schema_to_pass,
            rootschema=(cls._rootschema or cls._schema or schema),
        )

    @classmethod
    def validate_property(
        cls, name: str, value: Any, schema: dict[str, Any] | None = None
    ) -> None:
        """Validate a property against property schema in the context of the rootschema."""
        opts = _get_optional_modules(np_opt="numpy", pd_opt="pandas")
        value = _todict(value, context={}, **opts)
        props = cls.resolve_references(schema or cls._schema).get("properties", {})
        validate_jsonschema(
            value, props.get(name, {}), rootschema=cls._rootschema or cls._schema
        )

    def __dir__(self) -> list[str]:
        # Include dynamic keyword attributes so they show up in tab-completion.
        return sorted(chain(super().__dir__(), self._kwds))
|
| 1311 |
+
|
| 1312 |
+
|
| 1313 |
+
def _get_optional_modules(**modules: str) -> dict[str, _OptionalModule]:
    """
    Return optional dependencies only if they have already been imported - otherwise they return `None`.

    This is useful for `isinstance` checks: if `pandas` has not been imported,
    then an object is definitely not a `pandas.Timestamp`.

    Parameters
    ----------
    **modules
        Keyword-only binding from `{alias: module_name}`.

    Examples
    --------
    ``_get_optional_modules(pd="pandas", ibis="ibis")`` maps ``"pd"`` to the
    imported ``pandas`` module and ``"ibis"`` to ``None`` until the user
    imports ``ibis``, after which subsequent calls return the module.
    """
    # `sys.modules` only contains modules that have actually been imported,
    # so an un-imported module resolves to None.
    found: dict[str, _OptionalModule] = {}
    for alias, module_name in modules.items():
        found[alias] = sys.modules.get(module_name)
    return found
|
| 1350 |
+
|
| 1351 |
+
|
| 1352 |
+
def _replace_parsed_shorthand(
    parsed_shorthand: dict[str, Any], kwds: dict[str, Any]
) -> dict[str, Any]:
    """
    `parsed_shorthand` is added by `FieldChannelMixin`.

    It's used below to replace shorthand with its long form equivalent
    `parsed_shorthand` is removed from `context` if it exists so that it is
    not passed to child `to_dict` function calls.

    Parameters
    ----------
    parsed_shorthand
        Long-form keys parsed from the channel shorthand; may be mutated
        (the "sort" key can be dropped).
    kwds
        The channel's keyword dict; updated in place and returned.
    """
    # Prevent that pandas categorical data is automatically sorted
    # when a non-ordinal data type is specified manually
    # or if the encoding channel does not support sorting
    # FIX: use `kwds.get("type", Undefined)` instead of `kwds["type"]` so a
    # missing "type" key cannot raise KeyError; an absent type is treated the
    # same as an explicit Undefined.
    if "sort" in parsed_shorthand and (
        "sort" not in kwds or kwds.get("type", Undefined) not in {"ordinal", Undefined}
    ):
        parsed_shorthand.pop("sort")

    # Only fill in values the user has not explicitly specified.
    kwds.update(
        (k, v)
        for k, v in parsed_shorthand.items()
        if kwds.get(k, Undefined) is Undefined
    )
    return kwds
|
| 1376 |
+
|
| 1377 |
+
|
| 1378 |
+
# Bound TypeVar preserving the concrete SchemaBase subclass through helpers.
TSchemaBase = TypeVar("TSchemaBase", bound=SchemaBase)

# Constrained TypeVar: the only types `copy`/`_shallow_copy` duplicate.
_CopyImpl = TypeVar("_CopyImpl", SchemaBase, dict[Any, Any], list[Any])
"""
Types which have an implementation in ``SchemaBase.copy()``.

All other types are returned **by reference**.
"""
|
| 1386 |
+
|
| 1387 |
+
|
| 1388 |
+
def _is_dict(obj: Any | dict[Any, Any]) -> TypeIs[dict[Any, Any]]:
    """Runtime guard that narrows ``obj`` to ``dict`` for the type checker."""
    return isinstance(obj, dict)
|
| 1390 |
+
|
| 1391 |
+
|
| 1392 |
+
def _is_list(obj: Any | list[Any]) -> TypeIs[list[Any]]:
    """Runtime guard that narrows ``obj`` to ``list`` for the type checker."""
    return isinstance(obj, list)
|
| 1394 |
+
|
| 1395 |
+
|
| 1396 |
+
def _is_iterable(
    obj: Any, *, exclude: type | tuple[type, ...] = (str, bytes)
) -> TypeIs[Iterable[Any]]:
    """True when ``obj`` is iterable, except for the ``exclude``d types (default: str/bytes)."""
    if isinstance(obj, exclude):
        # str/bytes iterate per character, which is never wanted here.
        return False
    return isinstance(obj, Iterable)
|
| 1400 |
+
|
| 1401 |
+
|
| 1402 |
+
def _passthrough(*args: Any, **kwds: Any) -> Any | dict[str, Any]:
    """Fallback constructor: return the first positional argument if given, else the kwds mapping."""
    if args:
        return args[0]
    return kwds
|
| 1404 |
+
|
| 1405 |
+
|
| 1406 |
+
class _FromDict:
    """
    Class used to construct SchemaBase class hierarchies from a dict.

    The primary purpose of using this class is to be able to build a hash table
    that maps schemas to their wrapper classes. The candidate classes are
    specified in the ``wrapper_classes`` positional-only argument to the constructor.
    """

    # Top-level schema keys ignored when hashing, so cosmetic differences
    # (titles, descriptions, ...) don't prevent a class match.
    _hash_exclude_keys = ("definitions", "title", "description", "$schema", "id")

    def __init__(self, wrapper_classes: Iterable[type[SchemaBase]], /) -> None:
        # Create a mapping of a schema hash to a list of matching classes
        # This lets us quickly determine the correct class to construct
        self.class_dict: dict[int, list[type[SchemaBase]]] = defaultdict(list)
        for tp in wrapper_classes:
            if tp._schema is not None:
                self.class_dict[self.hash_schema(tp._schema)].append(tp)

    @classmethod
    def hash_schema(cls, schema: dict[str, Any], use_json: bool = True) -> int:
        """
        Compute a python hash for a nested dictionary which properly handles dicts, lists, sets, and tuples.

        At the top level, the function excludes from the hashed schema all keys
        listed in `exclude_keys`.

        This implements two methods: one based on conversion to JSON, and one based
        on recursive conversions of unhashable to hashable types; the former seems
        to be slightly faster in several benchmarks.
        """
        if cls._hash_exclude_keys and isinstance(schema, dict):
            schema = {
                key: val
                for key, val in schema.items()
                if key not in cls._hash_exclude_keys
            }
        if use_json:
            s = json.dumps(schema, sort_keys=True)
            return hash(s)
        else:

            def _freeze(val):
                # Recursively convert unhashable containers to hashable ones.
                if isinstance(val, dict):
                    return frozenset((k, _freeze(v)) for k, v in val.items())
                elif isinstance(val, set):
                    return frozenset(map(_freeze, val))
                elif isinstance(val, (list, tuple)):
                    return tuple(map(_freeze, val))
                else:
                    return val

            return hash(_freeze(schema))

    @overload
    def from_dict(
        self,
        dct: TSchemaBase,
        tp: None = ...,
        schema: None = ...,
        rootschema: None = ...,
        default_class: Any = ...,
    ) -> TSchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any] | list[dict[str, Any]],
        tp: Any = ...,
        schema: Any = ...,
        rootschema: Any = ...,
        default_class: type[TSchemaBase] = ...,  # pyright: ignore[reportInvalidTypeVarUse]
    ) -> TSchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any],
        tp: None = ...,
        schema: dict[str, Any] = ...,
        rootschema: None = ...,
        default_class: Any = ...,
    ) -> SchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any],
        tp: type[TSchemaBase],
        schema: None = ...,
        rootschema: None = ...,
        default_class: Any = ...,
    ) -> TSchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any] | list[dict[str, Any]],
        tp: type[TSchemaBase],
        schema: dict[str, Any],
        rootschema: dict[str, Any] | None = ...,
        default_class: Any = ...,
    ) -> Never: ...
    def from_dict(
        self,
        dct: dict[str, Any] | list[dict[str, Any]] | TSchemaBase,
        tp: type[TSchemaBase] | None = None,
        schema: dict[str, Any] | None = None,
        rootschema: dict[str, Any] | None = None,
        default_class: Any = _passthrough,
    ) -> TSchemaBase | SchemaBase:
        """Construct an object from a dict representation."""
        target_tp: Any
        current_schema: dict[str, Any]
        if isinstance(dct, SchemaBase):
            # Already wrapped; nothing to construct.
            return dct
        elif tp is not None:
            current_schema = tp._schema
            root_schema: dict[str, Any] = rootschema or tp._rootschema or current_schema
            target_tp = tp
        elif schema is not None:
            # If there are multiple matches, we use the first one in the dict.
            # Our class dict is constructed breadth-first from top to bottom,
            # so the first class that matches is the most general match.
            current_schema = schema
            root_schema = rootschema or current_schema
            matches = self.class_dict[self.hash_schema(current_schema)]
            target_tp = matches[0] if matches else default_class
        else:
            # NOTE(review): this branch is reached when *neither* `tp` nor
            # `schema` is given; the "but not both" wording looks inaccurate.
            msg = "Must provide either `tp` or `schema`, but not both."
            raise ValueError(msg)

        # Pin the root schema for all recursive child constructions.
        from_dict = partial(self.from_dict, rootschema=root_schema)
        # Can also return a list?
        resolved = _resolve_references(current_schema, root_schema)
        if "anyOf" in resolved or "oneOf" in resolved:
            schemas = resolved.get("anyOf", []) + resolved.get("oneOf", [])
            for possible in schemas:
                try:
                    validate_jsonschema(dct, possible, rootschema=root_schema)
                except jsonschema.ValidationError:
                    continue
                else:
                    # First branch that validates wins.
                    return from_dict(dct, schema=possible, default_class=target_tp)

        if _is_dict(dct):
            # TODO: handle schemas for additionalProperties/patternProperties
            props: dict[str, Any] = resolved.get("properties", {})
            kwds = {
                k: (from_dict(v, schema=props[k]) if k in props else v)
                for k, v in dct.items()
            }
            return target_tp(**kwds)
        elif _is_list(dct):
            item_schema: dict[str, Any] = resolved.get("items", {})
            return target_tp([from_dict(k, schema=item_schema) for k in dct])
        else:
            # NOTE: Unsure what is valid here
            return target_tp(dct)
|
| 1561 |
+
|
| 1562 |
+
|
| 1563 |
+
class _PropertySetter:
    """
    Descriptor exposing a schema property as a chainable setter method.

    Attribute access (``__get__``) copies documentation and signature metadata
    from the matching ``vegalite`` helper class; calling the descriptor
    (``__call__``) returns a copy of the owning object with the property set.
    """

    def __init__(self, prop: str, schema: dict[str, Any]) -> None:
        self.prop = prop
        self.schema = schema

    def __get__(self, obj, cls):
        self.obj = obj
        self.cls = cls
        # The docs from the encoding class parameter (e.g. `bin` in X, Color,
        # etc); this provides a general description of the parameter.
        self.__doc__ = self.schema["description"].replace("__", "**")
        property_name = f"{self.prop}"[0].upper() + f"{self.prop}"[1:]
        if hasattr(vegalite, property_name):
            altair_prop = getattr(vegalite, property_name)
            # Add the docstring from the helper class (e.g. `BinParams`) so
            # that all the parameter names of the helper class are included in
            # the final docstring
            parameter_index = altair_prop.__doc__.find("Parameters\n")
            if parameter_index > -1:
                self.__doc__ = (
                    altair_prop.__doc__[:parameter_index].replace("    ", "")
                    + self.__doc__
                    + textwrap.dedent(
                        f"\n\n    {altair_prop.__doc__[parameter_index:]}"
                    )
                )
            # For short docstrings such as Aggregate, Stack, et
            else:
                self.__doc__ = (
                    altair_prop.__doc__.replace("    ", "") + "\n" + self.__doc__
                )
            # Add signatures and tab completion for the method and parameter names
            self.__signature__ = inspect.signature(altair_prop)
            self.__wrapped__ = inspect.getfullargspec(altair_prop)
            self.__name__ = altair_prop.__name__
        else:
            # It seems like bandPosition is the only parameter that doesn't
            # have a helper class.
            pass
        return self

    def __call__(self, *args: Any, **kwargs: Any):
        # Work on a copy so the original object remains unchanged (chainable API).
        obj = self.obj.copy()
        # TODO: use schema to validate
        obj[self.prop] = args[0] if args else kwargs
        return obj
|
| 1609 |
+
|
| 1610 |
+
|
| 1611 |
+
def with_property_setters(cls: type[TSchemaBase]) -> type[TSchemaBase]:
    """Decorator to add property setters to a Schema class."""
    # Attach one descriptor per property declared in the resolved schema.
    properties = cls.resolve_references().get("properties", {})
    for prop, propschema in properties.items():
        setattr(cls, prop, _PropertySetter(prop, propschema))
    return cls
|
mgm/lib/python3.10/site-packages/altair/utils/selection.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Any, NewType
|
| 5 |
+
|
| 6 |
+
# Type representing the "{selection}_store" dataset that corresponds to a
|
| 7 |
+
# Vega-Lite selection
|
| 8 |
+
Store = NewType("Store", list[dict[str, Any]])
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@dataclass(frozen=True, eq=True)
|
| 12 |
+
class IndexSelection:
|
| 13 |
+
"""
|
| 14 |
+
Represents the state of an alt.selection_point() when neither the fields nor encodings arguments are specified.
|
| 15 |
+
|
| 16 |
+
The value field is a list of zero-based indices into the
|
| 17 |
+
selected dataset.
|
| 18 |
+
|
| 19 |
+
Note: These indices only apply to the input DataFrame
|
| 20 |
+
for charts that do not include aggregations (e.g. a scatter chart).
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
name: str
|
| 24 |
+
value: list[int]
|
| 25 |
+
store: Store
|
| 26 |
+
|
| 27 |
+
@staticmethod
|
| 28 |
+
def from_vega(name: str, signal: dict[str, dict] | None, store: Store):
|
| 29 |
+
"""
|
| 30 |
+
Construct an IndexSelection from the raw Vega signal and dataset values.
|
| 31 |
+
|
| 32 |
+
Parameters
|
| 33 |
+
----------
|
| 34 |
+
name: str
|
| 35 |
+
The selection's name
|
| 36 |
+
signal: dict or None
|
| 37 |
+
The value of the Vega signal corresponding to the selection
|
| 38 |
+
store: list
|
| 39 |
+
The value of the Vega dataset corresponding to the selection.
|
| 40 |
+
This dataset is named "{name}_store" in the Vega view.
|
| 41 |
+
|
| 42 |
+
Returns
|
| 43 |
+
-------
|
| 44 |
+
IndexSelection
|
| 45 |
+
"""
|
| 46 |
+
if signal is None:
|
| 47 |
+
indices = []
|
| 48 |
+
else:
|
| 49 |
+
points = signal.get("vlPoint", {}).get("or", [])
|
| 50 |
+
indices = [p["_vgsid_"] - 1 for p in points]
|
| 51 |
+
return IndexSelection(name=name, value=indices, store=store)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@dataclass(frozen=True, eq=True)
|
| 55 |
+
class PointSelection:
|
| 56 |
+
"""
|
| 57 |
+
Represents the state of an alt.selection_point() when the fields or encodings arguments are specified.
|
| 58 |
+
|
| 59 |
+
The value field is a list of dicts of the form:
|
| 60 |
+
[{"dim1": 1, "dim2": "A"}, {"dim1": 2, "dim2": "BB"}]
|
| 61 |
+
|
| 62 |
+
where "dim1" and "dim2" are dataset columns and the dict values
|
| 63 |
+
correspond to the specific selected values.
|
| 64 |
+
"""
|
| 65 |
+
|
| 66 |
+
name: str
|
| 67 |
+
value: list[dict[str, Any]]
|
| 68 |
+
store: Store
|
| 69 |
+
|
| 70 |
+
@staticmethod
|
| 71 |
+
def from_vega(name: str, signal: dict[str, dict] | None, store: Store):
|
| 72 |
+
"""
|
| 73 |
+
Construct a PointSelection from the raw Vega signal and dataset values.
|
| 74 |
+
|
| 75 |
+
Parameters
|
| 76 |
+
----------
|
| 77 |
+
name: str
|
| 78 |
+
The selection's name
|
| 79 |
+
signal: dict or None
|
| 80 |
+
The value of the Vega signal corresponding to the selection
|
| 81 |
+
store: list
|
| 82 |
+
The value of the Vega dataset corresponding to the selection.
|
| 83 |
+
This dataset is named "{name}_store" in the Vega view.
|
| 84 |
+
|
| 85 |
+
Returns
|
| 86 |
+
-------
|
| 87 |
+
PointSelection
|
| 88 |
+
"""
|
| 89 |
+
points = [] if signal is None else signal.get("vlPoint", {}).get("or", [])
|
| 90 |
+
return PointSelection(name=name, value=points, store=store)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
@dataclass(frozen=True, eq=True)
|
| 94 |
+
class IntervalSelection:
|
| 95 |
+
"""
|
| 96 |
+
Represents the state of an alt.selection_interval().
|
| 97 |
+
|
| 98 |
+
The value field is a dict of the form:
|
| 99 |
+
{"dim1": [0, 10], "dim2": ["A", "BB", "CCC"]}
|
| 100 |
+
|
| 101 |
+
where "dim1" and "dim2" are dataset columns and the dict values
|
| 102 |
+
correspond to the selected range.
|
| 103 |
+
"""
|
| 104 |
+
|
| 105 |
+
name: str
|
| 106 |
+
value: dict[str, list]
|
| 107 |
+
store: Store
|
| 108 |
+
|
| 109 |
+
@staticmethod
|
| 110 |
+
def from_vega(name: str, signal: dict[str, list] | None, store: Store):
|
| 111 |
+
"""
|
| 112 |
+
Construct an IntervalSelection from the raw Vega signal and dataset values.
|
| 113 |
+
|
| 114 |
+
Parameters
|
| 115 |
+
----------
|
| 116 |
+
name: str
|
| 117 |
+
The selection's name
|
| 118 |
+
signal: dict or None
|
| 119 |
+
The value of the Vega signal corresponding to the selection
|
| 120 |
+
store: list
|
| 121 |
+
The value of the Vega dataset corresponding to the selection.
|
| 122 |
+
This dataset is named "{name}_store" in the Vega view.
|
| 123 |
+
|
| 124 |
+
Returns
|
| 125 |
+
-------
|
| 126 |
+
PointSelection
|
| 127 |
+
"""
|
| 128 |
+
if signal is None:
|
| 129 |
+
signal = {}
|
| 130 |
+
return IntervalSelection(name=name, value=signal, store=store)
|
mgm/lib/python3.10/site-packages/altair/vegalite/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa: F403
|
| 2 |
+
from .v5 import *
|
mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (183 Bytes). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/api.cpython-310.pyc
ADDED
|
Binary file (182 Bytes). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/data.cpython-310.pyc
ADDED
|
Binary file (2.15 kB). View file
|
|
|
mgm/lib/python3.10/site-packages/altair/vegalite/__pycache__/display.cpython-310.pyc
ADDED
|
Binary file (381 Bytes). View file
|
|
|