ZTWHHH commited on
Commit
faeeecc
·
verified ·
1 Parent(s): 6308975

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. parrot/share/terminfo/r/rbcomm +0 -0
  3. parrot/share/terminfo/r/regent100 +0 -0
  4. parrot/share/terminfo/t/t10 +0 -0
  5. parrot/share/terminfo/t/t3800 +0 -0
  6. parrot/share/terminfo/t/tab132 +0 -0
  7. parrot/share/terminfo/t/tek +0 -0
  8. parrot/share/terminfo/t/tek4013 +0 -0
  9. parrot/share/terminfo/t/tek4014 +0 -0
  10. parrot/share/terminfo/t/tek4015 +0 -0
  11. parrot/share/terminfo/t/tek4025-17-ws +0 -0
  12. parrot/share/terminfo/t/tek4025a +0 -0
  13. parrot/share/terminfo/t/tek4205 +0 -0
  14. parrot/share/terminfo/t/teken-2018 +0 -0
  15. parrot/share/terminfo/t/teken-sc +0 -0
  16. parrot/share/terminfo/t/terminology-0.6.1 +0 -0
  17. parrot/share/terminfo/t/ti707 +0 -0
  18. parrot/share/terminfo/t/tkterm +0 -0
  19. parrot/share/terminfo/t/tn300 +0 -0
  20. parrot/share/terminfo/t/ts100-sp +0 -0
  21. parrot/share/terminfo/t/tty37 +0 -0
  22. parrot/share/terminfo/t/tty5410 +0 -0
  23. parrot/share/terminfo/t/tty5410-w +0 -0
  24. parrot/share/terminfo/t/tty5420-w-rv-n +0 -0
  25. parrot/share/terminfo/t/tty5620 +0 -0
  26. parrot/share/terminfo/t/tvi9065 +0 -0
  27. parrot/share/terminfo/t/tvi910+ +0 -0
  28. parrot/share/terminfo/t/tvi912b+dim +0 -0
  29. parrot/share/terminfo/t/tvi912b+mc +0 -0
  30. parrot/share/terminfo/t/tvi912b+vb +0 -0
  31. parrot/share/terminfo/t/tvi912b-2p-mc +0 -0
  32. parrot/share/terminfo/t/tvi912c-mc-2p +0 -0
  33. parrot/share/terminfo/t/tvi920b+fn +0 -0
  34. parrot/share/terminfo/t/tvi920b-2p +0 -0
  35. parrot/share/terminfo/t/tvi920b-vb-mc +0 -0
  36. parrot/share/terminfo/t/tvi920b-vb-unk +0 -0
  37. parrot/share/terminfo/t/tvi920c-2p-p +0 -0
  38. parrot/share/terminfo/t/tvi950 +0 -0
  39. parrot/share/terminfo/t/tws2103 +0 -0
  40. videollama2/lib/python3.10/site-packages/altair/utils/_importers.py +109 -0
  41. videollama2/lib/python3.10/site-packages/altair/utils/_show.py +72 -0
  42. videollama2/lib/python3.10/site-packages/altair/utils/_vegafusion_data.py +281 -0
  43. videollama2/lib/python3.10/site-packages/altair/utils/display.py +225 -0
  44. videollama2/lib/python3.10/site-packages/altair/utils/html.py +314 -0
  45. videollama2/lib/python3.10/site-packages/altair/utils/plugin_registry.py +277 -0
  46. videollama2/lib/python3.10/site-packages/altair/utils/save.py +224 -0
  47. videollama2/lib/python3.10/site-packages/altair/utils/schemapi.py +1471 -0
  48. videollama2/lib/python3.10/site-packages/altair/utils/selection.py +130 -0
  49. videollama2/lib/python3.10/site-packages/altair/utils/theme.py +49 -0
  50. videollama2/lib/python3.10/site-packages/altair/vegalite/v5/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -969,3 +969,4 @@ parrot/lib/python3.10/html/__pycache__/entities.cpython-310.pyc filter=lfs diff=
969
  vllm/lib/python3.10/site-packages/pandas/tests/io/__pycache__/test_sql.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
970
  vllm/lib/python3.10/site-packages/pandas/tests/indexing/__pycache__/test_loc.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
971
  vllm/lib/python3.10/site-packages/pandas/tests/tools/__pycache__/test_to_datetime.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
969
  vllm/lib/python3.10/site-packages/pandas/tests/io/__pycache__/test_sql.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
970
  vllm/lib/python3.10/site-packages/pandas/tests/indexing/__pycache__/test_loc.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
971
  vllm/lib/python3.10/site-packages/pandas/tests/tools/__pycache__/test_to_datetime.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
972
+ videollama2/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
parrot/share/terminfo/r/rbcomm ADDED
Binary file (592 Bytes). View file
 
parrot/share/terminfo/r/regent100 ADDED
Binary file (490 Bytes). View file
 
parrot/share/terminfo/t/t10 ADDED
Binary file (446 Bytes). View file
 
parrot/share/terminfo/t/t3800 ADDED
Binary file (410 Bytes). View file
 
parrot/share/terminfo/t/tab132 ADDED
Binary file (1.2 kB). View file
 
parrot/share/terminfo/t/tek ADDED
Binary file (214 Bytes). View file
 
parrot/share/terminfo/t/tek4013 ADDED
Binary file (411 Bytes). View file
 
parrot/share/terminfo/t/tek4014 ADDED
Binary file (212 Bytes). View file
 
parrot/share/terminfo/t/tek4015 ADDED
Binary file (413 Bytes). View file
 
parrot/share/terminfo/t/tek4025-17-ws ADDED
Binary file (725 Bytes). View file
 
parrot/share/terminfo/t/tek4025a ADDED
Binary file (649 Bytes). View file
 
parrot/share/terminfo/t/tek4205 ADDED
Binary file (1.89 kB). View file
 
parrot/share/terminfo/t/teken-2018 ADDED
Binary file (1.53 kB). View file
 
parrot/share/terminfo/t/teken-sc ADDED
Binary file (1.32 kB). View file
 
parrot/share/terminfo/t/terminology-0.6.1 ADDED
Binary file (2.39 kB). View file
 
parrot/share/terminfo/t/ti707 ADDED
Binary file (386 Bytes). View file
 
parrot/share/terminfo/t/tkterm ADDED
Binary file (424 Bytes). View file
 
parrot/share/terminfo/t/tn300 ADDED
Binary file (384 Bytes). View file
 
parrot/share/terminfo/t/ts100-sp ADDED
Binary file (1.25 kB). View file
 
parrot/share/terminfo/t/tty37 ADDED
Binary file (369 Bytes). View file
 
parrot/share/terminfo/t/tty5410 ADDED
Binary file (1.14 kB). View file
 
parrot/share/terminfo/t/tty5410-w ADDED
Binary file (1.15 kB). View file
 
parrot/share/terminfo/t/tty5420-w-rv-n ADDED
Binary file (1.41 kB). View file
 
parrot/share/terminfo/t/tty5620 ADDED
Binary file (630 Bytes). View file
 
parrot/share/terminfo/t/tvi9065 ADDED
Binary file (2.04 kB). View file
 
parrot/share/terminfo/t/tvi910+ ADDED
Binary file (817 Bytes). View file
 
parrot/share/terminfo/t/tvi912b+dim ADDED
Binary file (418 Bytes). View file
 
parrot/share/terminfo/t/tvi912b+mc ADDED
Binary file (507 Bytes). View file
 
parrot/share/terminfo/t/tvi912b+vb ADDED
Binary file (274 Bytes). View file
 
parrot/share/terminfo/t/tvi912b-2p-mc ADDED
Binary file (1.42 kB). View file
 
parrot/share/terminfo/t/tvi912c-mc-2p ADDED
Binary file (1.42 kB). View file
 
parrot/share/terminfo/t/tvi920b+fn ADDED
Binary file (620 Bytes). View file
 
parrot/share/terminfo/t/tvi920b-2p ADDED
Binary file (1.39 kB). View file
 
parrot/share/terminfo/t/tvi920b-vb-mc ADDED
Binary file (1.53 kB). View file
 
parrot/share/terminfo/t/tvi920b-vb-unk ADDED
Binary file (1.41 kB). View file
 
parrot/share/terminfo/t/tvi920c-2p-p ADDED
Binary file (1.4 kB). View file
 
parrot/share/terminfo/t/tvi950 ADDED
Binary file (978 Bytes). View file
 
parrot/share/terminfo/t/tws2103 ADDED
Binary file (1.45 kB). View file
 
videollama2/lib/python3.10/site-packages/altair/utils/_importers.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from importlib.metadata import version as importlib_version
4
+ from typing import TYPE_CHECKING
5
+
6
+ from packaging.version import Version
7
+
8
+ if TYPE_CHECKING:
9
+ from types import ModuleType
10
+
11
+
12
+ def import_vegafusion() -> ModuleType:
13
+ min_version = "1.5.0"
14
+ try:
15
+ version = importlib_version("vegafusion")
16
+ embed_version = importlib_version("vegafusion-python-embed")
17
+ if version != embed_version or Version(version) < Version(min_version):
18
+ msg = (
19
+ "The versions of the vegafusion and vegafusion-python-embed packages must match\n"
20
+ f"and must be version {min_version} or greater.\n"
21
+ f"Found:\n"
22
+ f" - vegafusion=={version}\n"
23
+ f" - vegafusion-python-embed=={embed_version}\n"
24
+ )
25
+ raise RuntimeError(msg)
26
+ import vegafusion as vf # type: ignore
27
+
28
+ return vf
29
+ except ImportError as err:
30
+ msg = (
31
+ 'The "vegafusion" data transformer and chart.transformed_data feature requires\n'
32
+ f"version {min_version} or greater of the 'vegafusion-python-embed' and 'vegafusion' packages.\n"
33
+ "These can be installed with pip using:\n"
34
+ f' pip install "vegafusion[embed]>={min_version}"\n'
35
+ "Or with conda using:\n"
36
+ f' conda install -c conda-forge "vegafusion-python-embed>={min_version}" '
37
+ f'"vegafusion>={min_version}"\n\n'
38
+ f"ImportError: {err.args[0]}"
39
+ )
40
+ raise ImportError(msg) from err
41
+
42
+
43
+ def import_vl_convert() -> ModuleType:
44
+ min_version = "1.6.0"
45
+ try:
46
+ version = importlib_version("vl-convert-python")
47
+ if Version(version) < Version(min_version):
48
+ msg = (
49
+ f"The vl-convert-python package must be version {min_version} or greater. "
50
+ f"Found version {version}"
51
+ )
52
+ raise RuntimeError(msg)
53
+ import vl_convert as vlc
54
+
55
+ return vlc
56
+ except ImportError as err:
57
+ msg = (
58
+ f"The vl-convert Vega-Lite compiler and file export feature requires\n"
59
+ f"version {min_version} or greater of the 'vl-convert-python' package. \n"
60
+ f"This can be installed with pip using:\n"
61
+ f' pip install "vl-convert-python>={min_version}"\n'
62
+ "or conda:\n"
63
+ f' conda install -c conda-forge "vl-convert-python>={min_version}"\n\n'
64
+ f"ImportError: {err.args[0]}"
65
+ )
66
+ raise ImportError(msg) from err
67
+
68
+
69
+ def vl_version_for_vl_convert() -> str:
70
+ from altair.vegalite import SCHEMA_VERSION
71
+
72
+ # Compute VlConvert's vl_version string (of the form 'v5_2')
73
+ # from SCHEMA_VERSION (of the form 'v5.2.0')
74
+ return "_".join(SCHEMA_VERSION.split(".")[:2])
75
+
76
+
77
+ def import_pyarrow_interchange() -> ModuleType:
78
+ min_version = "11.0.0"
79
+ try:
80
+ version = importlib_version("pyarrow")
81
+
82
+ if Version(version) < Version(min_version):
83
+ msg = (
84
+ f"The pyarrow package must be version {min_version} or greater. "
85
+ f"Found version {version}"
86
+ )
87
+ raise RuntimeError(msg)
88
+ import pyarrow.interchange as pi
89
+
90
+ return pi
91
+ except ImportError as err:
92
+ msg = (
93
+ f"Usage of the DataFrame Interchange Protocol requires\n"
94
+ f"version {min_version} or greater of the pyarrow package. \n"
95
+ f"This can be installed with pip using:\n"
96
+ f' pip install "pyarrow>={min_version}"\n'
97
+ "or conda:\n"
98
+ f' conda install -c conda-forge "pyarrow>={min_version}"\n\n'
99
+ f"ImportError: {err.args[0]}"
100
+ )
101
+ raise ImportError(msg) from err
102
+
103
+
104
+ def pyarrow_available() -> bool:
105
+ try:
106
+ import_pyarrow_interchange()
107
+ return True
108
+ except (ImportError, RuntimeError):
109
+ return False
videollama2/lib/python3.10/site-packages/altair/utils/_show.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import webbrowser
4
+ from http.server import BaseHTTPRequestHandler, HTTPServer
5
+ from typing import Iterable
6
+
7
+
8
+ def open_html_in_browser(
9
+ html: str | bytes,
10
+ using: str | Iterable[str] | None = None,
11
+ port: int | None = None,
12
+ ) -> None:
13
+ """
14
+ Display an html document in a web browser without creating a temp file.
15
+
16
+ Instantiates a simple http server and uses the webbrowser module to
17
+ open the server's URL
18
+
19
+ Parameters
20
+ ----------
21
+ html: str
22
+ HTML string to display
23
+ using: str or iterable of str
24
+ Name of the web browser to open (e.g. "chrome", "firefox", etc.).
25
+ If an iterable, choose the first browser available on the system.
26
+ If none, choose the system default browser.
27
+ port: int
28
+ Port to use. Defaults to a random port
29
+ """
30
+ # Encode html to bytes
31
+ html_bytes = html.encode("utf8") if isinstance(html, str) else html
32
+
33
+ browser = None
34
+
35
+ if using is None:
36
+ browser = webbrowser.get(None)
37
+ else:
38
+ # normalize using to an iterable
39
+ if isinstance(using, str):
40
+ using = [using]
41
+
42
+ for browser_key in using:
43
+ try:
44
+ browser = webbrowser.get(browser_key)
45
+ if browser is not None:
46
+ break
47
+ except webbrowser.Error:
48
+ pass
49
+
50
+ if browser is None:
51
+ raise ValueError("Failed to locate a browser with name in " + str(using))
52
+
53
+ class OneShotRequestHandler(BaseHTTPRequestHandler):
54
+ def do_GET(self) -> None:
55
+ self.send_response(200)
56
+ self.send_header("Content-type", "text/html")
57
+ self.end_headers()
58
+
59
+ bufferSize = 1024 * 1024
60
+ for i in range(0, len(html_bytes), bufferSize):
61
+ self.wfile.write(html_bytes[i : i + bufferSize])
62
+
63
+ def log_message(self, format, *args):
64
+ # Silence stderr logging
65
+ pass
66
+
67
+ # Use specified port if provided, otherwise choose a random port (port value of 0)
68
+ server = HTTPServer(
69
+ ("127.0.0.1", port if port is not None else 0), OneShotRequestHandler
70
+ )
71
+ browser.open(f"http://127.0.0.1:{server.server_port}")
72
+ server.handle_request()
videollama2/lib/python3.10/site-packages/altair/utils/_vegafusion_data.py ADDED
@@ -0,0 +1,281 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import uuid
4
+ from typing import (
5
+ TYPE_CHECKING,
6
+ Any,
7
+ Callable,
8
+ Final,
9
+ MutableMapping,
10
+ TypedDict,
11
+ Union,
12
+ overload,
13
+ )
14
+ from weakref import WeakValueDictionary
15
+
16
+ from altair.utils._importers import import_vegafusion
17
+ from altair.utils.core import DataFrameLike
18
+ from altair.utils.data import (
19
+ DataType,
20
+ MaxRowsError,
21
+ SupportsGeoInterface,
22
+ ToValuesReturnType,
23
+ )
24
+ from altair.vegalite.data import default_data_transformer
25
+
26
+ if TYPE_CHECKING:
27
+ from narwhals.typing import IntoDataFrame
28
+
29
+ from vegafusion.runtime import ChartState # type: ignore
30
+
31
+ # Temporary storage for dataframes that have been extracted
32
+ # from charts by the vegafusion data transformer. Use a WeakValueDictionary
33
+ # rather than a dict so that the Python interpreter is free to garbage
34
+ # collect the stored DataFrames.
35
+ extracted_inline_tables: MutableMapping[str, DataFrameLike] = WeakValueDictionary()
36
+
37
+ # Special URL prefix that VegaFusion uses to denote that a
38
+ # dataset in a Vega spec corresponds to an entry in the `inline_datasets`
39
+ # kwarg of vf.runtime.pre_transform_spec().
40
+ VEGAFUSION_PREFIX: Final = "vegafusion+dataset://"
41
+
42
+
43
+ class _ToVegaFusionReturnUrlDict(TypedDict):
44
+ url: str
45
+
46
+
47
+ _VegaFusionReturnType = Union[_ToVegaFusionReturnUrlDict, ToValuesReturnType]
48
+
49
+
50
+ @overload
51
+ def vegafusion_data_transformer(
52
+ data: None = ..., max_rows: int = ...
53
+ ) -> Callable[..., Any]: ...
54
+
55
+
56
+ @overload
57
+ def vegafusion_data_transformer(
58
+ data: DataFrameLike, max_rows: int = ...
59
+ ) -> ToValuesReturnType: ...
60
+
61
+
62
+ @overload
63
+ def vegafusion_data_transformer(
64
+ data: dict | IntoDataFrame | SupportsGeoInterface, max_rows: int = ...
65
+ ) -> _VegaFusionReturnType: ...
66
+
67
+
68
+ def vegafusion_data_transformer(
69
+ data: DataType | None = None, max_rows: int = 100000
70
+ ) -> Callable[..., Any] | _VegaFusionReturnType:
71
+ """VegaFusion Data Transformer."""
72
+ if data is None:
73
+ return vegafusion_data_transformer
74
+ elif isinstance(data, DataFrameLike) and not isinstance(data, SupportsGeoInterface):
75
+ table_name = f"table_{uuid.uuid4()}".replace("-", "_")
76
+ extracted_inline_tables[table_name] = data
77
+ return {"url": VEGAFUSION_PREFIX + table_name}
78
+ else:
79
+ # Use default transformer for geo interface objects
80
+ # # (e.g. a geopandas GeoDataFrame)
81
+ # Or if we don't recognize data type
82
+ return default_data_transformer(data)
83
+
84
+
85
+ def get_inline_table_names(vega_spec: dict[str, Any]) -> set[str]:
86
+ """
87
+ Get a set of the inline datasets names in the provided Vega spec.
88
+
89
+ Inline datasets are encoded as URLs that start with the table://
90
+ prefix.
91
+
92
+ Parameters
93
+ ----------
94
+ vega_spec: dict
95
+ A Vega specification dict
96
+
97
+ Returns
98
+ -------
99
+ set of str
100
+ Set of the names of the inline datasets that are referenced
101
+ in the specification.
102
+
103
+ Examples
104
+ --------
105
+ >>> spec = {
106
+ ... "data": [
107
+ ... {"name": "foo", "url": "https://path/to/file.csv"},
108
+ ... {"name": "bar", "url": "vegafusion+dataset://inline_dataset_123"},
109
+ ... ]
110
+ ... }
111
+ >>> get_inline_table_names(spec)
112
+ {'inline_dataset_123'}
113
+ """
114
+ table_names = set()
115
+
116
+ # Process datasets
117
+ for data in vega_spec.get("data", []):
118
+ url = data.get("url", "")
119
+ if url.startswith(VEGAFUSION_PREFIX):
120
+ name = url[len(VEGAFUSION_PREFIX) :]
121
+ table_names.add(name)
122
+
123
+ # Recursively process child marks, which may have their own datasets
124
+ for mark in vega_spec.get("marks", []):
125
+ table_names.update(get_inline_table_names(mark))
126
+
127
+ return table_names
128
+
129
+
130
+ def get_inline_tables(vega_spec: dict[str, Any]) -> dict[str, DataFrameLike]:
131
+ """
132
+ Get the inline tables referenced by a Vega specification.
133
+
134
+ Note: This function should only be called on a Vega spec that corresponds
135
+ to a chart that was processed by the vegafusion_data_transformer.
136
+ Furthermore, this function may only be called once per spec because
137
+ the returned dataframes are deleted from internal storage.
138
+
139
+ Parameters
140
+ ----------
141
+ vega_spec: dict
142
+ A Vega specification dict
143
+
144
+ Returns
145
+ -------
146
+ dict from str to dataframe
147
+ dict from inline dataset name to dataframe object
148
+ """
149
+ inline_names = get_inline_table_names(vega_spec)
150
+ # exclude named dataset that was provided by the user,
151
+ # or dataframes that have been deleted.
152
+ table_names = inline_names.intersection(extracted_inline_tables)
153
+ return {k: extracted_inline_tables.pop(k) for k in table_names}
154
+
155
+
156
+ def compile_to_vegafusion_chart_state(
157
+ vegalite_spec: dict[str, Any], local_tz: str
158
+ ) -> ChartState:
159
+ """
160
+ Compile a Vega-Lite spec to a VegaFusion ChartState.
161
+
162
+ Note: This function should only be called on a Vega-Lite spec
163
+ that was generated with the "vegafusion" data transformer enabled.
164
+ In particular, this spec may contain references to extract datasets
165
+ using table:// prefixed URLs.
166
+
167
+ Parameters
168
+ ----------
169
+ vegalite_spec: dict
170
+ A Vega-Lite spec that was generated from an Altair chart with
171
+ the "vegafusion" data transformer enabled
172
+ local_tz: str
173
+ Local timezone name (e.g. 'America/New_York')
174
+
175
+ Returns
176
+ -------
177
+ ChartState
178
+ A VegaFusion ChartState object
179
+ """
180
+ # Local import to avoid circular ImportError
181
+ from altair import data_transformers, vegalite_compilers
182
+
183
+ vf = import_vegafusion()
184
+
185
+ # Compile Vega-Lite spec to Vega
186
+ compiler = vegalite_compilers.get()
187
+ if compiler is None:
188
+ msg = "No active vega-lite compiler plugin found"
189
+ raise ValueError(msg)
190
+
191
+ vega_spec = compiler(vegalite_spec)
192
+
193
+ # Retrieve dict of inline tables referenced by the spec
194
+ inline_tables = get_inline_tables(vega_spec)
195
+
196
+ # Pre-evaluate transforms in vega spec with vegafusion
197
+ row_limit = data_transformers.options.get("max_rows", None)
198
+
199
+ chart_state = vf.runtime.new_chart_state(
200
+ vega_spec,
201
+ local_tz=local_tz,
202
+ inline_datasets=inline_tables,
203
+ row_limit=row_limit,
204
+ )
205
+
206
+ # Check from row limit warning and convert to MaxRowsError
207
+ handle_row_limit_exceeded(row_limit, chart_state.get_warnings())
208
+
209
+ return chart_state
210
+
211
+
212
+ def compile_with_vegafusion(vegalite_spec: dict[str, Any]) -> dict[str, Any]:
213
+ """
214
+ Compile a Vega-Lite spec to Vega and pre-transform with VegaFusion.
215
+
216
+ Note: This function should only be called on a Vega-Lite spec
217
+ that was generated with the "vegafusion" data transformer enabled.
218
+ In particular, this spec may contain references to extract datasets
219
+ using table:// prefixed URLs.
220
+
221
+ Parameters
222
+ ----------
223
+ vegalite_spec: dict
224
+ A Vega-Lite spec that was generated from an Altair chart with
225
+ the "vegafusion" data transformer enabled
226
+
227
+ Returns
228
+ -------
229
+ dict
230
+ A Vega spec that has been pre-transformed by VegaFusion
231
+ """
232
+ # Local import to avoid circular ImportError
233
+ from altair import data_transformers, vegalite_compilers
234
+
235
+ vf = import_vegafusion()
236
+
237
+ # Compile Vega-Lite spec to Vega
238
+ compiler = vegalite_compilers.get()
239
+ if compiler is None:
240
+ msg = "No active vega-lite compiler plugin found"
241
+ raise ValueError(msg)
242
+
243
+ vega_spec = compiler(vegalite_spec)
244
+
245
+ # Retrieve dict of inline tables referenced by the spec
246
+ inline_tables = get_inline_tables(vega_spec)
247
+
248
+ # Pre-evaluate transforms in vega spec with vegafusion
249
+ row_limit = data_transformers.options.get("max_rows", None)
250
+ transformed_vega_spec, warnings = vf.runtime.pre_transform_spec(
251
+ vega_spec,
252
+ vf.get_local_tz(),
253
+ inline_datasets=inline_tables,
254
+ row_limit=row_limit,
255
+ )
256
+
257
+ # Check from row limit warning and convert to MaxRowsError
258
+ handle_row_limit_exceeded(row_limit, warnings)
259
+
260
+ return transformed_vega_spec
261
+
262
+
263
+ def handle_row_limit_exceeded(row_limit: int, warnings: list):
264
+ for warning in warnings:
265
+ if warning.get("type") == "RowLimitExceeded":
266
+ msg = (
267
+ "The number of dataset rows after filtering and aggregation exceeds\n"
268
+ f"the current limit of {row_limit}. Try adding an aggregation to reduce\n"
269
+ "the size of the dataset that must be loaded into the browser. Or, disable\n"
270
+ "the limit by calling alt.data_transformers.disable_max_rows(). Note that\n"
271
+ "disabling this limit may cause the browser to freeze or crash."
272
+ )
273
+ raise MaxRowsError(msg)
274
+
275
+
276
+ def using_vegafusion() -> bool:
277
+ """Check whether the vegafusion data transformer is enabled."""
278
+ # Local import to avoid circular ImportError
279
+ from altair import data_transformers
280
+
281
+ return data_transformers.active == "vegafusion"
videollama2/lib/python3.10/site-packages/altair/utils/display.py ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import pkgutil
5
+ import textwrap
6
+ import uuid
7
+ from typing import Any, Callable, Dict, Tuple, Union
8
+ from typing_extensions import TypeAlias
9
+
10
+ from ._vegafusion_data import compile_with_vegafusion, using_vegafusion
11
+ from .mimebundle import spec_to_mimebundle
12
+ from .plugin_registry import PluginEnabler, PluginRegistry
13
+ from .schemapi import validate_jsonschema
14
+
15
+ # ==============================================================================
16
+ # Renderer registry
17
+ # ==============================================================================
18
+ # MimeBundleType needs to be the same as what are acceptable return values
19
+ # for _repr_mimebundle_,
20
+ # see https://ipython.readthedocs.io/en/stable/config/integrating.html#MyObject._repr_mimebundle_
21
+ MimeBundleDataType: TypeAlias = Dict[str, Any]
22
+ MimeBundleMetaDataType: TypeAlias = Dict[str, Any]
23
+ MimeBundleType: TypeAlias = Union[
24
+ MimeBundleDataType, Tuple[MimeBundleDataType, MimeBundleMetaDataType]
25
+ ]
26
+ RendererType: TypeAlias = Callable[..., MimeBundleType]
27
+ # Subtype of MimeBundleType as more specific in the values of the dictionaries
28
+
29
+ DefaultRendererReturnType: TypeAlias = Tuple[
30
+ Dict[str, Union[str, Dict[str, Any]]], Dict[str, Dict[str, Any]]
31
+ ]
32
+
33
+
34
+ class RendererRegistry(PluginRegistry[RendererType, MimeBundleType]):
35
+ entrypoint_err_messages = {
36
+ "notebook": textwrap.dedent(
37
+ """
38
+ To use the 'notebook' renderer, you must install the vega package
39
+ and the associated Jupyter extension.
40
+ See https://altair-viz.github.io/getting_started/installation.html
41
+ for more information.
42
+ """
43
+ ),
44
+ }
45
+
46
+ def set_embed_options(
47
+ self,
48
+ defaultStyle: bool | str | None = None,
49
+ renderer: str | None = None,
50
+ width: int | None = None,
51
+ height: int | None = None,
52
+ padding: int | None = None,
53
+ scaleFactor: float | None = None,
54
+ actions: bool | dict[str, bool] | None = None,
55
+ format_locale: str | dict | None = None,
56
+ time_format_locale: str | dict | None = None,
57
+ **kwargs,
58
+ ) -> PluginEnabler:
59
+ """
60
+ Set options for embeddings of Vega & Vega-Lite charts.
61
+
62
+ Options are fully documented at https://github.com/vega/vega-embed.
63
+ Similar to the `enable()` method, this can be used as either
64
+ a persistent global switch, or as a temporary local setting using
65
+ a context manager (i.e. a `with` statement).
66
+
67
+ Parameters
68
+ ----------
69
+ defaultStyle : bool or string
70
+ Specify a default stylesheet for embed actions.
71
+ renderer : string
72
+ The renderer to use for the view. One of "canvas" (default) or "svg"
73
+ width : integer
74
+ The view width in pixels
75
+ height : integer
76
+ The view height in pixels
77
+ padding : integer
78
+ The view padding in pixels
79
+ scaleFactor : number
80
+ The number by which to multiply the width and height (default 1)
81
+ of an exported PNG or SVG image.
82
+ actions : bool or dict
83
+ Determines if action links ("Export as PNG/SVG", "View Source",
84
+ "View Vega" (only for Vega-Lite), "Open in Vega Editor") are
85
+ included with the embedded view. If the value is true, all action
86
+ links will be shown and none if the value is false. This property
87
+ can take a key-value mapping object that maps keys (export, source,
88
+ compiled, editor) to boolean values for determining if
89
+ each action link should be shown.
90
+ format_locale : str or dict
91
+ d3-format locale name or dictionary. Defaults to "en-US" for United States English.
92
+ See https://github.com/d3/d3-format/tree/main/locale for available names and example
93
+ definitions.
94
+ time_format_locale : str or dict
95
+ d3-time-format locale name or dictionary. Defaults to "en-US" for United States English.
96
+ See https://github.com/d3/d3-time-format/tree/main/locale for available names and example
97
+ definitions.
98
+ **kwargs :
99
+ Additional options are passed directly to embed options.
100
+ """
101
+ options: dict[str, bool | str | float | dict[str, bool] | None] = {
102
+ "defaultStyle": defaultStyle,
103
+ "renderer": renderer,
104
+ "width": width,
105
+ "height": height,
106
+ "padding": padding,
107
+ "scaleFactor": scaleFactor,
108
+ "actions": actions,
109
+ "formatLocale": format_locale,
110
+ "timeFormatLocale": time_format_locale,
111
+ }
112
+ kwargs.update({key: val for key, val in options.items() if val is not None})
113
+ return self.enable(None, embed_options=kwargs)
114
+
115
+
116
+ # ==============================================================================
117
+ # VegaLite v1/v2 renderer logic
118
+ # ==============================================================================
119
+
120
+
121
+ class Displayable:
122
+ """
123
+ A base display class for VegaLite v1/v2.
124
+
125
+ This class takes a VegaLite v1/v2 spec and does the following:
126
+
127
+ 1. Optionally validates the spec against a schema.
128
+ 2. Uses the RendererPlugin to grab a renderer and call it when the
129
+ IPython/Jupyter display method (_repr_mimebundle_) is called.
130
+
131
+ The spec passed to this class must be fully schema compliant and already
132
+ have the data portion of the spec fully processed and ready to serialize.
133
+ In practice, this means, the data portion of the spec should have been passed
134
+ through appropriate data model transformers.
135
+ """
136
+
137
+ renderers: RendererRegistry | None = None
138
+ schema_path = ("altair", "")
139
+
140
+ def __init__(self, spec: dict[str, Any], validate: bool = False) -> None:
141
+ self.spec = spec
142
+ self.validate = validate
143
+ self._validate()
144
+
145
+ def _validate(self) -> None:
146
+ """Validate the spec against the schema."""
147
+ data = pkgutil.get_data(*self.schema_path)
148
+ assert data is not None
149
+ schema_dict: dict[str, Any] = json.loads(data.decode("utf-8"))
150
+ validate_jsonschema(
151
+ self.spec,
152
+ schema_dict,
153
+ )
154
+
155
+ def _repr_mimebundle_(
156
+ self, include: Any = None, exclude: Any = None
157
+ ) -> MimeBundleType:
158
+ """Return a MIME bundle for display in Jupyter frontends."""
159
+ if self.renderers is not None:
160
+ renderer_func = self.renderers.get()
161
+ assert renderer_func is not None
162
+ return renderer_func(self.spec)
163
+ else:
164
+ return {}
165
+
166
+
167
+ def default_renderer_base(
168
+ spec: dict[str, Any], mime_type: str, str_repr: str, **options
169
+ ) -> DefaultRendererReturnType:
170
+ """
171
+ A default renderer for Vega or VegaLite that works for modern frontends.
172
+
173
+ This renderer works with modern frontends (JupyterLab, nteract) that know
174
+ how to render the custom VegaLite MIME type listed above.
175
+ """
176
+ # Local import to avoid circular ImportError
177
+ from altair.vegalite.v5.display import VEGA_MIME_TYPE, VEGALITE_MIME_TYPE
178
+
179
+ assert isinstance(spec, dict)
180
+ bundle: dict[str, str | dict] = {}
181
+ metadata: dict[str, dict[str, Any]] = {}
182
+
183
+ if using_vegafusion():
184
+ spec = compile_with_vegafusion(spec)
185
+
186
+ # Swap mimetype from Vega-Lite to Vega.
187
+ # If mimetype was JSON, leave it alone
188
+ if mime_type == VEGALITE_MIME_TYPE:
189
+ mime_type = VEGA_MIME_TYPE
190
+
191
+ bundle[mime_type] = spec
192
+ bundle["text/plain"] = str_repr
193
+ if options:
194
+ metadata[mime_type] = options
195
+ return bundle, metadata
196
+
197
+
198
+ def json_renderer_base(
199
+ spec: dict[str, Any], str_repr: str, **options
200
+ ) -> DefaultRendererReturnType:
201
+ """
202
+ A renderer that returns a MIME type of application/json.
203
+
204
+ In JupyterLab/nteract this is rendered as a nice JSON tree.
205
+ """
206
+ return default_renderer_base(
207
+ spec, mime_type="application/json", str_repr=str_repr, **options
208
+ )
209
+
210
+
211
+ class HTMLRenderer:
212
+ """Object to render charts as HTML, with a unique output div each time."""
213
+
214
+ def __init__(self, output_div: str = "altair-viz-{}", **kwargs) -> None:
215
+ self._output_div = output_div
216
+ self.kwargs = kwargs
217
+
218
+ @property
219
+ def output_div(self) -> str:
220
+ return self._output_div.format(uuid.uuid4().hex)
221
+
222
+ def __call__(self, spec: dict[str, Any], **metadata) -> dict[str, str]:
223
+ kwargs = self.kwargs.copy()
224
+ kwargs.update(**metadata, output_div=self.output_div)
225
+ return spec_to_mimebundle(spec, format="html", **kwargs)
videollama2/lib/python3.10/site-packages/altair/utils/html.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from typing import Any, Literal
5
+
6
+ import jinja2
7
+
8
+ from altair.utils._importers import import_vl_convert, vl_version_for_vl_convert
9
+
10
+ TemplateName = Literal["standard", "universal", "inline"]
11
+ RenderMode = Literal["vega", "vega-lite"]
12
+
13
+ HTML_TEMPLATE = jinja2.Template(
14
+ """
15
+ {%- if fullhtml -%}
16
+ <!DOCTYPE html>
17
+ <html>
18
+ <head>
19
+ {%- endif %}
20
+ <style>
21
+ #{{ output_div }}.vega-embed {
22
+ width: 100%;
23
+ display: flex;
24
+ }
25
+
26
+ #{{ output_div }}.vega-embed details,
27
+ #{{ output_div }}.vega-embed details summary {
28
+ position: relative;
29
+ }
30
+ </style>
31
+ {%- if not requirejs %}
32
+ <script type="text/javascript" src="{{ base_url }}/vega@{{ vega_version }}"></script>
33
+ {%- if mode == 'vega-lite' %}
34
+ <script type="text/javascript" src="{{ base_url }}/vega-lite@{{ vegalite_version }}"></script>
35
+ {%- endif %}
36
+ <script type="text/javascript" src="{{ base_url }}/vega-embed@{{ vegaembed_version }}"></script>
37
+ {%- endif %}
38
+ {%- if fullhtml %}
39
+ {%- if requirejs %}
40
+ <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js"></script>
41
+ <script>
42
+ requirejs.config({
43
+ "paths": {
44
+ "vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
45
+ "vega-lib": "{{ base_url }}/vega-lib?noext",
46
+ "vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
47
+ "vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
48
+ }
49
+ });
50
+ </script>
51
+ {%- endif %}
52
+ </head>
53
+ <body>
54
+ {%- endif %}
55
+ <div id="{{ output_div }}"></div>
56
+ <script>
57
+ {%- if requirejs and not fullhtml %}
58
+ requirejs.config({
59
+ "paths": {
60
+ "vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
61
+ "vega-lib": "{{ base_url }}/vega-lib?noext",
62
+ "vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
63
+ "vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
64
+ }
65
+ });
66
+ {% endif %}
67
+ {% if requirejs -%}
68
+ require(['vega-embed'],
69
+ {%- else -%}
70
+ (
71
+ {%- endif -%}
72
+ function(vegaEmbed) {
73
+ var spec = {{ spec }};
74
+ var embedOpt = {{ embed_options }};
75
+
76
+ function showError(el, error){
77
+ el.innerHTML = ('<div style="color:red;">'
78
+ + '<p>JavaScript Error: ' + error.message + '</p>'
79
+ + "<p>This usually means there's a typo in your chart specification. "
80
+ + "See the javascript console for the full traceback.</p>"
81
+ + '</div>');
82
+ throw error;
83
+ }
84
+ const el = document.getElementById('{{ output_div }}');
85
+ vegaEmbed("#{{ output_div }}", spec, embedOpt)
86
+ .catch(error => showError(el, error));
87
+ }){% if not requirejs %}(vegaEmbed){% endif %};
88
+
89
+ </script>
90
+ {%- if fullhtml %}
91
+ </body>
92
+ </html>
93
+ {%- endif %}
94
+ """
95
+ )
96
+
97
+
98
+ HTML_TEMPLATE_UNIVERSAL = jinja2.Template(
99
+ """
100
+ <style>
101
+ #{{ output_div }}.vega-embed {
102
+ width: 100%;
103
+ display: flex;
104
+ }
105
+
106
+ #{{ output_div }}.vega-embed details,
107
+ #{{ output_div }}.vega-embed details summary {
108
+ position: relative;
109
+ }
110
+ </style>
111
+ <div id="{{ output_div }}"></div>
112
+ <script type="text/javascript">
113
+ var VEGA_DEBUG = (typeof VEGA_DEBUG == "undefined") ? {} : VEGA_DEBUG;
114
+ (function(spec, embedOpt){
115
+ let outputDiv = document.currentScript.previousElementSibling;
116
+ if (outputDiv.id !== "{{ output_div }}") {
117
+ outputDiv = document.getElementById("{{ output_div }}");
118
+ }
119
+ const paths = {
120
+ "vega": "{{ base_url }}/vega@{{ vega_version }}?noext",
121
+ "vega-lib": "{{ base_url }}/vega-lib?noext",
122
+ "vega-lite": "{{ base_url }}/vega-lite@{{ vegalite_version }}?noext",
123
+ "vega-embed": "{{ base_url }}/vega-embed@{{ vegaembed_version }}?noext",
124
+ };
125
+
126
+ function maybeLoadScript(lib, version) {
127
+ var key = `${lib.replace("-", "")}_version`;
128
+ return (VEGA_DEBUG[key] == version) ?
129
+ Promise.resolve(paths[lib]) :
130
+ new Promise(function(resolve, reject) {
131
+ var s = document.createElement('script');
132
+ document.getElementsByTagName("head")[0].appendChild(s);
133
+ s.async = true;
134
+ s.onload = () => {
135
+ VEGA_DEBUG[key] = version;
136
+ return resolve(paths[lib]);
137
+ };
138
+ s.onerror = () => reject(`Error loading script: ${paths[lib]}`);
139
+ s.src = paths[lib];
140
+ });
141
+ }
142
+
143
+ function showError(err) {
144
+ outputDiv.innerHTML = `<div class="error" style="color:red;">${err}</div>`;
145
+ throw err;
146
+ }
147
+
148
+ function displayChart(vegaEmbed) {
149
+ vegaEmbed(outputDiv, spec, embedOpt)
150
+ .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));
151
+ }
152
+
153
+ if(typeof define === "function" && define.amd) {
154
+ requirejs.config({paths});
155
+ require(["vega-embed"], displayChart, err => showError(`Error loading script: ${err.message}`));
156
+ } else {
157
+ maybeLoadScript("vega", "{{vega_version}}")
158
+ .then(() => maybeLoadScript("vega-lite", "{{vegalite_version}}"))
159
+ .then(() => maybeLoadScript("vega-embed", "{{vegaembed_version}}"))
160
+ .catch(showError)
161
+ .then(() => displayChart(vegaEmbed));
162
+ }
163
+ })({{ spec }}, {{ embed_options }});
164
+ </script>
165
+ """
166
+ )
167
+
168
+
169
+ # This is like the HTML_TEMPLATE template, but includes vega javascript inline
170
+ # so that the resulting file is not dependent on external resources. This was
171
+ # ported over from altair_saver.
172
+ #
173
+ # implies requirejs=False and full_html=True
174
+ INLINE_HTML_TEMPLATE = jinja2.Template(
175
+ """\
176
+ <!DOCTYPE html>
177
+ <html>
178
+ <head>
179
+ <style>
180
+ #{{ output_div }}.vega-embed {
181
+ width: 100%;
182
+ display: flex;
183
+ }
184
+
185
+ #{{ output_div }}.vega-embed details,
186
+ #{{ output_div }}.vega-embed details summary {
187
+ position: relative;
188
+ }
189
+ </style>
190
+ <script type="text/javascript">
191
+ // vega-embed.js bundle with Vega-Lite version v{{ vegalite_version }}
192
+ {{ vegaembed_script }}
193
+ </script>
194
+ </head>
195
+ <body>
196
+ <div class="vega-visualization" id="{{ output_div }}"></div>
197
+ <script type="text/javascript">
198
+ const spec = {{ spec }};
199
+ const embedOpt = {{ embed_options }};
200
+ vegaEmbed('#{{ output_div }}', spec, embedOpt).catch(console.error);
201
+ </script>
202
+ </body>
203
+ </html>
204
+ """
205
+ )
206
+
207
+
208
# Registry of the built-in HTML templates, selectable by name via the
# ``template`` argument of spec_to_html.
TEMPLATES: dict[TemplateName, jinja2.Template] = {
    "standard": HTML_TEMPLATE,
    "universal": HTML_TEMPLATE_UNIVERSAL,
    "inline": INLINE_HTML_TEMPLATE,
}
213
+
214
+
215
def spec_to_html(
    spec: dict[str, Any],
    mode: RenderMode,
    vega_version: str | None,
    vegaembed_version: str | None,
    vegalite_version: str | None = None,
    base_url: str = "https://cdn.jsdelivr.net/npm",
    output_div: str = "vis",
    embed_options: dict[str, Any] | None = None,
    json_kwds: dict[str, Any] | None = None,
    fullhtml: bool = True,
    requirejs: bool = False,
    template: jinja2.Template | TemplateName = "standard",
) -> str:
    """
    Embed a Vega/Vega-Lite spec into an HTML page.

    Parameters
    ----------
    spec : dict
        A dictionary representing a vega-lite plot spec.
    mode : string {'vega' | 'vega-lite'}
        The rendering mode; overridden by ``embed_options['mode']`` when present.
    vega_version : string
        The version of vega.js to use.
    vegaembed_version : string
        The version of vegaembed.js to use.
    vegalite_version : string (optional)
        The version of vegalite.js to use (required for mode='vega-lite').
    base_url : string (optional)
        The base url from which to load the javascript libraries.
    output_div : string (optional)
        The id of the div element where the plot will be shown.
    embed_options : dict (optional)
        Options passed to the vega-embed script; 'mode' defaults to ``mode``.
    json_kwds : dict (optional)
        Keywords passed to ``json.dumps`` when serializing the spec.
    fullhtml : boolean (optional)
        If True (default), return a complete html page; otherwise an
        embeddable snippet.
    requirejs : boolean (optional)
        If True, load libraries via requirejs instead of <script> tags.
    template : jinja2.Template or string (optional)
        One of {'universal', 'standard', 'inline'}, or a custom
        ``jinja2.Template`` object.

    Returns
    -------
    output : string
        An HTML string for rendering the chart.
    """
    embed_options = embed_options or {}
    json_kwds = json_kwds or {}

    # embed_options['mode'] wins over the argument; setdefault also records
    # the effective mode back into the options dict passed to vega-embed.
    mode = embed_options.setdefault("mode", mode)

    if mode not in {"vega", "vega-lite"}:
        msg = "mode must be either 'vega' or 'vega-lite'"
        raise ValueError(msg)
    if vega_version is None:
        msg = "must specify vega_version"
        raise ValueError(msg)
    if vegaembed_version is None:
        msg = "must specify vegaembed_version"
        raise ValueError(msg)
    if mode == "vega-lite" and vegalite_version is None:
        msg = "must specify vega-lite version for mode='vega-lite'"
        raise ValueError(msg)

    extra_context: dict[str, Any] = {}
    if template == "inline":
        # The inline template embeds the whole vega-embed JS bundle so the
        # resulting HTML works offline; requires the vl-convert package.
        vlc = import_vl_convert()
        extra_context["vegaembed_script"] = vlc.javascript_bundle(
            vl_version=vl_version_for_vl_convert()
        )

    # Named templates come from TEMPLATES; anything else must already be a
    # jinja2.Template-like object exposing .render().
    tpl = TEMPLATES.get(template, template)  # type: ignore[arg-type]
    if not hasattr(tpl, "render"):
        msg = f"Invalid template: {tpl}"
        raise ValueError(msg)

    return tpl.render(
        spec=json.dumps(spec, **json_kwds),
        embed_options=json.dumps(embed_options),
        mode=mode,
        vega_version=vega_version,
        vegalite_version=vegalite_version,
        vegaembed_version=vegaembed_version,
        base_url=base_url,
        output_div=output_div,
        fullhtml=fullhtml,
        requirejs=requirejs,
        **extra_context,
    )
videollama2/lib/python3.10/site-packages/altair/utils/plugin_registry.py ADDED
@@ -0,0 +1,277 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from functools import partial
4
+ from importlib.metadata import entry_points
5
+ from typing import TYPE_CHECKING, Any, Callable, Generic, cast
6
+ from typing_extensions import TypeAliasType, TypeIs, TypeVar
7
+
8
+ from altair.utils.deprecation import deprecated_warn
9
+
10
+ if TYPE_CHECKING:
11
+ from types import TracebackType
12
+
13
+ T = TypeVar("T")
14
+ R = TypeVar("R")
15
+ Plugin = TypeAliasType("Plugin", Callable[..., R], type_params=(R,))
16
+ PluginT = TypeVar("PluginT", bound=Plugin[Any])
17
+ IsPlugin = Callable[[object], TypeIs[Plugin[Any]]]
18
+
19
+
20
+ def _is_type(tp: type[T], /) -> Callable[[object], TypeIs[type[T]]]:
21
+ """
22
+ Converts a type to guard function.
23
+
24
+ Added for compatibility with original `PluginRegistry` default.
25
+ """
26
+
27
+ def func(obj: object, /) -> TypeIs[type[T]]:
28
+ return isinstance(obj, tp)
29
+
30
+ return func
31
+
32
+
33
class NoSuchEntryPoint(Exception):
    """Raised when a named entry point cannot be found in a group."""

    def __init__(self, group, name):
        # Keep both pieces around so callers can inspect what was missing.
        self.group = group
        self.name = name

    def __str__(self):
        return "No {!r} entry point found in group {!r}".format(self.name, self.group)
40
+
41
+
42
class PluginEnabler:
    """
    Context manager for enabling plugins.

    Lets ``registry.enable(name)`` be used as a context manager that
    temporarily enables a plugin and restores the previous registry state
    on exit::

        with plugins.enable("name"):
            do_something()  # 'name' plugin temporarily enabled
        # plugins back to original state
    """

    def __init__(self, registry: PluginRegistry, name: str, **options):
        self.registry: PluginRegistry = registry
        self.name: str = name
        self.options: dict[str, Any] = options
        # Snapshot the registry *before* switching so __exit__ can roll back.
        self.original_state: dict[str, Any] = registry._get_state()
        self.registry._enable(name, **options)

    def __enter__(self) -> PluginEnabler:
        return self

    def __exit__(self, typ: type, value: Exception, traceback: TracebackType) -> None:
        # Restore whatever was active before this context was entered.
        self.registry._set_state(self.original_state)

    def __repr__(self) -> str:
        return "{}.enable({!r})".format(self.registry.__class__.__name__, self.name)
69
+
70
+
71
class PluginRegistry(Generic[PluginT, R]):
    """
    A registry for plugins.

    This is a plugin registry that allows plugins to be loaded/registered
    in two ways:

    1. Through an explicit call to ``.register(name, value)``.
    2. By looking for other Python packages that are installed and provide
       a setuptools entry point group.

    When you create an instance of this class, provide the name of the
    entry point group to use::

        reg = PluginRegistry("my_entrypoint_group")

    """

    # this is a mapping of name to error message to allow custom error messages
    # in case an entrypoint is not found
    entrypoint_err_messages: dict[str, str] = {}

    # global settings is a key-value mapping of settings that are stored globally
    # in the registry rather than passed to the plugins
    _global_settings: dict[str, Any] = {}

    def __init__(
        self, entry_point_group: str = "", plugin_type: IsPlugin = callable
    ) -> None:
        """
        Create a PluginRegistry for a named entry point group.

        Parameters
        ----------
        entry_point_group: str
            The name of the entry point group.
        plugin_type
            A type narrowing function that will optionally be used for runtime
            type checking loaded plugins.

        References
        ----------
        https://typing.readthedocs.io/en/latest/spec/narrowing.html
        """
        self.entry_point_group: str = entry_point_group
        self.plugin_type: IsPlugin
        # Deprecated calling convention: a plain type used to be accepted as
        # `plugin_type`; warn and adapt it into an isinstance-based guard.
        if plugin_type is not callable and isinstance(plugin_type, type):
            msg = (
                f"Pass a callable `TypeIs` function to `plugin_type` instead.\n"
                f"{type(self).__name__!r}(plugin_type)\n\n"
                f"See also:\n"
                f"https://typing.readthedocs.io/en/latest/spec/narrowing.html\n"
                f"https://docs.astral.sh/ruff/rules/assert/"
            )
            deprecated_warn(msg, version="5.4.0")
            self.plugin_type = cast(IsPlugin, _is_type(plugin_type))
        else:
            self.plugin_type = plugin_type
        self._active: Plugin[R] | None = None
        self._active_name: str = ""
        self._plugins: dict[str, PluginT] = {}
        self._options: dict[str, Any] = {}
        # Copy so per-instance mutations never leak back into the class-level
        # defaults shared by other registries.
        self._global_settings: dict[str, Any] = self.__class__._global_settings.copy()

    def register(self, name: str, value: PluginT | None) -> PluginT | None:
        """
        Register a plugin by name and value.

        This method is used for explicit registration of a plugin and shouldn't be
        used to manage entry point managed plugins, which are auto-loaded.

        Parameters
        ----------
        name: str
            The name of the plugin.
        value: PluginType or None
            The actual plugin object to register or None to unregister that plugin.

        Returns
        -------
        plugin: PluginType or None
            The plugin that was registered or unregistered.
        """
        if value is None:
            # Unregister; silently a no-op when the name was never registered.
            return self._plugins.pop(name, None)
        elif self.plugin_type(value):
            self._plugins[name] = value
            return value
        else:
            msg = f"{type(value).__name__!r} is not compatible with {type(self).__name__!r}"
            raise TypeError(msg)

    def names(self) -> list[str]:
        """List the names of the registered and entry points plugins."""
        exts = list(self._plugins.keys())
        e_points = importlib_metadata_get(self.entry_point_group)
        more_exts = [ep.name for ep in e_points]
        exts.extend(more_exts)
        # Deduplicate names that are both registered and entry-point provided.
        return sorted(set(exts))

    def _get_state(self) -> dict[str, Any]:
        """Return a dictionary representing the current state of the registry."""
        # Containers are copied so later registry mutations don't corrupt a
        # snapshot held by PluginEnabler.
        return {
            "_active": self._active,
            "_active_name": self._active_name,
            "_plugins": self._plugins.copy(),
            "_options": self._options.copy(),
            "_global_settings": self._global_settings.copy(),
        }

    def _set_state(self, state: dict[str, Any]) -> None:
        """Reset the state of the registry."""
        # NOTE(review): `assert` is stripped under `python -O`; this guards
        # trusted internal snapshots only, not user input.
        assert set(state.keys()) == {
            "_active",
            "_active_name",
            "_plugins",
            "_options",
            "_global_settings",
        }
        for key, val in state.items():
            setattr(self, key, val)

    def _enable(self, name: str, **options) -> None:
        # Lazily load the entry point only on first enable of this name.
        if name not in self._plugins:
            try:
                # Exactly one matching entry point is expected; the tuple
                # unpacking raises ValueError otherwise (0 or >1 matches).
                (ep,) = (
                    ep
                    for ep in importlib_metadata_get(self.entry_point_group)
                    if ep.name == name
                )
            except ValueError as err:
                if name in self.entrypoint_err_messages:
                    raise ValueError(self.entrypoint_err_messages[name]) from err
                else:
                    raise NoSuchEntryPoint(self.entry_point_group, name) from err
            value = cast(PluginT, ep.load())
            self.register(name, value)
        self._active_name = name
        self._active = self._plugins[name]
        # Route recognized global settings out of the per-call options.
        for key in set(options.keys()) & set(self._global_settings.keys()):
            self._global_settings[key] = options.pop(key)
        self._options = options

    def enable(self, name: str | None = None, **options) -> PluginEnabler:
        """
        Enable a plugin by name.

        This can be either called directly, or used as a context manager.

        Parameters
        ----------
        name : string (optional)
            The name of the plugin to enable. If not specified, then use the
            current active name.
        **options :
            Any additional parameters will be passed to the plugin as keyword
            arguments

        Returns
        -------
        PluginEnabler:
            An object that allows enable() to be used as a context manager
        """
        if name is None:
            name = self.active
        return PluginEnabler(self, name, **options)

    @property
    def active(self) -> str:
        """Return the name of the currently active plugin."""
        return self._active_name

    @property
    def options(self) -> dict[str, Any]:
        """Return the current options dictionary."""
        return self._options

    def get(self) -> partial[R] | Plugin[R] | None:
        """Return the currently active plugin."""
        if (func := self._active) and self.plugin_type(func):
            # Bind stored options so callers can invoke the plugin bare.
            return partial(func, **self._options) if self._options else func
        elif self._active is not None:
            msg = (
                f"{type(self).__name__!r} requires all plugins to be callable objects, "
                f"but {type(self._active).__name__!r} is not callable."
            )
            raise TypeError(msg)
        elif TYPE_CHECKING:
            # NOTE: The `None` return is implicit, but `mypy` isn't satisfied
            # - `ruff` will factor out explicit `None` return
            # - `pyright` has no issue
            raise NotImplementedError

    def __repr__(self) -> str:
        return f"{type(self).__name__}(active={self.active!r}, registered={self.names()!r})"
266
+
267
+
268
def importlib_metadata_get(group):
    """Return the entry points of *group*, across importlib.metadata API versions."""
    eps = entry_points()
    # 'select' was introduced in Python 3.10 and 'get' got deprecated.
    # Feature-detect with hasattr rather than checking the Python version:
    # this also covers the importlib_metadata backport, which had a
    # different deprecation cycle for 'get'.
    if hasattr(eps, "select"):
        return eps.select(group=group)  # pyright: ignore
    return eps.get(group, [])
videollama2/lib/python3.10/site-packages/altair/utils/save.py ADDED
@@ -0,0 +1,224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import pathlib
5
+ import warnings
6
+ from typing import IO, TYPE_CHECKING, Any, Literal
7
+
8
+ from altair.utils._vegafusion_data import using_vegafusion
9
+ from altair.utils.deprecation import deprecated_warn
10
+ from altair.vegalite.v5.data import data_transformers
11
+
12
+ from .mimebundle import spec_to_mimebundle
13
+
14
+ if TYPE_CHECKING:
15
+ from pathlib import Path
16
+
17
+
18
def write_file_or_filename(
    fp: str | Path | IO,
    content: str | bytes,
    mode: str = "w",
    encoding: str | None = None,
) -> None:
    """Write *content* to *fp*, which may be a path string, ``pathlib.Path``, or open file object."""
    if not isinstance(fp, (str, pathlib.Path)):
        # Already a file-like object: delegate directly; the caller owns
        # the file's lifetime and mode.
        fp.write(content)
        return
    with pathlib.Path(fp).open(mode=mode, encoding=encoding) as out:
        out.write(content)
30
+
31
+
32
def set_inspect_format_argument(
    format: str | None, fp: str | Path | IO, inline: bool
) -> str:
    """Resolve and validate the ``format`` argument of the save function."""
    if format is None:
        # With a file-like object there is no filename to infer from.
        if not isinstance(fp, (str, pathlib.Path)):
            msg = (
                "must specify file format: "
                "['png', 'svg', 'pdf', 'html', 'json', 'vega']"
            )
            raise ValueError(msg)
        format = pathlib.Path(fp).suffix.lstrip(".")

    # ``inline`` only affects HTML output; warn when it would be ignored.
    if inline and format != "html":
        warnings.warn("inline argument ignored for non HTML formats.", stacklevel=1)

    return format
50
+
51
+
52
def set_inspect_mode_argument(
    mode: Literal["vega-lite"] | None,
    embed_options: dict[str, Any],
    spec: dict[str, Any],
    vegalite_version: str | None,
) -> Literal["vega-lite"]:
    """
    Resolve and validate the ``mode`` argument of the save function.

    Parameters
    ----------
    mode
        Explicit mode, or None to infer it from ``embed_options['mode']``,
        then from the spec's ``$schema`` URL, falling back to 'vega-lite'.
    embed_options
        The vega-embed options dictionary (may carry a 'mode' entry).
    spec
        The chart specification dictionary.
    vegalite_version
        The vega-lite library version; required when mode is 'vega-lite'.

    Returns
    -------
    The validated mode (always 'vega-lite').

    Raises
    ------
    ValueError
        If the resolved mode is not 'vega-lite', or no vega-lite version
        was supplied.
    """
    if mode is None:
        if "mode" in embed_options:
            mode = embed_options["mode"]
        elif "$schema" in spec:
            # e.g. ".../schema/vega-lite/v5.json" -> "vega-lite"
            mode = spec["$schema"].split("/")[-2]
        else:
            mode = "vega-lite"

    if mode != "vega-lite":
        msg = f"mode must be 'vega-lite', not '{mode}'"
        raise ValueError(msg)

    # Past this point mode is guaranteed to be 'vega-lite', so the original
    # `mode == "vega-lite" and ...` guard was redundant and is dropped.
    if vegalite_version is None:
        msg = "must specify vega-lite version"
        raise ValueError(msg)

    return mode
76
+
77
+
78
def save(
    chart,
    fp: str | Path | IO,
    vega_version: str | None,
    vegaembed_version: str | None,
    format: Literal["json", "html", "png", "svg", "pdf"] | None = None,
    mode: Literal["vega-lite"] | None = None,
    vegalite_version: str | None = None,
    embed_options: dict | None = None,
    json_kwds: dict | None = None,
    scale_factor: float = 1,
    engine: Literal["vl-convert"] | None = None,
    inline: bool = False,
    **kwargs,
) -> None:
    """
    Save a chart to file in a variety of formats.

    Supported formats are [json, html, png, svg, pdf]

    Parameters
    ----------
    chart : alt.Chart
        the chart instance to save
    fp : string filename, pathlib.Path or file-like object
        file to which to write the chart.
    format : string (optional)
        the format to write: one of ['json', 'html', 'png', 'svg', 'pdf'].
        If not specified, the format will be determined from the filename.
    mode : string (optional)
        Must be 'vega-lite'. If not specified, then infer the mode from
        the '$schema' property of the spec, or the ``opt`` dictionary.
        If it's not specified in either of those places, then use 'vega-lite'.
    vega_version : string (optional)
        For html output, the version of vega.js to use
    vegalite_version : string (optional)
        For html output, the version of vegalite.js to use
    vegaembed_version : string (optional)
        For html output, the version of vegaembed.js to use
    embed_options : dict (optional)
        The vegaEmbed options dictionary. Default is {}
        (See https://github.com/vega/vega-embed for details)
    json_kwds : dict (optional)
        Additional keyword arguments are passed to the output method
        associated with the specified format.
    scale_factor : float (optional)
        scale_factor to use to change size/resolution of png or svg output
    engine: string {'vl-convert'}
        the conversion engine to use for 'png', 'svg', and 'pdf' formats
    inline: bool (optional)
        If False (default), the required JavaScript libraries are loaded
        from a CDN location in the resulting html file.
        If True, the required JavaScript libraries are inlined into the resulting
        html file so that it will work without an internet connection.
        The vl-convert-python package is required if True.
    **kwargs :
        additional kwargs passed to spec_to_mimebundle.
    """
    # 'webdriver' was only meaningful for the removed altair_saver backend.
    if _ := kwargs.pop("webdriver", None):
        deprecated_warn(
            "The webdriver argument is not relevant for the new vl-convert engine which replaced altair_saver. "
            "The argument will be removed in a future release.",
            version="5.0.0",
        )

    json_kwds = json_kwds or {}
    encoding = kwargs.get("encoding", "utf-8")
    format = set_inspect_format_argument(format, fp, inline)  # type: ignore[assignment]

    def perform_save() -> None:
        # Serialize the chart; pre_transform=False defers vegafusion
        # transforms to spec_to_mimebundle.
        spec = chart.to_dict(context={"pre_transform": False})

        inner_mode = set_inspect_mode_argument(
            mode, embed_options or {}, spec, vegalite_version
        )

        if format == "json":
            json_spec = json.dumps(spec, **json_kwds)
            write_file_or_filename(fp, json_spec, mode="w", encoding=encoding)
        elif format == "html":
            if inline:
                kwargs["template"] = "inline"
            mb_html = spec_to_mimebundle(
                spec=spec,
                format=format,
                mode=inner_mode,
                vega_version=vega_version,
                vegalite_version=vegalite_version,
                vegaembed_version=vegaembed_version,
                embed_options=embed_options,
                json_kwds=json_kwds,
                **kwargs,
            )
            write_file_or_filename(
                fp, mb_html["text/html"], mode="w", encoding=encoding
            )
        elif format == "png":
            # For png, spec_to_mimebundle returns a (bundle, metadata) tuple.
            mb_png = spec_to_mimebundle(
                spec=spec,
                format=format,
                mode=inner_mode,
                vega_version=vega_version,
                vegalite_version=vegalite_version,
                vegaembed_version=vegaembed_version,
                embed_options=embed_options,
                scale_factor=scale_factor,
                engine=engine,
                **kwargs,
            )
            write_file_or_filename(fp, mb_png[0]["image/png"], mode="wb")
        elif format in {"svg", "pdf", "vega"}:
            mb_any = spec_to_mimebundle(
                spec=spec,
                format=format,
                mode=inner_mode,
                vega_version=vega_version,
                vegalite_version=vegalite_version,
                vegaembed_version=vegaembed_version,
                embed_options=embed_options,
                scale_factor=scale_factor,
                engine=engine,
                **kwargs,
            )
            if format == "pdf":
                write_file_or_filename(fp, mb_any["application/pdf"], mode="wb")
            else:
                # NOTE(review): format 'vega' also takes this branch and reads
                # the 'image/svg+xml' key — verify spec_to_mimebundle returns
                # that key for vega output, or this raises KeyError.
                write_file_or_filename(
                    fp, mb_any["image/svg+xml"], mode="w", encoding=encoding
                )
        else:
            msg = f"Unsupported format: '{format}'"
            raise ValueError(msg)

    if using_vegafusion():
        # When the vegafusion data transformer is enabled, transforms will be
        # evaluated during save and the resulting data will be included in the
        # vega specification that is saved.
        with data_transformers.disable_max_rows():
            perform_save()
    else:
        # Temporarily turn off any data transformers so that all data is inlined
        # when calling chart.to_dict. This is relevant for vl-convert which cannot access
        # local json files which could be created by a json data transformer. Furthermore,
        # we don't exit the with statement until this function completed due to the issue
        # described at https://github.com/vega/vl-convert/issues/31
        with data_transformers.enable("default"), data_transformers.disable_max_rows():
            perform_save()
videollama2/lib/python3.10/site-packages/altair/utils/schemapi.py ADDED
@@ -0,0 +1,1471 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # The contents of this file are automatically written by
2
+ # tools/generate_schema_wrapper.py. Do not modify directly.
3
+ from __future__ import annotations
4
+
5
+ import contextlib
6
+ import copy
7
+ import inspect
8
+ import json
9
+ import sys
10
+ import textwrap
11
+ from collections import defaultdict
12
+ from functools import partial
13
+ from importlib.metadata import version as importlib_version
14
+ from itertools import chain, zip_longest
15
+ from math import ceil
16
+ from typing import (
17
+ TYPE_CHECKING,
18
+ Any,
19
+ Dict,
20
+ Final,
21
+ Iterable,
22
+ Iterator,
23
+ List,
24
+ Literal,
25
+ Sequence,
26
+ TypeVar,
27
+ Union,
28
+ cast,
29
+ overload,
30
+ )
31
+ from typing_extensions import TypeAlias
32
+
33
+ import jsonschema
34
+ import jsonschema.exceptions
35
+ import jsonschema.validators
36
+ import narwhals.stable.v1 as nw
37
+ from packaging.version import Version
38
+
39
+ # This leads to circular imports with the vegalite module. Currently, this works
40
+ # but be aware that when you access it in this script, the vegalite module might
41
+ # not yet be fully instantiated in case your code is being executed during import time
42
+ from altair import vegalite
43
+
44
+ if TYPE_CHECKING:
45
+ from typing import ClassVar
46
+
47
+ from referencing import Registry
48
+
49
+ from altair.typing import ChartType
50
+
51
+ if sys.version_info >= (3, 13):
52
+ from typing import TypeIs
53
+ else:
54
+ from typing_extensions import TypeIs
55
+
56
+ if sys.version_info >= (3, 11):
57
+ from typing import Never, Self
58
+ else:
59
+ from typing_extensions import Never, Self
60
+
61
+ ValidationErrorList: TypeAlias = List[jsonschema.exceptions.ValidationError]
62
+ GroupedValidationErrors: TypeAlias = Dict[str, ValidationErrorList]
63
+
64
+ # This URI is arbitrary and could be anything else. It just cannot be an empty
65
+ # string as we need to reference the schema registered in
66
+ # the referencing.Registry.
67
+ _VEGA_LITE_ROOT_URI: Final = "urn:vega-lite-schema"
68
+
69
+ # Ideally, jsonschema specification would be parsed from the current Vega-Lite
70
+ # schema instead of being hardcoded here as a default value.
71
+ # However, due to circular imports between this module and the altair.vegalite
72
+ # modules, this information is not yet available at this point as altair.vegalite
73
+ # is only partially loaded. The draft version which is used is unlikely to
74
+ # change often so it's ok to keep this. There is also a test which validates
75
+ # that this value is always the same as in the Vega-Lite schema.
76
+ _DEFAULT_JSON_SCHEMA_DRAFT_URL: Final = "http://json-schema.org/draft-07/schema#"
77
+
78
+
79
+ # If DEBUG_MODE is True, then schema objects are converted to dict and
80
+ # validated at creation time. This slows things down, particularly for
81
+ # larger specs, but leads to much more useful tracebacks for the user.
82
+ # Individual schema classes can override this by setting the
83
+ # class-level _class_is_valid_at_instantiation attribute to False
84
+ DEBUG_MODE: bool = True
85
+
86
+ jsonschema_version_str = importlib_version("jsonschema")
87
+
88
+
89
def enable_debug_mode() -> None:
    """Turn on global debug mode: schema objects are validated at creation time."""
    global DEBUG_MODE
    DEBUG_MODE = True
92
+
93
+
94
def disable_debug_mode() -> None:
    """Turn off global debug mode: schema objects are not validated at creation time."""
    global DEBUG_MODE
    DEBUG_MODE = False
97
+
98
+
99
@contextlib.contextmanager
def debug_mode(arg: bool) -> Iterator[None]:
    """
    Temporarily set the global ``DEBUG_MODE`` flag to ``arg``.

    The previous value is restored when the ``with`` block exits, even if an
    exception is raised inside it.
    """
    global DEBUG_MODE

    saved = DEBUG_MODE
    DEBUG_MODE = arg
    try:
        yield
    finally:
        DEBUG_MODE = saved
108
+
109
+
110
@overload
def validate_jsonschema(
    spec: Any,
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = ...,
    *,
    raise_error: Literal[True] = ...,
) -> Never: ...


@overload
def validate_jsonschema(
    spec: Any,
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = ...,
    *,
    raise_error: Literal[False],
) -> jsonschema.exceptions.ValidationError | None: ...


def validate_jsonschema(
    spec,
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = None,
    *,
    raise_error: bool = True,
) -> jsonschema.exceptions.ValidationError | None:
    """
    Validate ``spec`` against ``schema`` in the context of ``rootschema``.

    Any errors found are deduplicated and narrowed down to the most relevant
    ones. One representative error is then either raised (the default) or
    returned, depending on ``raise_error``; ``None`` is returned when the
    spec is valid.
    """
    errors = _get_errors_from_spec(spec, schema, rootschema=rootschema)
    if not errors:
        return None

    # Reduce the raw error list to deduplicated groups of the most specific
    # errors, keyed by the JSON path of the offending element.
    grouped_errors = _deduplicate_errors(
        _subset_to_most_specific_json_paths(
            _group_errors_by_json_path(_get_leaves_of_error_tree(errors))
        )
    )

    # Nothing special about this first error; we simply need one object that
    # can be raised.  The full grouped mapping is attached as a private
    # attribute so SchemaValidationError can later craft a more helpful
    # message.  Setting a new attribute like this is not ideal (the object no
    # longer matches the declared ValidationError type), but refactoring to a
    # return-only design is left for the future.
    main_error: Any = next(iter(grouped_errors.values()))[0]
    main_error._all_errors = grouped_errors
    if raise_error:
        raise main_error
    return main_error
166
+
167
+
168
def _get_errors_from_spec(
    spec: dict[str, Any],
    schema: dict[str, Any],
    rootschema: dict[str, Any] | None = None,
) -> ValidationErrorList:
    """
    Uses the relevant jsonschema validator to validate the passed in spec against the schema using the rootschema to resolve references.

    The schema and rootschema themselves are not validated but instead considered as valid.
    Returns the full list of errors produced by ``validator.iter_errors``.
    """
    # We don't use jsonschema.validate as this would validate the schema itself.
    # Instead, we pass the schema directly to the validator class. This is done for
    # two reasons: The schema comes from Vega-Lite and is not based on the user
    # input, therefore there is no need to validate it in the first place. Furthermore,
    # the "uri-reference" format checker fails for some of the references as URIs in
    # "$ref" are not encoded,
    # e.g. '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef,
    # (Gradient|string|null)>' would be a valid $ref in a Vega-Lite schema but
    # it is not a valid URI reference due to the characters such as '<'.

    json_schema_draft_url = _get_json_schema_draft_url(rootschema or schema)
    # Pick the validator class matching the draft declared by the (root)schema.
    validator_cls = jsonschema.validators.validator_for(
        {"$schema": json_schema_draft_url}
    )
    validator_kwargs: dict[str, Any] = {}
    if hasattr(validator_cls, "FORMAT_CHECKER"):
        validator_kwargs["format_checker"] = validator_cls.FORMAT_CHECKER

    if _use_referencing_library():
        # jsonschema>=4.18: references are resolved through a referencing
        # Registry keyed by _VEGA_LITE_ROOT_URI, so all $ref values in the
        # subschema must be prefixed accordingly first.
        schema = _prepare_references_in_schema(schema)
        validator_kwargs["registry"] = _get_referencing_registry(
            rootschema or schema, json_schema_draft_url
        )

    else:
        # jsonschema<4.18: fall back to the (now deprecated) RefResolver.
        # No resolver is necessary if the schema is already the full schema
        validator_kwargs["resolver"] = (
            jsonschema.RefResolver.from_schema(rootschema)
            if rootschema is not None
            else None
        )

    validator = validator_cls(schema, **validator_kwargs)
    errors = list(validator.iter_errors(spec))
    return errors
213
+
214
+
215
def _get_json_schema_draft_url(schema: dict[str, Any]) -> str:
    """Return the draft URL declared in ``schema['$schema']``, falling back to the draft-07 default."""
    if "$schema" in schema:
        return schema["$schema"]
    return _DEFAULT_JSON_SCHEMA_DRAFT_URL
217
+
218
+
219
def _use_referencing_library() -> bool:
    """In version 4.18.0, the jsonschema package deprecated RefResolver in favor of the referencing library."""
    # jsonschema>=4.18 declares `referencing` as a dependency, so callers may
    # safely import it whenever this returns True.
    return Version(jsonschema_version_str) >= Version("4.18")
222
+
223
+
224
+ def _prepare_references_in_schema(schema: dict[str, Any]) -> dict[str, Any]:
225
+ # Create a copy so that $ref is not modified in the original schema in case
226
+ # that it would still reference a dictionary which might be attached to
227
+ # an Altair class _schema attribute
228
+ schema = copy.deepcopy(schema)
229
+
230
+ def _prepare_refs(d: dict[str, Any]) -> dict[str, Any]:
231
+ """
232
+ Add _VEGA_LITE_ROOT_URI in front of all $ref values.
233
+
234
+ This function recursively iterates through the whole dictionary.
235
+
236
+ $ref values can only be nested in dictionaries or lists
237
+ as the passed in `d` dictionary comes from the Vega-Lite json schema
238
+ and in json we only have arrays (-> lists in Python) and objects
239
+ (-> dictionaries in Python) which we need to iterate through.
240
+ """
241
+ for key, value in d.items():
242
+ if key == "$ref":
243
+ d[key] = _VEGA_LITE_ROOT_URI + d[key]
244
+ elif isinstance(value, dict):
245
+ d[key] = _prepare_refs(value)
246
+ elif isinstance(value, list):
247
+ prepared_values = []
248
+ for v in value:
249
+ if isinstance(v, dict):
250
+ v = _prepare_refs(v)
251
+ prepared_values.append(v)
252
+ d[key] = prepared_values
253
+ return d
254
+
255
+ schema = _prepare_refs(schema)
256
+ return schema
257
+
258
+
259
# We do not annotate the return value here as the referencing library is not always
# available and this function is only executed in those cases.
def _get_referencing_registry(
    rootschema: dict[str, Any], json_schema_draft_url: str | None = None
) -> Registry:
    """
    Build a ``referencing.Registry`` that exposes ``rootschema`` under ``_VEGA_LITE_ROOT_URI``.

    ``json_schema_draft_url`` selects the JSON Schema specification to use;
    when omitted it is derived from ``rootschema`` itself.
    """
    # Referencing is a dependency of newer jsonschema versions, starting with the
    # version that is specified in _use_referencing_library and we therefore
    # can expect that it is installed if the function returns True.
    # We ignore 'import' mypy errors which happen when the referencing library
    # is not installed. That's ok as in these cases this function is not called.
    # We also have to ignore 'unused-ignore' errors as mypy raises those in case
    # referencing is installed.
    import referencing  # type: ignore[import,unused-ignore]
    import referencing.jsonschema  # type: ignore[import,unused-ignore]

    if json_schema_draft_url is None:
        json_schema_draft_url = _get_json_schema_draft_url(rootschema)

    specification = referencing.jsonschema.specification_with(json_schema_draft_url)
    resource = specification.create_resource(rootschema)
    return referencing.Registry().with_resource(
        uri=_VEGA_LITE_ROOT_URI, resource=resource
    )
282
+
283
+
284
+ def _json_path(err: jsonschema.exceptions.ValidationError) -> str:
285
+ """
286
+ Drop in replacement for the .json_path property of the jsonschema ValidationError class.
287
+
288
+ This is not available as property for ValidationError with jsonschema<4.0.1.
289
+
290
+ More info, see https://github.com/vega/altair/issues/3038.
291
+ """
292
+ path = "$"
293
+ for elem in err.absolute_path:
294
+ if isinstance(elem, int):
295
+ path += "[" + str(elem) + "]"
296
+ else:
297
+ path += "." + elem
298
+ return path
299
+
300
+
301
+ def _group_errors_by_json_path(
302
+ errors: ValidationErrorList,
303
+ ) -> GroupedValidationErrors:
304
+ """
305
+ Groups errors by the `json_path` attribute of the jsonschema ValidationError class.
306
+
307
+ This attribute contains the path to the offending element within
308
+ a chart specification and can therefore be considered as an identifier of an
309
+ 'issue' in the chart that needs to be fixed.
310
+ """
311
+ errors_by_json_path = defaultdict(list)
312
+ for err in errors:
313
+ err_key = getattr(err, "json_path", _json_path(err))
314
+ errors_by_json_path[err_key].append(err)
315
+ return dict(errors_by_json_path)
316
+
317
+
318
+ def _get_leaves_of_error_tree(
319
+ errors: ValidationErrorList,
320
+ ) -> ValidationErrorList:
321
+ """
322
+ For each error in `errors`, it traverses down the "error tree" that is generated by the jsonschema library to find and return all "leaf" errors.
323
+
324
+ These are errors which have no further errors that caused it and so they are the most specific errors
325
+ with the most specific error messages.
326
+ """
327
+ leaves: ValidationErrorList = []
328
+ for err in errors:
329
+ if err.context:
330
+ # This means that the error `err` was caused by errors in subschemas.
331
+ # The list of errors from the subschemas are available in the property
332
+ # `context`.
333
+ leaves.extend(_get_leaves_of_error_tree(err.context))
334
+ else:
335
+ leaves.append(err)
336
+ return leaves
337
+
338
+
339
def _subset_to_most_specific_json_paths(
    errors_by_json_path: GroupedValidationErrors,
) -> GroupedValidationErrors:
    """
    Drop entries whose json path is fully contained at the start of another entry's json path.

    For example, if `errors_by_json_path` has the keys `$.encoding.X` and
    `$.encoding.X.tooltip`, only the latter is kept, under the assumption
    that more specific json paths give more helpful error messages to the user.
    """
    all_paths = list(errors_by_json_path)
    return {
        json_path: errs
        for json_path, errs in errors_by_json_path.items()
        if not _contained_at_start_of_one_of_other_values(json_path, all_paths)
    }
357
+
358
+
359
+ def _contained_at_start_of_one_of_other_values(x: str, values: Sequence[str]) -> bool:
360
+ # Does not count as "contained at start of other value" if the values are
361
+ # the same. These cases should be handled separately
362
+ return any(value.startswith(x) for value in values if x != value)
363
+
364
+
365
def _deduplicate_errors(
    grouped_errors: GroupedValidationErrors,
) -> GroupedValidationErrors:
    """
    Remove errors with very similar messages or which are in general not helpful for a user.

    This function removes as many of these cases as possible and can be
    extended over time to handle new cases that come up.
    """
    # Validator-specific deduplication strategies.
    strategies = {
        "enum": _deduplicate_enum_errors,
        "additionalProperties": _deduplicate_additional_properties_errors,
    }

    result: GroupedValidationErrors = {}
    for json_path, element_errors in grouped_errors.items():
        deduplicated: ValidationErrorList = []
        for validator, errs in _group_errors_by_validator(element_errors).items():
            strategy = strategies.get(validator)
            if strategy is not None:
                errs = strategy(errs)
            deduplicated.extend(_deduplicate_by_message(errs))

        # Remove any "'value' is a required property" errors as these are
        # unlikely to be the relevant ones for the user. They come from
        # validation against a schema definition where the output of
        # `alt.value` would be valid. However, if a user uses `alt.value`,
        # the `value` keyword is included automatically from that function
        # and so it's unlikely that this was what the user intended if the
        # keyword is not present in the first place.
        result[json_path] = [
            err for err in deduplicated if not _is_required_value_error(err)
        ]
    return result
402
+
403
+
404
+ def _is_required_value_error(err: jsonschema.exceptions.ValidationError) -> bool:
405
+ return err.validator == "required" and err.validator_value == ["value"]
406
+
407
+
408
+ def _group_errors_by_validator(errors: ValidationErrorList) -> GroupedValidationErrors:
409
+ """
410
+ Groups the errors by the json schema "validator" that casued the error.
411
+
412
+ For example if the error is that a value is not one of an enumeration in the json schema
413
+ then the "validator" is `"enum"`, if the error is due to an unknown property that
414
+ was set although no additional properties are allowed then "validator" is
415
+ `"additionalProperties`, etc.
416
+ """
417
+ errors_by_validator: defaultdict[str, ValidationErrorList] = defaultdict(list)
418
+ for err in errors:
419
+ # Ignore mypy error as err.validator as it wrongly sees err.validator
420
+ # as of type Optional[Validator] instead of str which it is according
421
+ # to the documentation and all tested cases
422
+ errors_by_validator[err.validator].append(err) # type: ignore[index]
423
+ return dict(errors_by_validator)
424
+
425
+
426
def _deduplicate_enum_errors(errors: ValidationErrorList) -> ValidationErrorList:
    """
    Remove enum errors whose accepted values are a subset of another error's accepted values.

    For example, if one error has `validator_value` (i.e. accepted values)
    ["A", "B"] and another one ["A", "B", "C"], the first is dropped and only
    the error with ["A", "B", "C"] remains.
    """
    if len(errors) <= 1:
        return errors
    # Values (and therefore `validator_value`) of an enum are always arrays, see
    # https://json-schema.org/understanding-json-schema/reference/generic.html#enumerated-values
    # which is why joining them is safe.
    joined = [",".join(err.validator_value) for err in errors]  # type: ignore
    return [
        err
        for joined_value, err in zip(joined, errors)
        if not _contained_at_start_of_one_of_other_values(joined_value, joined)
    ]
445
+
446
+
447
+ def _deduplicate_additional_properties_errors(
448
+ errors: ValidationErrorList,
449
+ ) -> ValidationErrorList:
450
+ """
451
+ If there are multiple additional property errors it usually means that the offending element was validated against multiple schemas and its parent is a common anyOf validator.
452
+
453
+ The error messages produced from these cases are usually
454
+ very similar and we just take the shortest one. For example,
455
+ the following 3 errors are raised for the `unknown` channel option in
456
+ `alt.X("variety", unknown=2)`:
457
+ - "Additional properties are not allowed ('unknown' was unexpected)"
458
+ - "Additional properties are not allowed ('field', 'unknown' were unexpected)"
459
+ - "Additional properties are not allowed ('field', 'type', 'unknown' were unexpected)".
460
+ """
461
+ if len(errors) > 1:
462
+ # Test if all parent errors are the same anyOf error and only do
463
+ # the prioritization in these cases. Can't think of a chart spec where this
464
+ # would not be the case but still allow for it below to not break anything.
465
+ parent = errors[0].parent
466
+ if (
467
+ parent is not None
468
+ and parent.validator == "anyOf"
469
+ # Use [1:] as don't have to check for first error as it was used
470
+ # above to define `parent`
471
+ and all(err.parent is parent for err in errors[1:])
472
+ ):
473
+ errors = [min(errors, key=lambda x: len(x.message))]
474
+ return errors
475
+
476
+
477
+ def _deduplicate_by_message(errors: ValidationErrorList) -> ValidationErrorList:
478
+ """Deduplicate errors by message. This keeps the original order in case it was chosen intentionally."""
479
+ return list({e.message: e for e in errors}.values())
480
+
481
+
482
+ def _subclasses(cls: type[Any]) -> Iterator[type[Any]]:
483
+ """Breadth-first sequence of all classes which inherit from cls."""
484
+ seen = set()
485
+ current_set = {cls}
486
+ while current_set:
487
+ seen |= current_set
488
+ current_set = set.union(*(set(cls.__subclasses__()) for cls in current_set))
489
+ for cls in current_set - seen:
490
+ yield cls
491
+
492
+
493
def _from_array_like(obj: Iterable[Any], /) -> list[Any]:
    """Convert ``obj`` to a plain list, using narwhals for native series objects."""
    try:
        return nw.from_native(obj, strict=True, series_only=True).to_list()
    except TypeError:
        # Objects narwhals cannot interpret as a series raise TypeError;
        # fall back to plain iteration.
        return list(obj)
499
+
500
+
501
def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) -> Any:  # noqa: C901
    """
    Convert an object to a dict representation.

    Parameters
    ----------
    obj
        The object to convert; containers are converted recursively and plain
        values are returned unchanged.
    context
        Passed through to ``SchemaBase.to_dict`` for nested schema objects.
    np_opt, pd_opt
        The ``numpy`` / ``pandas`` modules if available, else ``None``
        (this keeps both libraries optional dependencies).
    """
    # numpy-specific conversions first, but only if numpy is available.
    if np_opt is not None:
        np = np_opt
        if isinstance(obj, np.ndarray):
            return [_todict(v, context, np_opt, pd_opt) for v in obj]
        elif isinstance(obj, np.number):
            return float(obj)
        elif isinstance(obj, np.datetime64):
            result = str(obj)
            if "T" not in result:
                # See https://github.com/vega/altair/issues/1027 for why this is necessary.
                result += "T00:00:00"
            return result
    if isinstance(obj, SchemaBase):
        return obj.to_dict(validate=False, context=context)
    elif isinstance(obj, (list, tuple)):
        return [_todict(v, context, np_opt, pd_opt) for v in obj]
    elif isinstance(obj, dict):
        # Undefined-valued entries are dropped entirely from the output.
        return {
            k: _todict(v, context, np_opt, pd_opt)
            for k, v in obj.items()
            if v is not Undefined
        }
    elif (
        # Duck-typed altair objects (e.g. from the vegalite module) that know
        # how to serialize themselves.
        hasattr(obj, "to_dict")
        and (module_name := obj.__module__)
        and module_name.startswith("altair")
    ):
        return obj.to_dict()
    elif pd_opt is not None and isinstance(obj, pd_opt.Timestamp):
        return pd_opt.Timestamp(obj).isoformat()
    elif _is_iterable(obj, exclude=(str, bytes)):
        # Any remaining iterable (e.g. a native series) is materialized to a
        # list first and then converted recursively.
        return _todict(_from_array_like(obj), context, np_opt, pd_opt)
    else:
        return obj
537
+
538
+
539
def _resolve_references(
    schema: dict[str, Any], rootschema: dict[str, Any] | None = None
) -> dict[str, Any]:
    """
    Resolve schema references until there is no $ref anymore in the top-level of the dictionary.

    ``rootschema`` (or ``schema`` itself when omitted) provides the
    definitions that the references are resolved against.
    """
    if _use_referencing_library():
        # jsonschema>=4.18 path: resolve through a referencing.Registry.
        registry = _get_referencing_registry(rootschema or schema)
        # Using a different variable name to show that this is not the
        # jsonschema.RefResolver but instead a Resolver from the referencing
        # library
        referencing_resolver = registry.resolver()
        while "$ref" in schema:
            schema = referencing_resolver.lookup(
                _VEGA_LITE_ROOT_URI + schema["$ref"]
            ).contents
    else:
        # jsonschema<4.18 path: use the (now deprecated) RefResolver.
        resolver = jsonschema.RefResolver.from_schema(rootschema or schema)
        while "$ref" in schema:
            with resolver.resolving(schema["$ref"]) as resolved:
                schema = resolved
    return schema
559
+
560
+
561
class SchemaValidationError(jsonschema.ValidationError):
    """A wrapper for jsonschema.ValidationError with friendlier traceback."""

    def __init__(self, obj: SchemaBase, err: jsonschema.ValidationError) -> None:
        # Rebuild this instance from the wrapped error's contents, then
        # remember the object whose validation failed.
        super().__init__(**err._contents())
        self.obj = obj
        # Grouped errors attached by validate_jsonschema, falling back to a
        # single-entry mapping when only `err` itself is available.
        self._errors: GroupedValidationErrors = getattr(
            err, "_all_errors", {getattr(err, "json_path", _json_path(err)): [err]}
        )
        # This is the message from err
        self._original_message = self.message
        self.message = self._get_message()

    def __str__(self) -> str:
        return self.message

    def _get_message(self) -> str:
        """Assemble the final user-facing message from up to three error groups."""

        def indent_second_line_onwards(message: str, indent: int = 4) -> str:
            modified_lines: list[str] = []
            for idx, line in enumerate(message.split("\n")):
                if idx > 0 and len(line) > 0:
                    line = " " * indent + line
                modified_lines.append(line)
            return "\n".join(modified_lines)

        error_messages: list[str] = []
        # Only show a maximum of 3 errors as else the final message returned by this
        # method could get very long.
        for errors in list(self._errors.values())[:3]:
            error_messages.append(self._get_message_for_errors_group(errors))

        message = ""
        if len(error_messages) > 1:
            error_messages = [
                indent_second_line_onwards(f"Error {error_id}: {m}")
                for error_id, m in enumerate(error_messages, start=1)
            ]
            message += "Multiple errors were found.\n\n"
        message += "\n\n".join(error_messages)
        return message

    def _get_message_for_errors_group(
        self,
        errors: ValidationErrorList,
    ) -> str:
        """Return the message for one group of errors sharing a json path."""
        if errors[0].validator == "additionalProperties":
            # During development, we only found cases where an additionalProperties
            # error was raised if that was the only error for the offending instance
            # as identifiable by the json path. Therefore, we just check here the first
            # error. However, other constellations might exist in which case
            # this should be adapted so that other error messages are shown as well.
            message = self._get_additional_properties_error_message(errors[0])
        else:
            message = self._get_default_error_message(errors=errors)

        return message.strip()

    def _get_additional_properties_error_message(
        self,
        error: jsonschema.exceptions.ValidationError,
    ) -> str:
        """Output all existing parameters when an unknown parameter is specified."""
        altair_cls = self._get_altair_class_for_error(error)
        param_dict_keys = inspect.signature(altair_cls).parameters.keys()
        param_names_table = self._format_params_as_table(param_dict_keys)

        # Error messages for these errors look like this:
        # "Additional properties are not allowed ('unknown' was unexpected)"
        # Line below extracts "unknown" from this string
        parameter_name = error.message.split("('")[-1].split("'")[0]
        message = f"""\
`{altair_cls.__name__}` has no parameter named '{parameter_name}'

Existing parameter names are:
{param_names_table}
See the help for `{altair_cls.__name__}` to read the full description of these parameters"""
        return message

    def _get_altair_class_for_error(
        self, error: jsonschema.exceptions.ValidationError
    ) -> type[SchemaBase]:
        """
        Try to get the lowest class possible in the chart hierarchy so it can be displayed in the error message.

        This should lead to more informative error messages pointing the user closer to the source of the issue.
        """
        for prop_name in reversed(error.absolute_path):
            # Check if str as e.g. first item can be a 0
            if isinstance(prop_name, str):
                potential_class_name = prop_name[0].upper() + prop_name[1:]
                cls = getattr(vegalite, potential_class_name, None)
                if cls is not None:
                    break
        else:
            # Did not find a suitable class based on traversing the path so we fall
            # back on the class of the top-level object which created
            # the SchemaValidationError
            cls = self.obj.__class__
        return cls

    @staticmethod
    def _format_params_as_table(param_dict_keys: Iterable[str]) -> str:
        """Format param names into a table so that they are easier to read."""
        param_names: tuple[str, ...]
        name_lengths: tuple[int, ...]
        param_names, name_lengths = zip(
            *[
                (name, len(name))
                for name in param_dict_keys
                if name not in {"kwds", "self"}
            ]
        )
        # Worst case scenario with the same longest param name in the same
        # row for all columns
        max_name_length = max(name_lengths)
        max_column_width = 80
        # Output a square table if not too big (since it is easier to read)
        num_param_names = len(param_names)
        square_columns = int(ceil(num_param_names**0.5))
        columns = min(max_column_width // max_name_length, square_columns)

        # Compute roughly equal column heights to evenly divide the param names
        def split_into_equal_parts(n: int, p: int) -> list[int]:
            return [n // p + 1] * (n % p) + [n // p] * (p - n % p)

        column_heights = split_into_equal_parts(num_param_names, columns)

        # Section the param names into columns and compute their widths
        param_names_columns: list[tuple[str, ...]] = []
        column_max_widths: list[int] = []
        last_end_idx: int = 0
        for ch in column_heights:
            param_names_columns.append(param_names[last_end_idx : last_end_idx + ch])
            column_max_widths.append(
                max(len(param_name) for param_name in param_names_columns[-1])
            )
            last_end_idx = ch + last_end_idx

        # Transpose the param name columns into rows to facilitate looping
        param_names_rows: list[tuple[str, ...]] = []
        for li in zip_longest(*param_names_columns, fillvalue=""):
            param_names_rows.append(li)
        # Build the table as a string by iterating over and formatting the rows
        param_names_table: str = ""
        for param_names_row in param_names_rows:
            for num, param_name in enumerate(param_names_row):
                # Set column width based on the longest param in the column
                max_name_length_column = column_max_widths[num]
                column_pad = 3
                param_names_table += "{:<{}}".format(
                    param_name, max_name_length_column + column_pad
                )
                # Insert newlines and spacing after the last element in each row
                if num == (len(param_names_row) - 1):
                    param_names_table += "\n"
        return param_names_table

    def _get_default_error_message(
        self,
        errors: ValidationErrorList,
    ) -> str:
        """Build a summary line plus bullet points of valid values/types for the offending instance."""
        bullet_points: list[str] = []
        errors_by_validator = _group_errors_by_validator(errors)
        if "enum" in errors_by_validator:
            for error in errors_by_validator["enum"]:
                bullet_points.append(f"one of {error.validator_value}")

        if "type" in errors_by_validator:
            types = [f"'{err.validator_value}'" for err in errors_by_validator["type"]]
            point = "of type "
            if len(types) == 1:
                point += types[0]
            elif len(types) == 2:
                point += f"{types[0]} or {types[1]}"
            else:
                point += ", ".join(types[:-1]) + f", or {types[-1]}"
            bullet_points.append(point)

        # It should not matter which error is specifically used as they are all
        # about the same offending instance (i.e. invalid value), so we can just
        # take the first one
        error = errors[0]
        # Add a summary line when parameters are passed an invalid value
        # For example: "'asdf' is an invalid value for `stack`
        message = f"'{error.instance}' is an invalid value"
        if error.absolute_path:
            message += f" for `{error.absolute_path[-1]}`"

        # Add bullet points
        if len(bullet_points) == 0:
            message += ".\n\n"
        elif len(bullet_points) == 1:
            message += f". Valid values are {bullet_points[0]}.\n\n"
        else:
            # We don't use .capitalize below to make the first letter uppercase
            # as that makes the rest of the message lowercase
            bullet_points = [point[0].upper() + point[1:] for point in bullet_points]
            message += ". Valid values are:\n\n"
            message += "\n".join([f"- {point}" for point in bullet_points])
            message += "\n\n"

        # Add unformatted messages of any remaining errors which were not
        # considered so far. This is not expected to be used but more exists
        # as a fallback for cases which were not known during development.
        it = (
            "\n".join(e.message for e in errors)
            for validator, errors in errors_by_validator.items()
            if validator not in {"enum", "type"}
        )
        message += "".join(it)
        return message
772
+
773
+
774
class UndefinedType:
    """A singleton object for marking undefined parameters."""

    # Class-level cache holding the one and only instance.
    __instance = None

    def __new__(cls, *args, **kwargs) -> Self:
        # Lazily create the singleton on first construction; every later
        # call returns the cached instance.
        cached = cls.__instance
        if not isinstance(cached, cls):
            cached = object.__new__(cls, *args, **kwargs)
            cls.__instance = cached
        return cached

    def __repr__(self) -> str:
        return "Undefined"
786
+
787
+
788
# The singleton sentinel used across the package to mean "parameter not given".
Undefined = UndefinedType()
T = TypeVar("T")
# Public alias: a value of type T, or the Undefined sentinel.
Optional: TypeAlias = Union[T, UndefinedType]
"""One of ``T`` specified type(s), or the ``Undefined`` singleton.

Examples
--------
The parameters ``short``, ``long`` accept the same range of types::

    # ruff: noqa: UP006, UP007
    from altair.typing import Optional

    def func_1(
        short: Optional[str | bool | float | dict[str, Any] | SchemaBase] = Undefined,
        long: Union[
            str, bool, float, Dict[str, Any], SchemaBase, UndefinedType
        ] = Undefined,
    ): ...

This is distinct from `typing.Optional <https://typing.readthedocs.io/en/latest/spec/historical.html#union-and-optional>`__.

``altair.typing.Optional`` treats ``None`` like any other type::

    # ruff: noqa: UP006, UP007
    from altair.typing import Optional

    def func_2(
        short: Optional[str | float | dict[str, Any] | None | SchemaBase] = Undefined,
        long: Union[
            str, float, Dict[str, Any], None, SchemaBase, UndefinedType
        ] = Undefined,
    ): ...
"""
821
+
822
+
823
def is_undefined(obj: Any) -> TypeIs[UndefinedType]:
    """
    Check whether *obj* is the ``Undefined`` singleton, narrowing its type.

    Returns ``True`` only for the module-level ``Undefined`` sentinel; any
    other value (including other falsy values) yields ``False``.

    Notes
    -----
    - Using `obj is Undefined` does not narrow from `UndefinedType` in a union.
        - Due to the assumption that other `UndefinedType`'s could exist.
        - Current [typing spec advises](https://typing.readthedocs.io/en/latest/spec/concepts.html#support-for-singleton-types-in-unions) using an `Enum`.
        - Otherwise, requires an explicit guard to inform the type checker.
    """
    return obj is Undefined
835
+
836
+
837
@overload
def _shallow_copy(obj: _CopyImpl) -> _CopyImpl: ...
@overload
def _shallow_copy(obj: Any) -> Any: ...
def _shallow_copy(obj: _CopyImpl | Any) -> _CopyImpl | Any:
    """Copy one level of *obj*; other types are returned by reference."""
    # SchemaBase exposes its own shallow-copy entry point.
    if isinstance(obj, SchemaBase):
        return obj.copy(deep=False)
    # Built-in containers support a one-level .copy().
    if isinstance(obj, (list, dict)):
        return obj.copy()
    # Everything else (scalars, tuples, arbitrary objects) is shared.
    return obj
848
+
849
+
850
@overload
def _deep_copy(obj: _CopyImpl, by_ref: set[str]) -> _CopyImpl: ...
@overload
def _deep_copy(obj: Any, by_ref: set[str]) -> Any: ...
def _deep_copy(obj: _CopyImpl | Any, by_ref: set[str]) -> _CopyImpl | Any:
    # Recursively copy SchemaBase instances, lists and dicts; keys listed in
    # `by_ref` keep their values by reference. All other types are returned
    # uncopied.
    copy = partial(_deep_copy, by_ref=by_ref)
    if isinstance(obj, SchemaBase):
        if copier := getattr(obj, "__deepcopy__", None):
            # NOTE(review): `copier` is the *bound* __deepcopy__, so this call
            # passes `obj` as its (memo-like) argument — confirm intended.
            with debug_mode(False):
                return copier(obj)
        args = (copy(arg) for arg in obj._args)
        kwds = {k: (copy(v) if k not in by_ref else v) for k, v in obj._kwds.items()}
        # debug_mode(False) suppresses validation while re-instantiating.
        with debug_mode(False):
            return obj.__class__(*args, **kwds)
    elif isinstance(obj, list):
        return [copy(v) for v in obj]
    elif isinstance(obj, dict):
        return {k: (copy(v) if k not in by_ref else v) for k, v in obj.items()}
    else:
        return obj
870
+
871
+
872
class SchemaBase:
    """
    Base class for schema wrappers.

    Each derived class should set the _schema class attribute (and optionally
    the _rootschema class attribute) which is used for validation.
    """

    # JSON schema this wrapper validates against; subclasses must override.
    _schema: ClassVar[dict[str, Any] | Any] = None
    # Root schema providing definitions for $ref resolution (optional).
    _rootschema: ClassVar[dict[str, Any] | None] = None
    # When False, skip the eager to_dict validation in __init__.
    _class_is_valid_at_instantiation: ClassVar[bool] = True

    def __init__(self, *args: Any, **kwds: Any) -> None:
        # Two valid options for initialization, which should be handled by
        # derived classes:
        # - a single arg with no kwds, for, e.g. {'type': 'string'}
        # - zero args with zero or more kwds for {'type': 'object'}
        if self._schema is None:
            msg = (
                f"Cannot instantiate object of type {self.__class__}: "
                "_schema class attribute is not defined."
                ""
            )
            raise ValueError(msg)

        if kwds:
            assert len(args) == 0
        else:
            assert len(args) in {0, 1}

        # use object.__setattr__ because we override setattr below.
        object.__setattr__(self, "_args", args)
        object.__setattr__(self, "_kwds", kwds)

        # Eagerly validate at construction time when debug mode is on.
        if DEBUG_MODE and self._class_is_valid_at_instantiation:
            self.to_dict(validate=True)

    def copy(
        self, deep: bool | Iterable[Any] = True, ignore: list[str] | None = None
    ) -> Self:
        """
        Return a copy of the object.

        Parameters
        ----------
        deep : boolean or list, optional
            If True (default) then return a deep copy of all dict, list, and
            SchemaBase objects within the object structure.
            If False, then only copy the top object.
            If a list or iterable, then only copy the listed attributes.
        ignore : list, optional
            A list of keys for which the contents should not be copied, but
            only stored by reference.
        """
        if deep is True:
            return cast("Self", _deep_copy(self, set(ignore) if ignore else set()))
        # Shallow path: rebuild from the same args/kwds without validating.
        with debug_mode(False):
            copy = self.__class__(*self._args, **self._kwds)
        # If `deep` is an iterable of attribute names, shallow-copy just those.
        if _is_iterable(deep):
            for attr in deep:
                copy[attr] = _shallow_copy(copy._get(attr))
        return copy

    def _get(self, attr, default=Undefined):
        """Get an attribute, returning default if not present."""
        attr = self._kwds.get(attr, Undefined)
        if attr is Undefined:
            attr = default
        return attr

    def __getattr__(self, attr):
        # reminder: getattr is called after the normal lookups
        if attr == "_kwds":
            # Guard against infinite recursion before _kwds is set.
            raise AttributeError()
        if attr in self._kwds:
            return self._kwds[attr]
        else:
            try:
                _getattr = super().__getattr__  # pyright: ignore[reportAttributeAccessIssue]
            except AttributeError:
                _getattr = super().__getattribute__
            return _getattr(attr)

    def __setattr__(self, item, val) -> None:
        # All attribute writes are routed into the keyword store.
        self._kwds[item] = val

    def __getitem__(self, item):
        return self._kwds[item]

    def __setitem__(self, item, val) -> None:
        self._kwds[item] = val

    def __repr__(self) -> str:
        # Render keyword properties sorted by name, skipping Undefined ones;
        # value-only wrappers render as ClassName(value).
        name = type(self).__name__
        if kwds := self._kwds:
            it = (f"{k}: {v!r}" for k, v in sorted(kwds.items()) if v is not Undefined)
            args = ",\n".join(it).replace("\n", "\n ")
            LB, RB = "{", "}"
            return f"{name}({LB}\n {args}\n{RB})"
        else:
            return f"{name}({self._args[0]!r})"

    def __eq__(self, other: Any) -> bool:
        # NOTE(review): __eq__ is defined without __hash__, which makes
        # instances unhashable — presumably intentional; confirm.
        return (
            type(self) is type(other)
            and self._args == other._args
            and self._kwds == other._kwds
        )

    def to_dict(
        self,
        validate: bool = True,
        *,
        ignore: list[str] | None = None,
        context: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """
        Return a dictionary representation of the object.

        Parameters
        ----------
        validate : bool, optional
            If True (default), then validate the output dictionary
            against the schema.
        ignore : list[str], optional
            A list of keys to ignore. It is usually not needed
            to specify this argument as a user.
        context : dict[str, Any], optional
            A context dictionary. It is usually not needed
            to specify this argument as a user.

        Notes
        -----
        Technical: The ignore parameter will *not* be passed to child to_dict
        function calls.

        Returns
        -------
        dict
            The dictionary representation of this object

        Raises
        ------
        SchemaValidationError :
            if validate=True and the dict does not conform to the schema
        """
        if context is None:
            context = {}
        if ignore is None:
            ignore = []
        # The following return the package only if it has already been
        # imported - otherwise they return None. This is useful for
        # isinstance checks - for example, if pandas has not been imported,
        # then an object is definitely not a `pandas.Timestamp`.
        pd_opt = sys.modules.get("pandas")
        np_opt = sys.modules.get("numpy")

        if self._args and not self._kwds:
            result = _todict(
                self._args[0], context=context, np_opt=np_opt, pd_opt=pd_opt
            )
        elif not self._args:
            kwds = self._kwds.copy()
            # parsed_shorthand is added by FieldChannelMixin.
            # It's used below to replace shorthand with its long form equivalent
            # parsed_shorthand is removed from context if it exists so that it is
            # not passed to child to_dict function calls
            parsed_shorthand = context.pop("parsed_shorthand", {})
            # Prevent that pandas categorical data is automatically sorted
            # when a non-ordinal data type is specified manually
            # or if the encoding channel does not support sorting
            if "sort" in parsed_shorthand and (
                "sort" not in kwds or kwds["type"] not in {"ordinal", Undefined}
            ):
                parsed_shorthand.pop("sort")

            # Explicitly-set keywords win over parsed-shorthand values.
            kwds.update(
                {
                    k: v
                    for k, v in parsed_shorthand.items()
                    if kwds.get(k, Undefined) is Undefined
                }
            )
            kwds = {
                k: v for k, v in kwds.items() if k not in {*list(ignore), "shorthand"}
            }
            if "mark" in kwds and isinstance(kwds["mark"], str):
                kwds["mark"] = {"type": kwds["mark"]}
            result = _todict(kwds, context=context, np_opt=np_opt, pd_opt=pd_opt)
        else:
            msg = (
                f"{self.__class__} instance has both a value and properties : "
                "cannot serialize to dict"
            )
            raise ValueError(msg)
        if validate:
            try:
                self.validate(result)
            except jsonschema.ValidationError as err:
                # We do not raise `from err` as else the resulting
                # traceback is very long as it contains part
                # of the Vega-Lite schema. It would also first
                # show the less helpful ValidationError instead of
                # the more user friendly SchemaValidationError
                raise SchemaValidationError(self, err) from None
        return result

    def to_json(
        self,
        validate: bool = True,
        indent: int | str | None = 2,
        sort_keys: bool = True,
        *,
        ignore: list[str] | None = None,
        context: dict[str, Any] | None = None,
        **kwargs,
    ) -> str:
        """
        Emit the JSON representation for this object as a string.

        Parameters
        ----------
        validate : bool, optional
            If True (default), then validate the output dictionary
            against the schema.
        indent : int, optional
            The number of spaces of indentation to use. The default is 2.
        sort_keys : bool, optional
            If True (default), sort keys in the output.
        ignore : list[str], optional
            A list of keys to ignore. It is usually not needed
            to specify this argument as a user.
        context : dict[str, Any], optional
            A context dictionary. It is usually not needed
            to specify this argument as a user.
        **kwargs
            Additional keyword arguments are passed to ``json.dumps()``

        Notes
        -----
        Technical: The ignore parameter will *not* be passed to child to_dict
        function calls.

        Returns
        -------
        str
            The JSON specification of the chart object.
        """
        if ignore is None:
            ignore = []
        if context is None:
            context = {}
        dct = self.to_dict(validate=validate, ignore=ignore, context=context)
        return json.dumps(dct, indent=indent, sort_keys=sort_keys, **kwargs)

    @classmethod
    def _default_wrapper_classes(cls) -> Iterator[type[SchemaBase]]:
        """Return the set of classes used within cls.from_dict()."""
        return _subclasses(SchemaBase)

    @classmethod
    def from_dict(
        cls: type[TSchemaBase], dct: dict[str, Any], validate: bool = True
    ) -> TSchemaBase:
        """
        Construct class from a dictionary representation.

        Parameters
        ----------
        dct : dictionary
            The dict from which to construct the class
        validate : boolean
            If True (default), then validate the input against the schema.

        Returns
        -------
        obj : Schema object
            The wrapped schema

        Raises
        ------
        jsonschema.ValidationError :
            if validate=True and dct does not conform to the schema
        """
        if validate:
            cls.validate(dct)
        converter = _FromDict(cls._default_wrapper_classes())
        return converter.from_dict(dct, cls)

    @classmethod
    def from_json(
        cls,
        json_string: str,
        validate: bool = True,
        **kwargs: Any,
        # Type hints for this method would get rather complicated
        # if we want to provide a more specific return type
    ) -> ChartType:
        """
        Instantiate the object from a valid JSON string.

        Parameters
        ----------
        json_string : string
            The string containing a valid JSON chart specification.
        validate : boolean
            If True (default), then validate the input against the schema.
        **kwargs :
            Additional keyword arguments are passed to json.loads

        Returns
        -------
        chart : Chart object
            The altair Chart object built from the specification.
        """
        dct: dict[str, Any] = json.loads(json_string, **kwargs)
        return cls.from_dict(dct, validate=validate)  # type: ignore[return-value]

    @classmethod
    def validate(
        cls, instance: dict[str, Any], schema: dict[str, Any] | None = None
    ) -> None:
        """Validate the instance against the class schema in the context of the rootschema."""
        if schema is None:
            schema = cls._schema
        # For the benefit of mypy
        assert schema is not None
        validate_jsonschema(instance, schema, rootschema=cls._rootschema or cls._schema)

    @classmethod
    def resolve_references(cls, schema: dict[str, Any] | None = None) -> dict[str, Any]:
        """Resolve references in the context of this object's schema or root schema."""
        schema_to_pass = schema or cls._schema
        # For the benefit of mypy
        assert schema_to_pass is not None
        return _resolve_references(
            schema=schema_to_pass,
            rootschema=(cls._rootschema or cls._schema or schema),
        )

    @classmethod
    def validate_property(
        cls, name: str, value: Any, schema: dict[str, Any] | None = None
    ) -> None:
        """Validate a property against property schema in the context of the rootschema."""
        # The following return the package only if it has already been
        # imported - otherwise they return None. This is useful for
        # isinstance checks - for example, if pandas has not been imported,
        # then an object is definitely not a `pandas.Timestamp`.
        pd_opt = sys.modules.get("pandas")
        np_opt = sys.modules.get("numpy")
        value = _todict(value, context={}, np_opt=np_opt, pd_opt=pd_opt)
        props = cls.resolve_references(schema or cls._schema).get("properties", {})
        validate_jsonschema(
            value, props.get(name, {}), rootschema=cls._rootschema or cls._schema
        )

    def __dir__(self) -> list[str]:
        # Surface schema properties alongside regular attributes for completion.
        return sorted(chain(super().__dir__(), self._kwds))
1231
+
1232
+
1233
# Bound TypeVar so classmethods/helpers can return the concrete subclass
# they were invoked with, rather than the SchemaBase base type.
TSchemaBase = TypeVar("TSchemaBase", bound=SchemaBase)

_CopyImpl = TypeVar("_CopyImpl", SchemaBase, Dict[Any, Any], List[Any])
"""
Types which have an implementation in ``SchemaBase.copy()``.

All other types are returned **by reference**.
"""
1241
+
1242
+
1243
+ def _is_dict(obj: Any | dict[Any, Any]) -> TypeIs[dict[Any, Any]]:
1244
+ return isinstance(obj, dict)
1245
+
1246
+
1247
+ def _is_list(obj: Any | list[Any]) -> TypeIs[list[Any]]:
1248
+ return isinstance(obj, list)
1249
+
1250
+
1251
+ def _is_iterable(
1252
+ obj: Any, *, exclude: type | tuple[type, ...] = (str, bytes)
1253
+ ) -> TypeIs[Iterable[Any]]:
1254
+ return not isinstance(obj, exclude) and isinstance(obj, Iterable)
1255
+
1256
+
1257
+ def _passthrough(*args: Any, **kwds: Any) -> Any | dict[str, Any]:
1258
+ return args[0] if args else kwds
1259
+
1260
+
1261
class _FromDict:
    """
    Class used to construct SchemaBase class hierarchies from a dict.

    The primary purpose of using this class is to be able to build a hash table
    that maps schemas to their wrapper classes. The candidate classes are
    specified in the ``wrapper_classes`` positional-only argument to the constructor.
    """

    # Top-level schema keys excluded from hashing (metadata-only keys).
    _hash_exclude_keys = ("definitions", "title", "description", "$schema", "id")

    def __init__(self, wrapper_classes: Iterable[type[SchemaBase]], /) -> None:
        # Create a mapping of a schema hash to a list of matching classes
        # This lets us quickly determine the correct class to construct
        self.class_dict: dict[int, list[type[SchemaBase]]] = defaultdict(list)
        for tp in wrapper_classes:
            if tp._schema is not None:
                self.class_dict[self.hash_schema(tp._schema)].append(tp)

    @classmethod
    def hash_schema(cls, schema: dict[str, Any], use_json: bool = True) -> int:
        """
        Compute a python hash for a nested dictionary which properly handles dicts, lists, sets, and tuples.

        At the top level, the function excludes from the hashed schema all keys
        listed in `exclude_keys`.

        This implements two methods: one based on conversion to JSON, and one based
        on recursive conversions of unhashable to hashable types; the former seems
        to be slightly faster in several benchmarks.
        """
        if cls._hash_exclude_keys and isinstance(schema, dict):
            schema = {
                key: val
                for key, val in schema.items()
                if key not in cls._hash_exclude_keys
            }
        if use_json:
            s = json.dumps(schema, sort_keys=True)
            return hash(s)
        else:

            def _freeze(val):
                # Recursively convert unhashable containers to hashable ones.
                if isinstance(val, dict):
                    return frozenset((k, _freeze(v)) for k, v in val.items())
                elif isinstance(val, set):
                    return frozenset(map(_freeze, val))
                elif isinstance(val, (list, tuple)):
                    return tuple(map(_freeze, val))
                else:
                    return val

            return hash(_freeze(schema))

    @overload
    def from_dict(
        self,
        dct: TSchemaBase,
        tp: None = ...,
        schema: None = ...,
        rootschema: None = ...,
        default_class: Any = ...,
    ) -> TSchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any] | list[dict[str, Any]],
        tp: Any = ...,
        schema: Any = ...,
        rootschema: Any = ...,
        default_class: type[TSchemaBase] = ...,  # pyright: ignore[reportInvalidTypeVarUse]
    ) -> TSchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any],
        tp: None = ...,
        schema: dict[str, Any] = ...,
        rootschema: None = ...,
        default_class: Any = ...,
    ) -> SchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any],
        tp: type[TSchemaBase],
        schema: None = ...,
        rootschema: None = ...,
        default_class: Any = ...,
    ) -> TSchemaBase: ...
    @overload
    def from_dict(
        self,
        dct: dict[str, Any] | list[dict[str, Any]],
        tp: type[TSchemaBase],
        schema: dict[str, Any],
        rootschema: dict[str, Any] | None = ...,
        default_class: Any = ...,
    ) -> Never: ...
    def from_dict(
        self,
        dct: dict[str, Any] | list[dict[str, Any]] | TSchemaBase,
        tp: type[TSchemaBase] | None = None,
        schema: dict[str, Any] | None = None,
        rootschema: dict[str, Any] | None = None,
        default_class: Any = _passthrough,
    ) -> TSchemaBase | SchemaBase:
        """Construct an object from a dict representation."""
        target_tp: Any
        current_schema: dict[str, Any]
        if isinstance(dct, SchemaBase):
            # Already wrapped — return as-is.
            return dct
        elif tp is not None:
            # Explicit target type: its schema drives the conversion.
            # NOTE(review): when both `tp` and `schema` are given, `tp`
            # silently wins — confirm intended (an overload marks it Never).
            current_schema = tp._schema
            root_schema: dict[str, Any] = rootschema or tp._rootschema or current_schema
            target_tp = tp
        elif schema is not None:
            # If there are multiple matches, we use the first one in the dict.
            # Our class dict is constructed breadth-first from top to bottom,
            # so the first class that matches is the most general match.
            current_schema = schema
            root_schema = rootschema or current_schema
            matches = self.class_dict[self.hash_schema(current_schema)]
            target_tp = matches[0] if matches else default_class
        else:
            # NOTE(review): reached when *neither* tp nor schema was given,
            # but the message also says "not both" — text is misleading.
            msg = "Must provide either `tp` or `schema`, but not both."
            raise ValueError(msg)

        from_dict = partial(self.from_dict, rootschema=root_schema)
        # Can also return a list?
        resolved = _resolve_references(current_schema, root_schema)
        if "anyOf" in resolved or "oneOf" in resolved:
            schemas = resolved.get("anyOf", []) + resolved.get("oneOf", [])
            # Recurse into the first sub-schema that validates; the for/else
            # `else` runs on success of validation (no exception raised).
            for possible in schemas:
                try:
                    validate_jsonschema(dct, possible, rootschema=root_schema)
                except jsonschema.ValidationError:
                    continue
                else:
                    return from_dict(dct, schema=possible, default_class=target_tp)

        if _is_dict(dct):
            # TODO: handle schemas for additionalProperties/patternProperties
            props: dict[str, Any] = resolved.get("properties", {})
            kwds = {
                k: (from_dict(v, schema=props[k]) if k in props else v)
                for k, v in dct.items()
            }
            return target_tp(**kwds)
        elif _is_list(dct):
            item_schema: dict[str, Any] = resolved.get("items", {})
            return target_tp([from_dict(k, schema=item_schema) for k in dct])
        else:
            # NOTE: Unsure what is valid here
            return target_tp(dct)
1416
+
1417
+
1418
class _PropertySetter:
    """
    Descriptor that lets an encoding property be set via a method-like call.

    On attribute access (``__get__``) it attaches documentation and signature
    metadata from the matching helper class in the ``vegalite`` module; when
    called, it returns a copy of the owning object with the property set.
    """

    def __init__(self, prop: str, schema: dict[str, Any]) -> None:
        self.prop = prop
        self.schema = schema

    def __get__(self, obj, cls):
        # NOTE(review): the descriptor stores the accessing instance on
        # itself, so it carries per-access state — presumably single-threaded
        # usage is assumed; confirm.
        self.obj = obj
        self.cls = cls
        # The docs from the encoding class parameter (e.g. `bin` in X, Color,
        # etc); this provides a general description of the parameter.
        self.__doc__ = self.schema["description"].replace("__", "**")
        property_name = f"{self.prop}"[0].upper() + f"{self.prop}"[1:]
        if hasattr(vegalite, property_name):
            altair_prop = getattr(vegalite, property_name)
            # Add the docstring from the helper class (e.g. `BinParams`) so
            # that all the parameter names of the helper class are included in
            # the final docstring
            parameter_index = altair_prop.__doc__.find("Parameters\n")
            if parameter_index > -1:
                self.__doc__ = (
                    altair_prop.__doc__[:parameter_index].replace(" ", "")
                    + self.__doc__
                    + textwrap.dedent(
                        f"\n\n {altair_prop.__doc__[parameter_index:]}"
                    )
                )
            # For short docstrings such as Aggregate, Stack, et
            else:
                self.__doc__ = (
                    altair_prop.__doc__.replace(" ", "") + "\n" + self.__doc__
                )
            # Add signatures and tab completion for the method and parameter names
            self.__signature__ = inspect.signature(altair_prop)
            self.__wrapped__ = inspect.getfullargspec(altair_prop)
            self.__name__ = altair_prop.__name__
        else:
            # It seems like bandPosition is the only parameter that doesn't
            # have a helper class.
            pass
        return self

    def __call__(self, *args: Any, **kwargs: Any):
        # Return a modified copy rather than mutating the original object.
        obj = self.obj.copy()
        # TODO: use schema to validate
        obj[self.prop] = args[0] if args else kwargs
        return obj
1464
+
1465
+
1466
def with_property_setters(cls: type[TSchemaBase]) -> type[TSchemaBase]:
    """Decorator to add property setters to a Schema class."""
    # Attach one _PropertySetter descriptor per property in the resolved schema.
    properties = cls.resolve_references().get("properties", {})
    for name, sub_schema in properties.items():
        setattr(cls, name, _PropertySetter(name, sub_schema))
    return cls
videollama2/lib/python3.10/site-packages/altair/utils/selection.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from typing import Any, Dict, List, NewType
5
+
6
# Type representing the "{selection}_store" dataset that corresponds to a
# Vega-Lite selection
# (each entry is one selection record — presumably as emitted by the Vega
# view; verify against the consuming code)
Store = NewType("Store", List[Dict[str, Any]])
9
+
10
+
11
@dataclass(frozen=True, eq=True)
class IndexSelection:
    """
    Represents the state of an alt.selection_point() when neither the fields nor encodings arguments are specified.

    The value field is a list of zero-based indices into the
    selected dataset.

    Note: These indices only apply to the input DataFrame
    for charts that do not include aggregations (e.g. a scatter chart).
    """

    # Selection name, the selected zero-based row indices, and the raw store.
    name: str
    value: list[int]
    store: Store

    @staticmethod
    def from_vega(name: str, signal: dict[str, dict] | None, store: Store):
        """
        Construct an IndexSelection from the raw Vega signal and dataset values.

        Parameters
        ----------
        name: str
            The selection's name
        signal: dict or None
            The value of the Vega signal corresponding to the selection
        store: list
            The value of the Vega dataset corresponding to the selection.
            This dataset is named "{name}_store" in the Vega view.

        Returns
        -------
        IndexSelection
        """
        # A missing signal means nothing is selected; Vega's _vgsid_ ids are
        # 1-based, so shift them down to zero-based indices.
        selected = [] if signal is None else signal.get("vlPoint", {}).get("or", [])
        zero_based = [record["_vgsid_"] - 1 for record in selected]
        return IndexSelection(name=name, value=zero_based, store=store)
52
+
53
+
54
@dataclass(frozen=True, eq=True)
class PointSelection:
    """
    Represents the state of an alt.selection_point() when the fields or encodings arguments are specified.

    The value field is a list of dicts of the form:
        [{"dim1": 1, "dim2": "A"}, {"dim1": 2, "dim2": "BB"}]

    where "dim1" and "dim2" are dataset columns and the dict values
    correspond to the specific selected values.
    """

    # Selection name, the selected value records, and the raw store.
    name: str
    value: list[dict[str, Any]]
    store: Store

    @staticmethod
    def from_vega(name: str, signal: dict[str, dict] | None, store: Store):
        """
        Construct a PointSelection from the raw Vega signal and dataset values.

        Parameters
        ----------
        name: str
            The selection's name
        signal: dict or None
            The value of the Vega signal corresponding to the selection
        store: list
            The value of the Vega dataset corresponding to the selection.
            This dataset is named "{name}_store" in the Vega view.

        Returns
        -------
        PointSelection
        """
        # A missing signal means nothing is selected.
        if signal is None:
            selected = []
        else:
            selected = signal.get("vlPoint", {}).get("or", [])
        return PointSelection(name=name, value=selected, store=store)
91
+
92
+
93
@dataclass(frozen=True, eq=True)
class IntervalSelection:
    """
    Represents the state of an alt.selection_interval().

    The value field is a dict of the form:
        {"dim1": [0, 10], "dim2": ["A", "BB", "CCC"]}

    where "dim1" and "dim2" are dataset columns and the dict values
    correspond to the selected range.
    """

    # Selection name, the per-dimension selected ranges, and the raw store.
    name: str
    value: dict[str, list]
    store: Store

    @staticmethod
    def from_vega(name: str, signal: dict[str, list] | None, store: Store):
        """
        Construct an IntervalSelection from the raw Vega signal and dataset values.

        Parameters
        ----------
        name: str
            The selection's name
        signal: dict or None
            The value of the Vega signal corresponding to the selection
        store: list
            The value of the Vega dataset corresponding to the selection.
            This dataset is named "{name}_store" in the Vega view.

        Returns
        -------
        IntervalSelection
        """
        # A missing signal means nothing is selected: use an empty range dict.
        if signal is None:
            signal = {}
        return IntervalSelection(name=name, value=signal, store=store)
videollama2/lib/python3.10/site-packages/altair/utils/theme.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Utilities for registering and working with themes."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import sys
6
+ from typing import TYPE_CHECKING, Callable
7
+
8
+ from .plugin_registry import PluginRegistry
9
+
10
+ if sys.version_info >= (3, 11):
11
+ from typing import LiteralString
12
+ else:
13
+ from typing_extensions import LiteralString
14
+
15
+ if TYPE_CHECKING:
16
+ from altair.utils.plugin_registry import PluginEnabler
17
+ from altair.vegalite.v5.theme import AltairThemes, VegaThemes
18
+
19
+ ThemeType = Callable[..., dict]
20
+
21
+
22
class ThemeRegistry(PluginRegistry[ThemeType, dict]):
    # Registry of chart themes; a theme is a callable returning a config dict
    # (see the ThemeType alias above).

    def enable(
        self, name: LiteralString | AltairThemes | VegaThemes | None = None, **options
    ) -> PluginEnabler:
        """
        Enable a theme by name.

        This can be either called directly, or used as a context manager.

        Parameters
        ----------
        name : string (optional)
            The name of the theme to enable. If not specified, then use the
            current active name.
        **options :
            Any additional parameters will be passed to the theme as keyword
            arguments

        Returns
        -------
        PluginEnabler:
            An object that allows enable() to be used as a context manager

        Notes
        -----
        Default `vega` themes can be previewed at https://vega.github.io/vega-themes/
        """
        # Delegates entirely to PluginRegistry.enable; this override exists to
        # narrow the accepted `name` values and carry the user-facing docs.
        return super().enable(name, **options)
videollama2/lib/python3.10/site-packages/altair/vegalite/v5/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (686 Bytes). View file