Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
See raw diff
- .gitattributes +1 -0
- mantis_evalkit/lib/python3.10/site-packages/aiosignal/__init__.py +36 -0
- mantis_evalkit/lib/python3.10/site-packages/aiosignal/__init__.pyi +12 -0
- mantis_evalkit/lib/python3.10/site-packages/aiosignal/py.typed +0 -0
- mantis_evalkit/lib/python3.10/site-packages/async_timeout/__init__.py +276 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/__pycache__/folder_based_builder.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/folder_based_builder.py +406 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/pandas/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/sql/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/sql/sql.py +118 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__pycache__/_tenbin.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__pycache__/webdataset.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/_tenbin.py +285 -0
- mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/webdataset.py +299 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/__config__.pyi +102 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/__init__.cython-30.pxd +1243 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/__init__.py +547 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/_configtool.py +39 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/_expired_attrs_2_0.py +80 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/_globals.py +95 -0
- mantis_evalkit/lib/python3.10/site-packages/numpy/exceptions.py +247 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/_impl.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/_utils.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/base.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/basecontainer.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/dictconfig.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/errors.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/grammar_parser.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/grammar_visitor.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/listconfig.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/nodes.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/omegaconf.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/version.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/base.py +962 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/basecontainer.py +916 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/dictconfig.py +776 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarLexer.py +337 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarParser.py +1595 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarParserListener.py +156 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarParserVisitor.py +93 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/__pycache__/OmegaConfGrammarLexer.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/__pycache__/OmegaConfGrammarParser.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -552,3 +552,4 @@ moondream/lib/python3.10/site-packages/pygments/lexers/__pycache__/lisp.cpython-
|
|
| 552 |
parrot/lib/python3.10/site-packages/nvidia/cuda_cupti/lib/libcupti.so.12 filter=lfs diff=lfs merge=lfs -text
|
| 553 |
moondream/lib/python3.10/site-packages/altair/vegalite/v5/__pycache__/api.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 554 |
mantis_evalkit/bin/python3.10 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 552 |
parrot/lib/python3.10/site-packages/nvidia/cuda_cupti/lib/libcupti.so.12 filter=lfs diff=lfs merge=lfs -text
|
| 553 |
moondream/lib/python3.10/site-packages/altair/vegalite/v5/__pycache__/api.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 554 |
mantis_evalkit/bin/python3.10 filter=lfs diff=lfs merge=lfs -text
|
| 555 |
+
openflamingo/lib/python3.10/site-packages/torch/lib/libnvrtc-672ee683.so.11.2 filter=lfs diff=lfs merge=lfs -text
|
mantis_evalkit/lib/python3.10/site-packages/aiosignal/__init__.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from frozenlist import FrozenList
|
| 2 |
+
|
| 3 |
+
__version__ = "1.3.2"
|
| 4 |
+
|
| 5 |
+
__all__ = ("Signal",)
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class Signal(FrozenList):
|
| 9 |
+
"""Coroutine-based signal implementation.
|
| 10 |
+
|
| 11 |
+
To connect a callback to a signal, use any list method.
|
| 12 |
+
|
| 13 |
+
Signals are fired using the send() coroutine, which takes named
|
| 14 |
+
arguments.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
__slots__ = ("_owner",)
|
| 18 |
+
|
| 19 |
+
def __init__(self, owner):
|
| 20 |
+
super().__init__()
|
| 21 |
+
self._owner = owner
|
| 22 |
+
|
| 23 |
+
def __repr__(self):
|
| 24 |
+
return "<Signal owner={}, frozen={}, {!r}>".format(
|
| 25 |
+
self._owner, self.frozen, list(self)
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
async def send(self, *args, **kwargs):
|
| 29 |
+
"""
|
| 30 |
+
Sends data to all registered receivers.
|
| 31 |
+
"""
|
| 32 |
+
if not self.frozen:
|
| 33 |
+
raise RuntimeError("Cannot send non-frozen signal.")
|
| 34 |
+
|
| 35 |
+
for receiver in self:
|
| 36 |
+
await receiver(*args, **kwargs) # type: ignore
|
mantis_evalkit/lib/python3.10/site-packages/aiosignal/__init__.pyi
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Generic, TypeVar
|
| 2 |
+
|
| 3 |
+
from frozenlist import FrozenList
|
| 4 |
+
|
| 5 |
+
__all__ = ("Signal",)
|
| 6 |
+
|
| 7 |
+
_T = TypeVar("_T")
|
| 8 |
+
|
| 9 |
+
class Signal(FrozenList[_T], Generic[_T]):
|
| 10 |
+
def __init__(self, owner: Any) -> None: ...
|
| 11 |
+
def __repr__(self) -> str: ...
|
| 12 |
+
async def send(self, *args: Any, **kwargs: Any) -> None: ...
|
mantis_evalkit/lib/python3.10/site-packages/aiosignal/py.typed
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/async_timeout/__init__.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import enum
|
| 3 |
+
import sys
|
| 4 |
+
from types import TracebackType
|
| 5 |
+
from typing import Optional, Type, final
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
__version__ = "5.0.1"
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
__all__ = ("timeout", "timeout_at", "Timeout")
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def timeout(delay: Optional[float]) -> "Timeout":
|
| 15 |
+
"""timeout context manager.
|
| 16 |
+
|
| 17 |
+
Useful in cases when you want to apply timeout logic around block
|
| 18 |
+
of code or in cases when asyncio.wait_for is not suitable. For example:
|
| 19 |
+
|
| 20 |
+
>>> async with timeout(0.001):
|
| 21 |
+
... async with aiohttp.get('https://github.com') as r:
|
| 22 |
+
... await r.text()
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
delay - value in seconds or None to disable timeout logic
|
| 26 |
+
"""
|
| 27 |
+
loop = asyncio.get_running_loop()
|
| 28 |
+
if delay is not None:
|
| 29 |
+
deadline = loop.time() + delay # type: Optional[float]
|
| 30 |
+
else:
|
| 31 |
+
deadline = None
|
| 32 |
+
return Timeout(deadline, loop)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def timeout_at(deadline: Optional[float]) -> "Timeout":
|
| 36 |
+
"""Schedule the timeout at absolute time.
|
| 37 |
+
|
| 38 |
+
deadline argument points on the time in the same clock system
|
| 39 |
+
as loop.time().
|
| 40 |
+
|
| 41 |
+
Please note: it is not POSIX time but a time with
|
| 42 |
+
undefined starting base, e.g. the time of the system power on.
|
| 43 |
+
|
| 44 |
+
>>> async with timeout_at(loop.time() + 10):
|
| 45 |
+
... async with aiohttp.get('https://github.com') as r:
|
| 46 |
+
... await r.text()
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
"""
|
| 50 |
+
loop = asyncio.get_running_loop()
|
| 51 |
+
return Timeout(deadline, loop)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class _State(enum.Enum):
|
| 55 |
+
INIT = "INIT"
|
| 56 |
+
ENTER = "ENTER"
|
| 57 |
+
TIMEOUT = "TIMEOUT"
|
| 58 |
+
EXIT = "EXIT"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
if sys.version_info >= (3, 11):
|
| 62 |
+
|
| 63 |
+
class _Expired:
|
| 64 |
+
__slots__ = ("_val",)
|
| 65 |
+
|
| 66 |
+
def __init__(self, val: bool) -> None:
|
| 67 |
+
self._val = val
|
| 68 |
+
|
| 69 |
+
def __call__(self) -> bool:
|
| 70 |
+
return self._val
|
| 71 |
+
|
| 72 |
+
def __bool__(self) -> bool:
|
| 73 |
+
return self._val
|
| 74 |
+
|
| 75 |
+
def __repr__(self) -> str:
|
| 76 |
+
return repr(self._val)
|
| 77 |
+
|
| 78 |
+
def __str__(self) -> str:
|
| 79 |
+
return str(self._val)
|
| 80 |
+
|
| 81 |
+
@final
|
| 82 |
+
class Timeout(asyncio.Timeout): # type: ignore[misc]
|
| 83 |
+
# Supports full asyncio.Timeout API.
|
| 84 |
+
# Also provides several asyncio_timeout specific methods
|
| 85 |
+
# for backward compatibility.
|
| 86 |
+
def __init__(
|
| 87 |
+
self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
|
| 88 |
+
) -> None:
|
| 89 |
+
super().__init__(deadline)
|
| 90 |
+
|
| 91 |
+
@property
|
| 92 |
+
def expired(self) -> _Expired:
|
| 93 |
+
# a hacky property hat can provide both roles:
|
| 94 |
+
# timeout.expired() from asyncio
|
| 95 |
+
# timeout.expired from asyncio_timeout
|
| 96 |
+
return _Expired(super().expired())
|
| 97 |
+
|
| 98 |
+
@property
|
| 99 |
+
def deadline(self) -> Optional[float]:
|
| 100 |
+
return self.when()
|
| 101 |
+
|
| 102 |
+
def reject(self) -> None:
|
| 103 |
+
"""Reject scheduled timeout if any."""
|
| 104 |
+
# cancel is maybe better name but
|
| 105 |
+
# task.cancel() raises CancelledError in asyncio world.
|
| 106 |
+
self.reschedule(None)
|
| 107 |
+
|
| 108 |
+
def shift(self, delay: float) -> None:
|
| 109 |
+
"""Advance timeout on delay seconds.
|
| 110 |
+
|
| 111 |
+
The delay can be negative.
|
| 112 |
+
|
| 113 |
+
Raise RuntimeError if shift is called when deadline is not scheduled
|
| 114 |
+
"""
|
| 115 |
+
deadline = self.when()
|
| 116 |
+
if deadline is None:
|
| 117 |
+
raise RuntimeError("cannot shift timeout if deadline is not scheduled")
|
| 118 |
+
self.reschedule(deadline + delay)
|
| 119 |
+
|
| 120 |
+
def update(self, deadline: float) -> None:
|
| 121 |
+
"""Set deadline to absolute value.
|
| 122 |
+
|
| 123 |
+
deadline argument points on the time in the same clock system
|
| 124 |
+
as loop.time().
|
| 125 |
+
|
| 126 |
+
If new deadline is in the past the timeout is raised immediately.
|
| 127 |
+
|
| 128 |
+
Please note: it is not POSIX time but a time with
|
| 129 |
+
undefined starting base, e.g. the time of the system power on.
|
| 130 |
+
"""
|
| 131 |
+
self.reschedule(deadline)
|
| 132 |
+
|
| 133 |
+
else:
|
| 134 |
+
|
| 135 |
+
@final
|
| 136 |
+
class Timeout:
|
| 137 |
+
# Internal class, please don't instantiate it directly
|
| 138 |
+
# Use timeout() and timeout_at() public factories instead.
|
| 139 |
+
#
|
| 140 |
+
# Implementation note: `async with timeout()` is preferred
|
| 141 |
+
# over `with timeout()`.
|
| 142 |
+
# While technically the Timeout class implementation
|
| 143 |
+
# doesn't need to be async at all,
|
| 144 |
+
# the `async with` statement explicitly points that
|
| 145 |
+
# the context manager should be used from async function context.
|
| 146 |
+
#
|
| 147 |
+
# This design allows to avoid many silly misusages.
|
| 148 |
+
#
|
| 149 |
+
# TimeoutError is raised immediately when scheduled
|
| 150 |
+
# if the deadline is passed.
|
| 151 |
+
# The purpose is to time out as soon as possible
|
| 152 |
+
# without waiting for the next await expression.
|
| 153 |
+
|
| 154 |
+
__slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")
|
| 155 |
+
|
| 156 |
+
def __init__(
|
| 157 |
+
self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
|
| 158 |
+
) -> None:
|
| 159 |
+
self._loop = loop
|
| 160 |
+
self._state = _State.INIT
|
| 161 |
+
|
| 162 |
+
self._task: Optional["asyncio.Task[object]"] = None
|
| 163 |
+
self._timeout_handler = None # type: Optional[asyncio.Handle]
|
| 164 |
+
if deadline is None:
|
| 165 |
+
self._deadline = None # type: Optional[float]
|
| 166 |
+
else:
|
| 167 |
+
self.update(deadline)
|
| 168 |
+
|
| 169 |
+
async def __aenter__(self) -> "Timeout":
|
| 170 |
+
self._do_enter()
|
| 171 |
+
return self
|
| 172 |
+
|
| 173 |
+
async def __aexit__(
|
| 174 |
+
self,
|
| 175 |
+
exc_type: Optional[Type[BaseException]],
|
| 176 |
+
exc_val: Optional[BaseException],
|
| 177 |
+
exc_tb: Optional[TracebackType],
|
| 178 |
+
) -> Optional[bool]:
|
| 179 |
+
self._do_exit(exc_type)
|
| 180 |
+
return None
|
| 181 |
+
|
| 182 |
+
@property
|
| 183 |
+
def expired(self) -> bool:
|
| 184 |
+
"""Is timeout expired during execution?"""
|
| 185 |
+
return self._state == _State.TIMEOUT
|
| 186 |
+
|
| 187 |
+
@property
|
| 188 |
+
def deadline(self) -> Optional[float]:
|
| 189 |
+
return self._deadline
|
| 190 |
+
|
| 191 |
+
def reject(self) -> None:
|
| 192 |
+
"""Reject scheduled timeout if any."""
|
| 193 |
+
# cancel is maybe better name but
|
| 194 |
+
# task.cancel() raises CancelledError in asyncio world.
|
| 195 |
+
if self._state not in (_State.INIT, _State.ENTER):
|
| 196 |
+
raise RuntimeError(f"invalid state {self._state.value}")
|
| 197 |
+
self._reject()
|
| 198 |
+
|
| 199 |
+
def _reject(self) -> None:
|
| 200 |
+
self._task = None
|
| 201 |
+
if self._timeout_handler is not None:
|
| 202 |
+
self._timeout_handler.cancel()
|
| 203 |
+
self._timeout_handler = None
|
| 204 |
+
|
| 205 |
+
def shift(self, delay: float) -> None:
|
| 206 |
+
"""Advance timeout on delay seconds.
|
| 207 |
+
|
| 208 |
+
The delay can be negative.
|
| 209 |
+
|
| 210 |
+
Raise RuntimeError if shift is called when deadline is not scheduled
|
| 211 |
+
"""
|
| 212 |
+
deadline = self._deadline
|
| 213 |
+
if deadline is None:
|
| 214 |
+
raise RuntimeError("cannot shift timeout if deadline is not scheduled")
|
| 215 |
+
self.update(deadline + delay)
|
| 216 |
+
|
| 217 |
+
def update(self, deadline: float) -> None:
|
| 218 |
+
"""Set deadline to absolute value.
|
| 219 |
+
|
| 220 |
+
deadline argument points on the time in the same clock system
|
| 221 |
+
as loop.time().
|
| 222 |
+
|
| 223 |
+
If new deadline is in the past the timeout is raised immediately.
|
| 224 |
+
|
| 225 |
+
Please note: it is not POSIX time but a time with
|
| 226 |
+
undefined starting base, e.g. the time of the system power on.
|
| 227 |
+
"""
|
| 228 |
+
if self._state == _State.EXIT:
|
| 229 |
+
raise RuntimeError("cannot reschedule after exit from context manager")
|
| 230 |
+
if self._state == _State.TIMEOUT:
|
| 231 |
+
raise RuntimeError("cannot reschedule expired timeout")
|
| 232 |
+
if self._timeout_handler is not None:
|
| 233 |
+
self._timeout_handler.cancel()
|
| 234 |
+
self._deadline = deadline
|
| 235 |
+
if self._state != _State.INIT:
|
| 236 |
+
self._reschedule()
|
| 237 |
+
|
| 238 |
+
def _reschedule(self) -> None:
|
| 239 |
+
assert self._state == _State.ENTER
|
| 240 |
+
deadline = self._deadline
|
| 241 |
+
if deadline is None:
|
| 242 |
+
return
|
| 243 |
+
|
| 244 |
+
now = self._loop.time()
|
| 245 |
+
if self._timeout_handler is not None:
|
| 246 |
+
self._timeout_handler.cancel()
|
| 247 |
+
|
| 248 |
+
self._task = asyncio.current_task()
|
| 249 |
+
if deadline <= now:
|
| 250 |
+
self._timeout_handler = self._loop.call_soon(self._on_timeout)
|
| 251 |
+
else:
|
| 252 |
+
self._timeout_handler = self._loop.call_at(deadline, self._on_timeout)
|
| 253 |
+
|
| 254 |
+
def _do_enter(self) -> None:
|
| 255 |
+
if self._state != _State.INIT:
|
| 256 |
+
raise RuntimeError(f"invalid state {self._state.value}")
|
| 257 |
+
self._state = _State.ENTER
|
| 258 |
+
self._reschedule()
|
| 259 |
+
|
| 260 |
+
def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
|
| 261 |
+
if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
|
| 262 |
+
assert self._task is not None
|
| 263 |
+
self._timeout_handler = None
|
| 264 |
+
self._task = None
|
| 265 |
+
raise asyncio.TimeoutError
|
| 266 |
+
# timeout has not expired
|
| 267 |
+
self._state = _State.EXIT
|
| 268 |
+
self._reject()
|
| 269 |
+
return None
|
| 270 |
+
|
| 271 |
+
def _on_timeout(self) -> None:
|
| 272 |
+
assert self._task is not None
|
| 273 |
+
self._task.cancel()
|
| 274 |
+
self._state = _State.TIMEOUT
|
| 275 |
+
# drop the reference early
|
| 276 |
+
self._timeout_handler = None
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (206 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/__pycache__/folder_based_builder.cpython-310.pyc
ADDED
|
Binary file (10.8 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/folder_based_builder.py
ADDED
|
@@ -0,0 +1,406 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import itertools
|
| 3 |
+
import os
|
| 4 |
+
from dataclasses import dataclass
|
| 5 |
+
from typing import List, Optional, Tuple, Type
|
| 6 |
+
|
| 7 |
+
import pandas as pd
|
| 8 |
+
import pyarrow as pa
|
| 9 |
+
import pyarrow.json as paj
|
| 10 |
+
|
| 11 |
+
import datasets
|
| 12 |
+
from datasets.features.features import FeatureType
|
| 13 |
+
from datasets.tasks.base import TaskTemplate
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
logger = datasets.utils.logging.get_logger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def count_path_segments(path):
|
| 20 |
+
return path.replace("\\", "/").count("/")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@dataclass
|
| 24 |
+
class FolderBasedBuilderConfig(datasets.BuilderConfig):
|
| 25 |
+
"""BuilderConfig for AutoFolder."""
|
| 26 |
+
|
| 27 |
+
features: Optional[datasets.Features] = None
|
| 28 |
+
drop_labels: bool = None
|
| 29 |
+
drop_metadata: bool = None
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class FolderBasedBuilder(datasets.GeneratorBasedBuilder):
|
| 33 |
+
"""
|
| 34 |
+
Base class for generic data loaders for vision and image data.
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
Abstract class attributes to be overridden by a child class:
|
| 38 |
+
BASE_FEATURE: feature object to decode data (i.e. datasets.Image, datasets.Audio, ...)
|
| 39 |
+
BASE_COLUMN_NAME: string key name of a base feature (i.e. "image", "audio", ...)
|
| 40 |
+
BUILDER_CONFIG_CLASS: builder config inherited from `folder_based_builder.FolderBasedBuilderConfig`
|
| 41 |
+
EXTENSIONS: list of allowed extensions (only files with these extensions and METADATA_FILENAME files
|
| 42 |
+
will be included in a dataset)
|
| 43 |
+
CLASSIFICATION_TASK: classification task to use if labels are obtained from the folder structure
|
| 44 |
+
"""
|
| 45 |
+
|
| 46 |
+
BASE_FEATURE: Type[FeatureType]
|
| 47 |
+
BASE_COLUMN_NAME: str
|
| 48 |
+
BUILDER_CONFIG_CLASS: FolderBasedBuilderConfig
|
| 49 |
+
EXTENSIONS: List[str]
|
| 50 |
+
CLASSIFICATION_TASK: TaskTemplate
|
| 51 |
+
|
| 52 |
+
METADATA_FILENAMES: List[str] = ["metadata.csv", "metadata.jsonl"]
|
| 53 |
+
|
| 54 |
+
def _info(self):
|
| 55 |
+
return datasets.DatasetInfo(features=self.config.features)
|
| 56 |
+
|
| 57 |
+
def _split_generators(self, dl_manager):
|
| 58 |
+
if not self.config.data_files:
|
| 59 |
+
raise ValueError(f"At least one data file must be specified, but got data_files={self.config.data_files}")
|
| 60 |
+
|
| 61 |
+
# Do an early pass if:
|
| 62 |
+
# * `drop_labels` is None (default) or False, to infer the class labels
|
| 63 |
+
# * `drop_metadata` is None (default) or False, to find the metadata files
|
| 64 |
+
do_analyze = not self.config.drop_labels or not self.config.drop_metadata
|
| 65 |
+
labels, path_depths = set(), set()
|
| 66 |
+
metadata_files = collections.defaultdict(set)
|
| 67 |
+
|
| 68 |
+
def analyze(files_or_archives, downloaded_files_or_dirs, split):
|
| 69 |
+
if len(downloaded_files_or_dirs) == 0:
|
| 70 |
+
return
|
| 71 |
+
# The files are separated from the archives at this point, so check the first sample
|
| 72 |
+
# to see if it's a file or a directory and iterate accordingly
|
| 73 |
+
if os.path.isfile(downloaded_files_or_dirs[0]):
|
| 74 |
+
original_files, downloaded_files = files_or_archives, downloaded_files_or_dirs
|
| 75 |
+
for original_file, downloaded_file in zip(original_files, downloaded_files):
|
| 76 |
+
original_file, downloaded_file = str(original_file), str(downloaded_file)
|
| 77 |
+
_, original_file_ext = os.path.splitext(original_file)
|
| 78 |
+
if original_file_ext.lower() in self.EXTENSIONS:
|
| 79 |
+
if not self.config.drop_labels:
|
| 80 |
+
labels.add(os.path.basename(os.path.dirname(original_file)))
|
| 81 |
+
path_depths.add(count_path_segments(original_file))
|
| 82 |
+
elif os.path.basename(original_file) in self.METADATA_FILENAMES:
|
| 83 |
+
metadata_files[split].add((original_file, downloaded_file))
|
| 84 |
+
else:
|
| 85 |
+
original_file_name = os.path.basename(original_file)
|
| 86 |
+
logger.debug(
|
| 87 |
+
f"The file '{original_file_name}' was ignored: it is not an image, and is not {self.METADATA_FILENAMES} either."
|
| 88 |
+
)
|
| 89 |
+
else:
|
| 90 |
+
archives, downloaded_dirs = files_or_archives, downloaded_files_or_dirs
|
| 91 |
+
for archive, downloaded_dir in zip(archives, downloaded_dirs):
|
| 92 |
+
archive, downloaded_dir = str(archive), str(downloaded_dir)
|
| 93 |
+
for downloaded_dir_file in dl_manager.iter_files(downloaded_dir):
|
| 94 |
+
_, downloaded_dir_file_ext = os.path.splitext(downloaded_dir_file)
|
| 95 |
+
if downloaded_dir_file_ext in self.EXTENSIONS:
|
| 96 |
+
if not self.config.drop_labels:
|
| 97 |
+
labels.add(os.path.basename(os.path.dirname(downloaded_dir_file)))
|
| 98 |
+
path_depths.add(count_path_segments(downloaded_dir_file))
|
| 99 |
+
elif os.path.basename(downloaded_dir_file) in self.METADATA_FILENAMES:
|
| 100 |
+
metadata_files[split].add((None, downloaded_dir_file))
|
| 101 |
+
else:
|
| 102 |
+
archive_file_name = os.path.basename(archive)
|
| 103 |
+
original_file_name = os.path.basename(downloaded_dir_file)
|
| 104 |
+
logger.debug(
|
| 105 |
+
f"The file '{original_file_name}' from the archive '{archive_file_name}' was ignored: it is not an {self.BASE_COLUMN_NAME}, and is not {self.METADATA_FILENAMES} either."
|
| 106 |
+
)
|
| 107 |
+
|
| 108 |
+
data_files = self.config.data_files
|
| 109 |
+
splits = []
|
| 110 |
+
for split_name, files in data_files.items():
|
| 111 |
+
if isinstance(files, str):
|
| 112 |
+
files = [files]
|
| 113 |
+
files, archives = self._split_files_and_archives(files)
|
| 114 |
+
downloaded_files = dl_manager.download(files)
|
| 115 |
+
downloaded_dirs = dl_manager.download_and_extract(archives)
|
| 116 |
+
if do_analyze: # drop_metadata is None or False, drop_labels is None or False
|
| 117 |
+
logger.info(f"Searching for labels and/or metadata files in {split_name} data files...")
|
| 118 |
+
analyze(files, downloaded_files, split_name)
|
| 119 |
+
analyze(archives, downloaded_dirs, split_name)
|
| 120 |
+
|
| 121 |
+
if metadata_files:
|
| 122 |
+
# add metadata if `metadata_files` are found and `drop_metadata` is None (default) or False
|
| 123 |
+
add_metadata = not self.config.drop_metadata
|
| 124 |
+
# if `metadata_files` are found, add labels only if
|
| 125 |
+
# `drop_labels` is set up to False explicitly (not-default behavior)
|
| 126 |
+
add_labels = self.config.drop_labels is False
|
| 127 |
+
else:
|
| 128 |
+
# if `metadata_files` are not found, don't add metadata
|
| 129 |
+
add_metadata = False
|
| 130 |
+
# if `metadata_files` are not found and `drop_labels` is None (default) -
|
| 131 |
+
# add labels if files are on the same level in directory hierarchy and there is more than one label
|
| 132 |
+
add_labels = (
|
| 133 |
+
(len(labels) > 1 and len(path_depths) == 1)
|
| 134 |
+
if self.config.drop_labels is None
|
| 135 |
+
else not self.config.drop_labels
|
| 136 |
+
)
|
| 137 |
+
|
| 138 |
+
if add_labels:
|
| 139 |
+
logger.info("Adding the labels inferred from data directories to the dataset's features...")
|
| 140 |
+
if add_metadata:
|
| 141 |
+
logger.info("Adding metadata to the dataset...")
|
| 142 |
+
else:
|
| 143 |
+
add_labels, add_metadata, metadata_files = False, False, {}
|
| 144 |
+
|
| 145 |
+
splits.append(
|
| 146 |
+
datasets.SplitGenerator(
|
| 147 |
+
name=split_name,
|
| 148 |
+
gen_kwargs={
|
| 149 |
+
"files": list(zip(files, downloaded_files))
|
| 150 |
+
+ [(None, dl_manager.iter_files(downloaded_dir)) for downloaded_dir in downloaded_dirs],
|
| 151 |
+
"metadata_files": metadata_files,
|
| 152 |
+
"split_name": split_name,
|
| 153 |
+
"add_labels": add_labels,
|
| 154 |
+
"add_metadata": add_metadata,
|
| 155 |
+
},
|
| 156 |
+
)
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
if add_metadata:
|
| 160 |
+
# Verify that:
|
| 161 |
+
# * all metadata files have the same set of features
|
| 162 |
+
# * the `file_name` key is one of the metadata keys and is of type string
|
| 163 |
+
features_per_metadata_file: List[Tuple[str, datasets.Features]] = []
|
| 164 |
+
|
| 165 |
+
# Check that all metadata files share the same format
|
| 166 |
+
metadata_ext = {
|
| 167 |
+
os.path.splitext(original_metadata_file)[-1]
|
| 168 |
+
for original_metadata_file, _ in itertools.chain.from_iterable(metadata_files.values())
|
| 169 |
+
}
|
| 170 |
+
if len(metadata_ext) > 1:
|
| 171 |
+
raise ValueError(f"Found metadata files with different extensions: {list(metadata_ext)}")
|
| 172 |
+
metadata_ext = metadata_ext.pop()
|
| 173 |
+
|
| 174 |
+
for _, downloaded_metadata_file in itertools.chain.from_iterable(metadata_files.values()):
|
| 175 |
+
pa_metadata_table = self._read_metadata(downloaded_metadata_file, metadata_ext=metadata_ext)
|
| 176 |
+
features_per_metadata_file.append(
|
| 177 |
+
(downloaded_metadata_file, datasets.Features.from_arrow_schema(pa_metadata_table.schema))
|
| 178 |
+
)
|
| 179 |
+
for downloaded_metadata_file, metadata_features in features_per_metadata_file:
|
| 180 |
+
if metadata_features != features_per_metadata_file[0][1]:
|
| 181 |
+
raise ValueError(
|
| 182 |
+
f"Metadata files {downloaded_metadata_file} and {features_per_metadata_file[0][0]} have different features: {features_per_metadata_file[0]} != {metadata_features}"
|
| 183 |
+
)
|
| 184 |
+
metadata_features = features_per_metadata_file[0][1]
|
| 185 |
+
if "file_name" not in metadata_features:
|
| 186 |
+
raise ValueError("`file_name` must be present as dictionary key in metadata files")
|
| 187 |
+
if metadata_features["file_name"] != datasets.Value("string"):
|
| 188 |
+
raise ValueError("`file_name` key must be a string")
|
| 189 |
+
del metadata_features["file_name"]
|
| 190 |
+
else:
|
| 191 |
+
metadata_features = None
|
| 192 |
+
|
| 193 |
+
# Normally, we would do this in _info, but we need to know the labels and/or metadata
|
| 194 |
+
# before building the features
|
| 195 |
+
if self.config.features is None:
|
| 196 |
+
if add_labels:
|
| 197 |
+
self.info.features = datasets.Features(
|
| 198 |
+
{
|
| 199 |
+
self.BASE_COLUMN_NAME: self.BASE_FEATURE(),
|
| 200 |
+
"label": datasets.ClassLabel(names=sorted(labels)),
|
| 201 |
+
}
|
| 202 |
+
)
|
| 203 |
+
self.info.task_templates = [self.CLASSIFICATION_TASK.align_with_features(self.info.features)]
|
| 204 |
+
else:
|
| 205 |
+
self.info.features = datasets.Features({self.BASE_COLUMN_NAME: self.BASE_FEATURE()})
|
| 206 |
+
|
| 207 |
+
if add_metadata:
|
| 208 |
+
# Warn if there are duplicated keys in metadata compared to the existing features
|
| 209 |
+
# (`BASE_COLUMN_NAME`, optionally "label")
|
| 210 |
+
duplicated_keys = set(self.info.features) & set(metadata_features)
|
| 211 |
+
if duplicated_keys:
|
| 212 |
+
logger.warning(
|
| 213 |
+
f"Ignoring metadata columns {list(duplicated_keys)} as they are already present in "
|
| 214 |
+
f"the features dictionary."
|
| 215 |
+
)
|
| 216 |
+
# skip metadata duplicated keys
|
| 217 |
+
self.info.features.update(
|
| 218 |
+
{
|
| 219 |
+
feature: metadata_features[feature]
|
| 220 |
+
for feature in metadata_features
|
| 221 |
+
if feature not in duplicated_keys
|
| 222 |
+
}
|
| 223 |
+
)
|
| 224 |
+
|
| 225 |
+
return splits
|
| 226 |
+
|
| 227 |
+
def _split_files_and_archives(self, data_files):
    """Partition *data_files* into plain data files and archives.

    A path counts as a plain file when its extension is one of the
    supported ``EXTENSIONS`` or when its basename is a known metadata
    filename; everything else is treated as an archive.
    """
    files, archives = [], []
    for data_file in data_files:
        extension = os.path.splitext(data_file)[1].lower()
        if extension in self.EXTENSIONS or os.path.basename(data_file) in self.METADATA_FILENAMES:
            files.append(data_file)
        else:
            archives.append(data_file)
    return files, archives
|
| 238 |
+
|
| 239 |
+
def _read_metadata(self, metadata_file, metadata_ext: str = ""):
    """Load one metadata file into a ``pyarrow.Table``.

    CSV goes through pandas for consistency with the CSV packaged
    module; any other extension is assumed to be JSON Lines and is
    read with pyarrow's JSON reader.
    """
    if metadata_ext == ".csv":
        # Use `pd.read_csv` (although slower) instead of `pyarrow.csv.read_csv` for reading CSV files for consistency with the CSV packaged module
        return pa.Table.from_pandas(pd.read_csv(metadata_file))
    with open(metadata_file, "rb") as f:
        return paj.read_json(f)
|
| 246 |
+
|
| 247 |
+
def _generate_examples(self, files, metadata_files, split_name, add_metadata, add_labels):
    """Yield ``(index, example)`` pairs for one split.

    ``files`` is a list of ``(original_file, downloaded_file_or_dir)`` pairs:
    when ``original_file`` is not None the pair is a single local file,
    otherwise ``downloaded_file_or_dir`` is an iterable of files extracted
    from an archive. Metadata (when ``add_metadata``) is looked up in the
    nearest metadata file of the current directory; labels (when
    ``add_labels``) are taken from the parent directory name.
    """
    split_metadata_files = metadata_files.get(split_name, [])
    # Template with a None for every non-base feature, so examples always
    # expose the full feature set even when metadata is missing a column.
    sample_empty_metadata = (
        {k: None for k in self.info.features if k != self.BASE_COLUMN_NAME} if self.info.features else {}
    )
    last_checked_dir = None
    metadata_dir = None
    metadata_dict = None
    downloaded_metadata_file = None

    # All metadata files of a split share one extension (enforced upstream),
    # so popping from the singleton set is safe here.
    metadata_ext = ""
    if split_metadata_files:
        metadata_ext = {
            os.path.splitext(original_metadata_file)[-1] for original_metadata_file, _ in split_metadata_files
        }
        metadata_ext = metadata_ext.pop()

    file_idx = 0
    for original_file, downloaded_file_or_dir in files:
        if original_file is not None:
            _, original_file_ext = os.path.splitext(original_file)
            if original_file_ext.lower() in self.EXTENSIONS:
                if add_metadata:
                    # If the file is a file of a needed type, and we've just entered a new directory,
                    # find the nearest metadata file (by counting path segments) for the directory
                    current_dir = os.path.dirname(original_file)
                    if last_checked_dir is None or last_checked_dir != current_dir:
                        last_checked_dir = current_dir
                        metadata_file_candidates = [
                            (
                                os.path.relpath(original_file, os.path.dirname(metadata_file_candidate)),
                                metadata_file_candidate,
                                downloaded_metadata_file,
                            )
                            for metadata_file_candidate, downloaded_metadata_file in split_metadata_files
                            if metadata_file_candidate
                            is not None  # ignore metadata_files that are inside archives
                            and not os.path.relpath(
                                original_file, os.path.dirname(metadata_file_candidate)
                            ).startswith("..")
                        ]
                        if metadata_file_candidates:
                            # The candidate whose directory is closest to the file wins.
                            _, metadata_file, downloaded_metadata_file = min(
                                metadata_file_candidates, key=lambda x: count_path_segments(x[0])
                            )
                            pa_metadata_table = self._read_metadata(
                                downloaded_metadata_file, metadata_ext=metadata_ext
                            )
                            pa_file_name_array = pa_metadata_table["file_name"]
                            pa_metadata_table = pa_metadata_table.drop(["file_name"])
                            metadata_dir = os.path.dirname(metadata_file)
                            # Keys are normalized to forward slashes so lookups work on Windows too.
                            metadata_dict = {
                                os.path.normpath(file_name).replace("\\", "/"): sample_metadata
                                for file_name, sample_metadata in zip(
                                    pa_file_name_array.to_pylist(), pa_metadata_table.to_pylist()
                                )
                            }
                        else:
                            raise ValueError(
                                f"One or several metadata{metadata_ext} were found, but not in the same directory or in a parent directory of {downloaded_file_or_dir}."
                            )
                    if metadata_dir is not None and downloaded_metadata_file is not None:
                        file_relpath = os.path.relpath(original_file, metadata_dir)
                        file_relpath = file_relpath.replace("\\", "/")
                        if file_relpath not in metadata_dict:
                            raise ValueError(
                                f"{self.BASE_COLUMN_NAME} at {file_relpath} doesn't have metadata in {downloaded_metadata_file}."
                            )
                        sample_metadata = metadata_dict[file_relpath]
                    else:
                        raise ValueError(
                            f"One or several metadata{metadata_ext} were found, but not in the same directory or in a parent directory of {downloaded_file_or_dir}."
                        )
                else:
                    sample_metadata = {}
                if add_labels:
                    # The label is the immediate parent directory's name.
                    sample_label = {"label": os.path.basename(os.path.dirname(original_file))}
                else:
                    sample_label = {}
                yield (
                    file_idx,
                    {
                        **sample_empty_metadata,
                        self.BASE_COLUMN_NAME: downloaded_file_or_dir,
                        **sample_metadata,
                        **sample_label,
                    },
                )
                file_idx += 1
        else:
            # Archive case: iterate over the files extracted from the archive.
            # Mirrors the branch above, but metadata candidates must also come
            # from inside archives (metadata_file_candidate is None).
            for downloaded_dir_file in downloaded_file_or_dir:
                _, downloaded_dir_file_ext = os.path.splitext(downloaded_dir_file)
                if downloaded_dir_file_ext.lower() in self.EXTENSIONS:
                    if add_metadata:
                        current_dir = os.path.dirname(downloaded_dir_file)
                        if last_checked_dir is None or last_checked_dir != current_dir:
                            last_checked_dir = current_dir
                            metadata_file_candidates = [
                                (
                                    os.path.relpath(
                                        downloaded_dir_file, os.path.dirname(downloaded_metadata_file)
                                    ),
                                    metadata_file_candidate,
                                    downloaded_metadata_file,
                                )
                                for metadata_file_candidate, downloaded_metadata_file in split_metadata_files
                                if metadata_file_candidate
                                is None  # ignore metadata_files that are not inside archives
                                and not os.path.relpath(
                                    downloaded_dir_file, os.path.dirname(downloaded_metadata_file)
                                ).startswith("..")
                            ]
                            if metadata_file_candidates:
                                _, metadata_file, downloaded_metadata_file = min(
                                    metadata_file_candidates, key=lambda x: count_path_segments(x[0])
                                )
                                pa_metadata_table = self._read_metadata(
                                    downloaded_metadata_file, metadata_ext=metadata_ext
                                )
                                pa_file_name_array = pa_metadata_table["file_name"]
                                pa_metadata_table = pa_metadata_table.drop(["file_name"])
                                # NOTE: here the reference dir is the *downloaded* metadata
                                # file's dir (unlike the non-archive branch above).
                                metadata_dir = os.path.dirname(downloaded_metadata_file)
                                metadata_dict = {
                                    os.path.normpath(file_name).replace("\\", "/"): sample_metadata
                                    for file_name, sample_metadata in zip(
                                        pa_file_name_array.to_pylist(), pa_metadata_table.to_pylist()
                                    )
                                }
                            else:
                                raise ValueError(
                                    f"One or several metadata{metadata_ext} were found, but not in the same directory or in a parent directory of {downloaded_dir_file}."
                                )
                        if metadata_dir is not None and downloaded_metadata_file is not None:
                            downloaded_dir_file_relpath = os.path.relpath(downloaded_dir_file, metadata_dir)
                            downloaded_dir_file_relpath = downloaded_dir_file_relpath.replace("\\", "/")
                            if downloaded_dir_file_relpath not in metadata_dict:
                                raise ValueError(
                                    f"{self.BASE_COLUMN_NAME} at {downloaded_dir_file_relpath} doesn't have metadata in {downloaded_metadata_file}."
                                )
                            sample_metadata = metadata_dict[downloaded_dir_file_relpath]
                        else:
                            raise ValueError(
                                f"One or several metadata{metadata_ext} were found, but not in the same directory or in a parent directory of {downloaded_dir_file}."
                            )
                    else:
                        sample_metadata = {}
                    if add_labels:
                        sample_label = {"label": os.path.basename(os.path.dirname(downloaded_dir_file))}
                    else:
                        sample_label = {}
                    yield (
                        file_idx,
                        {
                            **sample_empty_metadata,
                            self.BASE_COLUMN_NAME: downloaded_dir_file,
                            **sample_metadata,
                            **sample_label,
                        },
                    )
                    file_idx += 1
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/pandas/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (192 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/sql/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (189 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/sql/sql.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from dataclasses import dataclass
|
| 3 |
+
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
|
| 4 |
+
|
| 5 |
+
import pandas as pd
|
| 6 |
+
import pyarrow as pa
|
| 7 |
+
|
| 8 |
+
import datasets
|
| 9 |
+
import datasets.config
|
| 10 |
+
from datasets.features.features import require_storage_cast
|
| 11 |
+
from datasets.table import table_cast
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
import sqlite3
|
| 16 |
+
|
| 17 |
+
import sqlalchemy
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
logger = datasets.utils.logging.get_logger(__name__)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@dataclass
class SqlConfig(datasets.BuilderConfig):
    """BuilderConfig for SQL.

    Wraps the arguments of ``pd.read_sql`` plus the target ``features``.
    ``sql`` and ``con`` are required; their ``None`` defaults only exist
    to keep the dataclass field ordering valid.
    """

    # Query string / SQLAlchemy Selectable and the connection to run it on.
    sql: Union[str, "sqlalchemy.sql.Selectable"] = None
    con: Union[str, "sqlalchemy.engine.Connection", "sqlalchemy.engine.Engine", "sqlite3.Connection"] = None
    index_col: Optional[Union[str, List[str]]] = None
    coerce_float: bool = True
    params: Optional[Union[List, Tuple, Dict]] = None
    parse_dates: Optional[Union[List, Dict]] = None
    columns: Optional[List[str]] = None
    # Number of rows per generated Arrow table; None reads everything at once.
    chunksize: Optional[int] = 10_000
    features: Optional[datasets.Features] = None

    def __post_init__(self):
        # Fail fast on the two mandatory arguments.
        if self.sql is None:
            raise ValueError("sql must be specified")
        if self.con is None:
            raise ValueError("con must be specified")

    def create_config_id(
        self,
        config_kwargs: dict,
        custom_features: Optional[datasets.Features] = None,
    ) -> str:
        """Build a deterministic config id from kwargs that may be unhashable."""
        config_kwargs = config_kwargs.copy()
        # We need to stringify the Selectable object to make its hash deterministic

        # The process of stringifying is explained here: http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html
        sql = config_kwargs["sql"]
        if not isinstance(sql, str):
            if datasets.config.SQLALCHEMY_AVAILABLE and "sqlalchemy" in sys.modules:
                import sqlalchemy

                if isinstance(sql, sqlalchemy.sql.Selectable):
                    # Compile against a dialect derived from the connection URI
                    # so the rendered SQL (and thus the hash) is stable.
                    engine = sqlalchemy.create_engine(config_kwargs["con"].split("://")[0] + "://")
                    sql_str = str(sql.compile(dialect=engine.dialect))
                    config_kwargs["sql"] = sql_str
                else:
                    raise TypeError(
                        f"Supported types for 'sql' are string and sqlalchemy.sql.Selectable but got {type(sql)}: {sql}"
                    )
            else:
                raise TypeError(
                    f"Supported types for 'sql' are string and sqlalchemy.sql.Selectable but got {type(sql)}: {sql}"
                )
        con = config_kwargs["con"]
        if not isinstance(con, str):
            # Live connection objects can't be hashed reproducibly; fall back
            # to their identity and warn the user.
            config_kwargs["con"] = id(con)
            logger.info(
                f"SQL connection 'con' of type {type(con)} couldn't be hashed properly. To enable hashing, specify 'con' as URI string instead."
            )

        return super().create_config_id(config_kwargs, custom_features=custom_features)

    @property
    def pd_read_sql_kwargs(self):
        # Keyword arguments forwarded verbatim to `pd.read_sql`.
        pd_read_sql_kwargs = {
            "index_col": self.index_col,
            "columns": self.columns,
            "params": self.params,
            "coerce_float": self.coerce_float,
            "parse_dates": self.parse_dates,
        }
        return pd_read_sql_kwargs
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class Sql(datasets.ArrowBasedBuilder):
    """Arrow-based builder that materializes a SQL query as a dataset."""

    BUILDER_CONFIG_CLASS = SqlConfig

    def _info(self):
        return datasets.DatasetInfo(features=self.config.features)

    def _split_generators(self, dl_manager):
        # Everything comes from the live connection, so there is nothing to
        # download and only a single TRAIN split.
        return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={})]

    def _cast_table(self, pa_table: pa.Table) -> pa.Table:
        """Cast *pa_table* to the schema of ``self.config.features``, if set."""
        features = self.config.features
        if features is None:
            return pa_table
        schema = features.arrow_schema
        if any(require_storage_cast(feature) for feature in features.values()):
            # more expensive cast; allows str <-> int/float or str to Audio for example
            return table_cast(pa_table, schema)
        # cheaper cast
        return pa.Table.from_arrays([pa_table[field.name] for field in schema], schema=schema)

    def _generate_tables(self):
        """Yield ``(chunk_idx, pa.Table)`` pairs read via ``pd.read_sql``."""
        chunksize = self.config.chunksize
        reader = pd.read_sql(
            self.config.sql, self.config.con, chunksize=chunksize, **self.config.pd_read_sql_kwargs
        )
        # With chunksize=None pandas returns a single DataFrame; wrap it so the
        # loop below handles both cases uniformly.
        chunks = [reader] if chunksize is None else reader
        for chunk_idx, df in enumerate(chunks):
            yield chunk_idx, self._cast_table(pa.Table.from_pandas(df))
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (196 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__pycache__/_tenbin.cpython-310.pyc
ADDED
|
Binary file (8.83 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/__pycache__/webdataset.cpython-310.pyc
ADDED
|
Binary file (6.1 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/_tenbin.py
ADDED
|
@@ -0,0 +1,285 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Copyright (c) 2017-2021 NVIDIA CORPORATION. All rights reserved.
|
| 3 |
+
# This file comes from the WebDataset library.
|
| 4 |
+
# See the LICENSE file for licensing terms (BSD-style).
|
| 5 |
+
#
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
Binary tensor encodings for PyTorch and NumPy.
|
| 9 |
+
|
| 10 |
+
This defines efficient binary encodings for tensors. The format is 8 byte
|
| 11 |
+
aligned and can be used directly for computations when transmitted, say,
|
| 12 |
+
via RDMA. The format is supported by WebDataset with the `.ten` filename
|
| 13 |
+
extension. It is also used by Tensorcom, Tensorcom RDMA, and can be used
|
| 14 |
+
for fast tensor storage with LMDB and in disk files (which can be memory
|
| 15 |
+
mapped)
|
| 16 |
+
|
| 17 |
+
Data is encoded as a series of chunks:
|
| 18 |
+
|
| 19 |
+
- magic number (int64)
|
| 20 |
+
- length in bytes (int64)
|
| 21 |
+
- bytes (multiple of 64 bytes long)
|
| 22 |
+
|
| 23 |
+
Arrays are a header chunk followed by a data chunk.
|
| 24 |
+
Header chunks have the following structure:
|
| 25 |
+
|
| 26 |
+
- dtype (int64)
|
| 27 |
+
- 8 byte array name
|
| 28 |
+
- ndim (int64)
|
| 29 |
+
- dim[0]
|
| 30 |
+
- dim[1]
|
| 31 |
+
- ...
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
import struct
|
| 35 |
+
import sys
|
| 36 |
+
|
| 37 |
+
import numpy as np
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def bytelen(a):
    """Return the size of *a* in bytes.

    Handles NumPy arrays (via ``nbytes``) and raw byte containers;
    raises ``ValueError`` for anything else.
    """
    nbytes = getattr(a, "nbytes", None)
    if nbytes is not None:
        return nbytes
    if isinstance(a, (bytearray, bytes)):
        return len(a)
    raise ValueError(a, "cannot determine nbytes")
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def bytedata(a):
    """Return the raw byte buffer backing *a*.

    Byte-like objects are returned unchanged; objects exposing a
    ``data`` attribute (e.g. NumPy arrays) yield that buffer.
    """
    if isinstance(a, (bytearray, bytes, memoryview)):
        return a
    if hasattr(a, "data"):
        return a.data
    raise ValueError(a, "cannot return bytedata")
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# tables for converting between long/short NumPy dtypes
|
| 61 |
+
|
| 62 |
+
long_to_short = """
|
| 63 |
+
float16 f2
|
| 64 |
+
float32 f4
|
| 65 |
+
float64 f8
|
| 66 |
+
int8 i1
|
| 67 |
+
int16 i2
|
| 68 |
+
int32 i4
|
| 69 |
+
int64 i8
|
| 70 |
+
uint8 u1
|
| 71 |
+
uint16 u2
|
| 72 |
+
unit32 u4
|
| 73 |
+
uint64 u8
|
| 74 |
+
""".strip()
|
| 75 |
+
long_to_short = [x.split() for x in long_to_short.split("\n")]
|
| 76 |
+
long_to_short = {x[0]: x[1] for x in long_to_short}
|
| 77 |
+
short_to_long = {v: k for k, v in long_to_short.items()}
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def check_acceptable_input_type(data, allow64):
|
| 81 |
+
"""Check that the data has an acceptable type for tensor encoding.
|
| 82 |
+
|
| 83 |
+
:param data: array
|
| 84 |
+
:param allow64: allow 64 bit types
|
| 85 |
+
"""
|
| 86 |
+
for a in data:
|
| 87 |
+
if a.dtype.name not in long_to_short:
|
| 88 |
+
raise ValueError("unsupported dataypte")
|
| 89 |
+
if not allow64 and a.dtype.name not in ["float64", "int64", "uint64"]:
|
| 90 |
+
raise ValueError("64 bit datatypes not allowed unless explicitly enabled")
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def str64(s):
    """Pack a short (at most 8 character) ASCII string into an int64."""
    padded = (s + "\0" * (8 - len(s))).encode("ascii")
    return struct.unpack("@q", padded)[0]
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def unstr64(i):
    """Unpack an int64 produced by :func:`str64` back into a string."""
    return struct.pack("@q", i).decode("ascii").strip("\0")
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def check_infos(data, infos, required_infos=None):
    """Verify the info strings attached to decoded arrays.

    :param data: decoded arrays
    :param infos: info strings actually read from the headers
    :param required_infos: ``None``/``False`` disables checking; ``True``
        returns ``(data, infos)``; a tuple/list is compared entry-wise
        against the actual infos.
    :raises ValueError: if an actual info differs from its required info
    """
    if required_infos is False or required_infos is None:
        return data
    if required_infos is True:
        return data, infos
    if not isinstance(required_infos, (tuple, list)):
        raise ValueError("required_infos must be tuple or list")
    for required, actual in zip(required_infos, infos):
        # Bug fix: only raise on an actual mismatch; the original raised
        # unconditionally, so any non-empty required_infos always failed.
        if required != actual:
            raise ValueError(f"actual info {actual} doesn't match required info {required}")
    return data
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def encode_header(a, info=""):
    """Serialize the tenbin header for array *a* as an int64 byte buffer.

    The header layout is: dtype code, info string, rank, then one entry
    per dimension (see the module docstring).
    """
    if a.ndim >= 10:
        raise ValueError("too many dimensions")
    if a.nbytes != np.prod(a.shape) * a.itemsize:
        raise ValueError("mismatch between size and shape")
    if a.dtype.name not in long_to_short:
        raise ValueError("unsupported array type")
    fields = [str64(long_to_short[a.dtype.name]), str64(info), len(a.shape), *a.shape]
    return bytedata(np.array(fields, dtype="i8"))
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def decode_header(h):
    """Parse a header buffer into ``(shape, dtype, info)``."""
    fields = np.frombuffer(h, dtype="i8")
    code = unstr64(fields[0])
    if code not in short_to_long:
        raise ValueError("unsupported array type")
    rank = int(fields[2])
    shape = tuple(fields[3 : 3 + rank])
    return shape, np.dtype(short_to_long[code]), unstr64(fields[1])
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def encode_list(l, infos=None):  # noqa: E741
    """Encode arrays into a flat list of alternating header/data buffers."""
    if infos is None:
        infos = [""]
    elif len(l) != len(infos):
        raise ValueError(f"length of list {l} must muatch length of infos {infos}")
    result = []
    for index, array in enumerate(l):
        result.append(encode_header(array, infos[index % len(infos)]))
        result.append(bytedata(array))
    return result
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def decode_list(l, infos=False):  # noqa: E741
    """Decode alternating header/data buffers back into arrays."""
    arrays = []
    actual_infos = []
    for header, payload in zip(l[::2], l[1::2]):
        shape, dtype, info = decode_header(header)
        arrays.append(np.frombuffer(payload, dtype=dtype, count=np.prod(shape)).reshape(*shape))
        actual_infos.append(info)
    return check_infos(arrays, actual_infos, infos)
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
magic_str = "~TenBin~"
|
| 170 |
+
magic = str64(magic_str)
|
| 171 |
+
magic_bytes = unstr64(magic).encode("ascii")
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def roundup(n, k=64):
    """Round *n* up to the next multiple of *k* (64 by default)."""
    return ((n + k - 1) // k) * k
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def encode_chunks(l):  # noqa: E741
    """Pack byte chunks into one buffer, each preceded by magic + length."""
    # Each chunk costs 16 bytes of framing plus its 64-byte-aligned payload.
    total = sum(16 + roundup(chunk.nbytes) for chunk in l)
    packed = bytearray(total)
    offset = 0
    for chunk in l:
        packed[offset : offset + 8] = magic_bytes
        packed[offset + 8 : offset + 16] = struct.pack("@q", chunk.nbytes)
        data_start = offset + 16
        packed[data_start : data_start + bytelen(chunk)] = chunk
        offset = data_start + roundup(bytelen(chunk))
    return packed
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def decode_chunks(buf):
    """Split a packed buffer back into its list of byte chunks."""
    chunks = []
    offset = 0
    end = bytelen(buf)
    while offset < end:
        if buf[offset : offset + 8] != magic_bytes:
            raise ValueError("magic bytes mismatch")
        nbytes = struct.unpack("@q", buf[offset + 8 : offset + 16])[0]
        payload_start = offset + 16
        chunks.append(buf[payload_start : payload_start + nbytes])
        # Skip over the zero padding that keeps chunks 64-byte aligned.
        offset = payload_start + roundup(nbytes)
    return chunks
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def encode_buffer(l, infos=None):  # noqa: E741
    """Encode a list of arrays into a single contiguous byte buffer."""
    if not isinstance(l, list):
        raise ValueError("requires list")
    chunks = encode_list(l, infos=infos)
    return encode_chunks(chunks)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def decode_buffer(buf, infos=False):
    """Decode a contiguous byte buffer back into a list of arrays."""
    chunks = decode_chunks(buf)
    return decode_list(chunks, infos=infos)
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def write_chunk(stream, buf):
    """Write one framed chunk to *stream*: magic, length, payload, padding."""
    nbytes = bytelen(buf)
    pad = roundup(nbytes) - nbytes
    stream.write(magic_bytes)
    stream.write(struct.pack("@q", nbytes))
    stream.write(bytedata(buf))
    if pad:
        stream.write(b"\0" * pad)
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
def read_chunk(stream):
    """Read a byte chunk from a stream with magics, length, and padding.

    Returns ``None`` on a clean EOF (nothing left to read) and raises
    ``ValueError`` on a corrupted or truncated stream.
    """
    magic = stream.read(8)
    # An empty read means we are exactly at the end of the stream.
    if magic == b"":
        return None
    if magic != magic_bytes:
        raise ValueError("magic number does not match")
    nbytes = stream.read(8)
    nbytes = struct.unpack("@q", nbytes)[0]
    if nbytes < 0:
        raise ValueError("negative nbytes")
    data = stream.read(nbytes)
    # Chunks are 64-byte aligned on disk; consume the zero padding so the
    # next read starts at the following chunk's magic bytes.
    padding = roundup(nbytes) - nbytes
    if padding > 0:
        stream.read(padding)
    return data
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def write(stream, l, infos=None):  # noqa: E741
    """Write a list of arrays to *stream* as framed header/data chunks."""
    chunks = encode_list(l, infos=infos)
    for chunk in chunks:
        write_chunk(stream, chunk)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def read(stream, n=sys.maxsize, infos=False):
    """Read up to *n* arrays from *stream*; stops cleanly at EOF."""
    chunks = []
    for _ in range(n):
        header = read_chunk(stream)
        if header is None:
            # Clean EOF between arrays.
            break
        payload = read_chunk(stream)
        if payload is None:
            # A header without its data chunk means the stream was truncated.
            raise ValueError("premature EOF")
        chunks.extend((header, payload))
    return decode_list(chunks, infos=infos)
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def save(fname, *args, infos=None, nocheck=False):
    """Save arrays to *fname* (which must end in ``.ten`` unless *nocheck*)."""
    if not (nocheck or fname.endswith(".ten")):
        raise ValueError("file name should end in .ten")
    with open(fname, "wb") as stream:
        write(stream, args, infos=infos)
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
def load(fname, infos=False, nocheck=False):
    """Load arrays from *fname* (which must end in ``.ten`` unless *nocheck*)."""
    if not (nocheck or fname.endswith(".ten")):
        raise ValueError("file name should end in .ten")
    with open(fname, "rb") as stream:
        return read(stream, infos=infos)
|
mantis_evalkit/lib/python3.10/site-packages/datasets/packaged_modules/webdataset/webdataset.py
ADDED
|
@@ -0,0 +1,299 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import json
|
| 3 |
+
from itertools import islice
|
| 4 |
+
from typing import Any, Callable, Dict, List
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import pyarrow as pa
|
| 8 |
+
|
| 9 |
+
import datasets
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
logger = datasets.utils.logging.get_logger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class WebDataset(datasets.GeneratorBasedBuilder):
|
| 16 |
+
DEFAULT_WRITER_BATCH_SIZE = 100
|
| 17 |
+
IMAGE_EXTENSIONS: List[str] # definition at the bottom of the script
|
| 18 |
+
AUDIO_EXTENSIONS: List[str] # definition at the bottom of the script
|
| 19 |
+
DECODERS: Dict[str, Callable[[Any], Any]] # definition at the bottom of the script
|
| 20 |
+
NUM_EXAMPLES_FOR_FEATURES_INFERENCE = 5
|
| 21 |
+
|
| 22 |
+
@classmethod
def _get_pipeline_from_tar(cls, tar_path, tar_iterator):
    # Group consecutive tar members whose names share the same key prefix
    # ("key.field") into one example dict. Members without a "." in their
    # name are skipped. Fields with a registered decoder are decoded
    # eagerly; note the raw bytes are stored under the lowercased field
    # name, while the decoded value uses the original field name (the
    # DECODERS keys appear to be lowercase extensions, so both coincide
    # whenever a decoder fires — TODO confirm against DECODERS definition).
    current_example = {}
    for filename, f in tar_iterator:
        if "." in filename:
            example_key, field_name = filename.split(".", 1)
            # A new key means the previous example is complete: emit it.
            if current_example and current_example["__key__"] != example_key:
                yield current_example
                current_example = {}
            current_example["__key__"] = example_key
            current_example["__url__"] = tar_path
            current_example[field_name.lower()] = f.read()
            if field_name in cls.DECODERS:
                current_example[field_name] = cls.DECODERS[field_name](current_example[field_name])
    # Flush the trailing example once the archive is exhausted.
    if current_example:
        yield current_example
|
| 38 |
+
|
| 39 |
+
def _info(self) -> datasets.DatasetInfo:
    # Start with an empty DatasetInfo; features are inferred from the
    # first few examples in _split_generators when not already set.
    return datasets.DatasetInfo()
|
| 41 |
+
|
| 42 |
+
def _split_generators(self, dl_manager):
|
| 43 |
+
"""We handle string, list and dicts in datafiles"""
|
| 44 |
+
# Download the data files
|
| 45 |
+
if not self.config.data_files:
|
| 46 |
+
raise ValueError(f"At least one data file must be specified, but got data_files={self.config.data_files}")
|
| 47 |
+
data_files = dl_manager.download(self.config.data_files)
|
| 48 |
+
if isinstance(data_files, (str, list, tuple)):
|
| 49 |
+
tar_paths = data_files
|
| 50 |
+
if isinstance(tar_paths, str):
|
| 51 |
+
tar_paths = [tar_paths]
|
| 52 |
+
tar_iterators = [dl_manager.iter_archive(tar_path) for tar_path in tar_paths]
|
| 53 |
+
splits = [
|
| 54 |
+
datasets.SplitGenerator(
|
| 55 |
+
name=datasets.Split.TRAIN, gen_kwargs={"tar_paths": tar_paths, "tar_iterators": tar_iterators}
|
| 56 |
+
)
|
| 57 |
+
]
|
| 58 |
+
else:
|
| 59 |
+
splits = []
|
| 60 |
+
for split_name, tar_paths in data_files.items():
|
| 61 |
+
if isinstance(tar_paths, str):
|
| 62 |
+
tar_paths = [tar_paths]
|
| 63 |
+
tar_iterators = [dl_manager.iter_archive(tar_path) for tar_path in tar_paths]
|
| 64 |
+
splits.append(
|
| 65 |
+
datasets.SplitGenerator(
|
| 66 |
+
name=split_name, gen_kwargs={"tar_paths": tar_paths, "tar_iterators": tar_iterators}
|
| 67 |
+
)
|
| 68 |
+
)
|
| 69 |
+
if not self.info.features:
|
| 70 |
+
# Get one example to get the feature types
|
| 71 |
+
pipeline = self._get_pipeline_from_tar(tar_paths[0], tar_iterators[0])
|
| 72 |
+
first_examples = list(islice(pipeline, self.NUM_EXAMPLES_FOR_FEATURES_INFERENCE))
|
| 73 |
+
if any(example.keys() != first_examples[0].keys() for example in first_examples):
|
| 74 |
+
raise ValueError(
|
| 75 |
+
"The TAR archives of the dataset should be in WebDataset format, "
|
| 76 |
+
"but the files in the archive don't share the same prefix or the same types."
|
| 77 |
+
)
|
| 78 |
+
pa_tables = [pa.Table.from_pylist([example]) for example in first_examples]
|
| 79 |
+
if datasets.config.PYARROW_VERSION.major < 14:
|
| 80 |
+
inferred_arrow_schema = pa.concat_tables(pa_tables, promote=True).schema
|
| 81 |
+
else:
|
| 82 |
+
inferred_arrow_schema = pa.concat_tables(pa_tables, promote_options="default").schema
|
| 83 |
+
features = datasets.Features.from_arrow_schema(inferred_arrow_schema)
|
| 84 |
+
|
| 85 |
+
# Set Image types
|
| 86 |
+
for field_name in first_examples[0]:
|
| 87 |
+
extension = field_name.rsplit(".", 1)[-1]
|
| 88 |
+
if extension in self.IMAGE_EXTENSIONS:
|
| 89 |
+
features[field_name] = datasets.Image()
|
| 90 |
+
# Set Audio types
|
| 91 |
+
for field_name in first_examples[0]:
|
| 92 |
+
extension = field_name.rsplit(".", 1)[-1]
|
| 93 |
+
if extension in self.AUDIO_EXTENSIONS:
|
| 94 |
+
features[field_name] = datasets.Audio()
|
| 95 |
+
self.info.features = features
|
| 96 |
+
|
| 97 |
+
return splits
|
| 98 |
+
|
| 99 |
+
def _generate_examples(self, tar_paths, tar_iterators):
|
| 100 |
+
image_field_names = [
|
| 101 |
+
field_name for field_name, feature in self.info.features.items() if isinstance(feature, datasets.Image)
|
| 102 |
+
]
|
| 103 |
+
audio_field_names = [
|
| 104 |
+
field_name for field_name, feature in self.info.features.items() if isinstance(feature, datasets.Audio)
|
| 105 |
+
]
|
| 106 |
+
for tar_idx, (tar_path, tar_iterator) in enumerate(zip(tar_paths, tar_iterators)):
|
| 107 |
+
for example_idx, example in enumerate(self._get_pipeline_from_tar(tar_path, tar_iterator)):
|
| 108 |
+
for field_name in image_field_names + audio_field_names:
|
| 109 |
+
example[field_name] = {"path": example["__key__"] + "." + field_name, "bytes": example[field_name]}
|
| 110 |
+
yield f"{tar_idx}_{example_idx}", example
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
# Obtained with:
|
| 114 |
+
# ```
|
| 115 |
+
# import PIL.Image
|
| 116 |
+
# IMAGE_EXTENSIONS = []
|
| 117 |
+
# PIL.Image.init()
|
| 118 |
+
# for ext, format in PIL.Image.EXTENSION.items():
|
| 119 |
+
# if format in PIL.Image.OPEN:
|
| 120 |
+
# IMAGE_EXTENSIONS.append(ext[1:])
|
| 121 |
+
# ```
|
| 122 |
+
# We intentionally do not run this code on launch because:
|
| 123 |
+
# (1) Pillow is an optional dependency, so importing Pillow in global namespace is not allowed
|
| 124 |
+
# (2) To ensure the list of supported extensions is deterministic
|
| 125 |
+
# Supported Pillow image extensions, hard-coded (rather than queried from
# PIL.Image at import time) so the list is deterministic and Pillow stays an
# optional dependency — see the comment above for how it was generated.
IMAGE_EXTENSIONS = (
    "blp bmp dib bufr cur pcx dcx dds ps eps fit fits fli flc ftc ftu gbr gif "
    "grib h5 hdf png apng jp2 j2k jpc jpf jpx j2c icns ico im iim tif tiff "
    "jfif jpe jpg jpeg mpg mpeg msp pcd pxr pbm pgm ppm pnm psd bw rgb rgba "
    "sgi ras tga icb vda vst webp wmf emf xbm xpm"
).split()
WebDataset.IMAGE_EXTENSIONS = IMAGE_EXTENSIONS
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
# Obtained with:
|
| 194 |
+
# ```
|
| 195 |
+
# import soundfile as sf
|
| 196 |
+
#
|
| 197 |
+
# AUDIO_EXTENSIONS = [f".{format.lower()}" for format in sf.available_formats().keys()]
|
| 198 |
+
#
|
| 199 |
+
# # .mp3 is currently decoded via `torchaudio`, .opus decoding is supported if version of `libsndfile` >= 1.0.30:
|
| 200 |
+
# AUDIO_EXTENSIONS.extend([".mp3", ".opus"])
|
| 201 |
+
# ```
|
| 202 |
+
# We intentionally do not run this code on launch because:
|
| 203 |
+
# (1) Soundfile is an optional dependency, so importing it in global namespace is not allowed
|
| 204 |
+
# (2) To ensure the list of supported extensions is deterministic
|
| 205 |
+
# Supported soundfile audio extensions (plus mp3/opus), hard-coded so the
# list is deterministic and soundfile stays an optional dependency — see the
# comment above for how it was generated.
AUDIO_EXTENSIONS = (
    "aiff au avr caf flac htk svx mat4 mat5 mpc2k ogg paf pvf raw rf64 sd2 "
    "sds ircam voc w64 wav nist wavex wve xi mp3 opus"
).split()
WebDataset.AUDIO_EXTENSIONS = AUDIO_EXTENSIONS
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def text_loads(data: bytes) -> str:
    """Decode raw bytes as UTF-8 text."""
    return str(data, "utf-8")
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
def tenbin_loads(data: bytes):
    """Decode a WebDataset "tenbin" tensor buffer."""
    from . import _tenbin  # deferred import of the vendored codec

    decoded = _tenbin.decode_buffer(data)
    return decoded
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def msgpack_loads(data: bytes):
    """Deserialize a MessagePack-encoded payload."""
    import msgpack  # local import: msgpack is an optional dependency

    unpacked = msgpack.unpackb(data)
    return unpacked
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
def npy_loads(data: bytes):
    """Parse a ``.npy`` payload into an ndarray.

    ``allow_pickle=False`` guards against arbitrary code execution from
    object arrays (CVE-2019-6446).
    """
    from numpy.lib.format import read_array

    return read_array(io.BytesIO(data), allow_pickle=False)
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def npz_loads(data: bytes):
    """Open a ``.npz`` archive from raw bytes, with pickle disabled for safety."""
    buffer = io.BytesIO(data)
    return np.load(buffer, allow_pickle=False)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def cbor_loads(data: bytes):
    """Deserialize a CBOR-encoded payload."""
    import cbor  # local import: cbor is an optional dependency

    decoded = cbor.loads(data)
    return decoded
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
# Obtained by checking `decoders` in `webdataset.autodecode`
|
| 271 |
+
# and removing unsafe extension decoders.
|
| 272 |
+
# Removed Pickle decoders:
|
| 273 |
+
# - "pyd": lambda data: pickle.loads(data)
|
| 274 |
+
# - "pickle": lambda data: pickle.loads(data)
|
| 275 |
+
# Removed Torch decoders:
|
| 276 |
+
# - "pth": lambda data: torch_loads(data)
|
| 277 |
+
# Modified NumPy decoders to fix CVE-2019-6446 (add allow_pickle=False):
|
| 278 |
+
# - "npy": npy_loads,
|
| 279 |
+
# - "npz": lambda data: np.load(io.BytesIO(data)),
|
| 280 |
+
# Extension -> decoder mapping (see the provenance comment above; pickle and
# torch decoders were removed as unsafe).  All keys are valid identifiers,
# so the keyword form of dict() is used.
DECODERS = dict(
    txt=text_loads,
    text=text_loads,
    transcript=text_loads,
    cls=int,
    cls2=int,
    index=int,
    inx=int,
    id=int,
    json=json.loads,
    jsn=json.loads,
    ten=tenbin_loads,
    tb=tenbin_loads,
    mp=msgpack_loads,
    msg=msgpack_loads,
    npy=npy_loads,
    npz=npz_loads,
    cbor=cbor_loads,
)
WebDataset.DECODERS = DECODERS
|
mantis_evalkit/lib/python3.10/site-packages/numpy/__config__.pyi
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from enum import Enum
|
| 2 |
+
from types import ModuleType
|
| 3 |
+
from typing import Final, Literal as L, TypedDict, overload, type_check_only
|
| 4 |
+
from typing_extensions import NotRequired
|
| 5 |
+
|
| 6 |
+
# Per-compiler metadata as reported by numpy.show_config().  Functional
# TypedDict syntax is required because "linker args" contains a space.
_CompilerConfigDictValue = TypedDict(
    "_CompilerConfigDictValue",
    {
        "name": str,
        "linker": str,
        "version": str,
        "commands": str,
        "args": str,
        "linker args": str,
    },
)
# "Compilers" section of the config: one entry per toolchain language.
# Functional syntax is required because "c++" is not a valid identifier.
_CompilerConfigDict = TypedDict(
    "_CompilerConfigDict",
    {
        "c": _CompilerConfigDictValue,
        "cython": _CompilerConfigDictValue,
        "c++": _CompilerConfigDictValue,
    },
)
|
| 25 |
+
# "Machine Information" section; "cross-compiled" is present only when true.
# _MachineInformationDictValue is defined below — a forward reference is fine
# in a stub file, which is never executed.
_MachineInformationDict = TypedDict(
    "_MachineInformationDict",
    {
        "host": _MachineInformationDictValue,
        "build": _MachineInformationDictValue,
        "cross-compiled": NotRequired[L[True]],
    },
)
|
| 33 |
+
|
| 34 |
+
@type_check_only
class _MachineInformationDictValue(TypedDict):
    # CPU identifier string (e.g. "x86_64" — presumably; verify against show_config output)
    cpu: str
    # CPU family string
    family: str
    # Byte order of the machine
    endian: L["little", "big"]
    # Operating system name
    system: str
|
| 40 |
+
|
| 41 |
+
# One BLAS/LAPACK dependency entry; functional syntax is required because
# several keys contain spaces.  "found" is present only when true.
_BuildDependenciesDictValue = TypedDict(
    "_BuildDependenciesDictValue",
    {
        "name": str,
        "found": NotRequired[L[True]],
        "version": str,
        "include directory": str,
        "lib directory": str,
        "openblas configuration": str,
        "pc file directory": str,
    },
)
|
| 53 |
+
|
| 54 |
+
class _BuildDependenciesDict(TypedDict):
    """"Build Dependencies" section of the config."""

    blas: _BuildDependenciesDictValue
    lapack: _BuildDependenciesDictValue

class _PythonInformationDict(TypedDict):
    """"Python Information" section: interpreter path and version."""

    path: str
    version: str
|
| 61 |
+
|
| 62 |
+
# "SIMD Extensions" section; functional syntax is required because
# "not found" contains a space.
_SIMDExtensionsDict = TypedDict(
    "_SIMDExtensionsDict",
    {
        "baseline": list[str],
        "found": list[str],
        "not found": list[str],
    },
)
|
| 70 |
+
|
| 71 |
+
# Top-level shape of the dict returned by show_config(mode="dicts");
# functional syntax is required because the keys contain spaces.
_ConfigDict = TypedDict(
    "_ConfigDict",
    {
        "Compilers": _CompilerConfigDict,
        "Machine Information": _MachineInformationDict,
        "Build Dependencies": _BuildDependenciesDict,
        "Python Information": _PythonInformationDict,
        "SIMD Extensions": _SIMDExtensionsDict,
    },
)
|
| 81 |
+
|
| 82 |
+
###
|
| 83 |
+
|
| 84 |
+
# Only show_config is part of the public API of this module.
__all__ = ["show_config"]

# Build-time configuration snapshot baked into the installed numpy.
CONFIG: Final[_ConfigDict] = ...
|
| 87 |
+
|
| 88 |
+
class DisplayModes(Enum):
    """Valid ``mode`` values for show()/show_config()."""

    # Print the configuration to stdout.
    stdout = "stdout"
    # Return the configuration as a dict.
    dicts = "dicts"
|
| 91 |
+
|
| 92 |
+
# Internal helper returning the PyYAML module (presumably raising if it is
# not installed — body not shown in this stub).
def _check_pyyaml() -> ModuleType: ...
|
| 93 |
+
|
| 94 |
+
# mode="stdout" (the default) prints the config and returns None;
# mode="dicts" returns the configuration dict instead.
@overload
def show(mode: L["stdout"] = "stdout") -> None: ...
@overload
def show(mode: L["dicts"]) -> _ConfigDict: ...
|
| 98 |
+
|
| 99 |
+
# Public entry point with the same overload pair as show().
@overload
def show_config(mode: L["stdout"] = "stdout") -> None: ...
@overload
def show_config(mode: L["dicts"]) -> _ConfigDict: ...
|
mantis_evalkit/lib/python3.10/site-packages/numpy/__init__.cython-30.pxd
ADDED
|
@@ -0,0 +1,1243 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# NumPy static imports for Cython >= 3.0
|
| 2 |
+
#
|
| 3 |
+
# If any of the PyArray_* functions are called, import_array must be
|
| 4 |
+
# called first. This is done automatically by Cython 3.0+ if a call
|
| 5 |
+
# is not detected inside of the module.
|
| 6 |
+
#
|
| 7 |
+
# Author: Dag Sverre Seljebotn
|
| 8 |
+
#
|
| 9 |
+
|
| 10 |
+
from cpython.ref cimport Py_INCREF
|
| 11 |
+
from cpython.object cimport PyObject, PyTypeObject, PyObject_TypeCheck
|
| 12 |
+
cimport libc.stdio as stdio
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
cdef extern from *:
    # Leave a marker that the NumPy declarations came from NumPy itself and not from Cython.
    # See https://github.com/cython/cython/issues/3573
    # The verbatim string below is emitted as-is into the generated C source.
    """
    /* Using NumPy API declarations from "numpy/__init__.cython-30.pxd" */
    """
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
cdef extern from "numpy/arrayobject.h":
|
| 24 |
+
# It would be nice to use size_t and ssize_t, but ssize_t has special
|
| 25 |
+
# implicit conversion rules, so just use "long".
|
| 26 |
+
# Note: The actual type only matters for Cython promotion, so long
|
| 27 |
+
# is closer than int, but could lead to incorrect promotion.
|
| 28 |
+
# (Not to worrying, and always the status-quo.)
|
| 29 |
+
ctypedef signed long npy_intp
|
| 30 |
+
ctypedef unsigned long npy_uintp
|
| 31 |
+
|
| 32 |
+
ctypedef unsigned char npy_bool
|
| 33 |
+
|
| 34 |
+
ctypedef signed char npy_byte
|
| 35 |
+
ctypedef signed short npy_short
|
| 36 |
+
ctypedef signed int npy_int
|
| 37 |
+
ctypedef signed long npy_long
|
| 38 |
+
ctypedef signed long long npy_longlong
|
| 39 |
+
|
| 40 |
+
ctypedef unsigned char npy_ubyte
|
| 41 |
+
ctypedef unsigned short npy_ushort
|
| 42 |
+
ctypedef unsigned int npy_uint
|
| 43 |
+
ctypedef unsigned long npy_ulong
|
| 44 |
+
ctypedef unsigned long long npy_ulonglong
|
| 45 |
+
|
| 46 |
+
ctypedef float npy_float
|
| 47 |
+
ctypedef double npy_double
|
| 48 |
+
ctypedef long double npy_longdouble
|
| 49 |
+
|
| 50 |
+
ctypedef signed char npy_int8
|
| 51 |
+
ctypedef signed short npy_int16
|
| 52 |
+
ctypedef signed int npy_int32
|
| 53 |
+
ctypedef signed long long npy_int64
|
| 54 |
+
ctypedef signed long long npy_int96
|
| 55 |
+
ctypedef signed long long npy_int128
|
| 56 |
+
|
| 57 |
+
ctypedef unsigned char npy_uint8
|
| 58 |
+
ctypedef unsigned short npy_uint16
|
| 59 |
+
ctypedef unsigned int npy_uint32
|
| 60 |
+
ctypedef unsigned long long npy_uint64
|
| 61 |
+
ctypedef unsigned long long npy_uint96
|
| 62 |
+
ctypedef unsigned long long npy_uint128
|
| 63 |
+
|
| 64 |
+
ctypedef float npy_float32
|
| 65 |
+
ctypedef double npy_float64
|
| 66 |
+
ctypedef long double npy_float80
|
| 67 |
+
ctypedef long double npy_float96
|
| 68 |
+
ctypedef long double npy_float128
|
| 69 |
+
|
| 70 |
+
ctypedef struct npy_cfloat:
|
| 71 |
+
pass
|
| 72 |
+
|
| 73 |
+
ctypedef struct npy_cdouble:
|
| 74 |
+
pass
|
| 75 |
+
|
| 76 |
+
ctypedef struct npy_clongdouble:
|
| 77 |
+
pass
|
| 78 |
+
|
| 79 |
+
ctypedef struct npy_complex64:
|
| 80 |
+
pass
|
| 81 |
+
|
| 82 |
+
ctypedef struct npy_complex128:
|
| 83 |
+
pass
|
| 84 |
+
|
| 85 |
+
ctypedef struct npy_complex160:
|
| 86 |
+
pass
|
| 87 |
+
|
| 88 |
+
ctypedef struct npy_complex192:
|
| 89 |
+
pass
|
| 90 |
+
|
| 91 |
+
ctypedef struct npy_complex256:
|
| 92 |
+
pass
|
| 93 |
+
|
| 94 |
+
ctypedef struct PyArray_Dims:
|
| 95 |
+
npy_intp *ptr
|
| 96 |
+
int len
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
cdef enum NPY_TYPES:
|
| 100 |
+
NPY_BOOL
|
| 101 |
+
NPY_BYTE
|
| 102 |
+
NPY_UBYTE
|
| 103 |
+
NPY_SHORT
|
| 104 |
+
NPY_USHORT
|
| 105 |
+
NPY_INT
|
| 106 |
+
NPY_UINT
|
| 107 |
+
NPY_LONG
|
| 108 |
+
NPY_ULONG
|
| 109 |
+
NPY_LONGLONG
|
| 110 |
+
NPY_ULONGLONG
|
| 111 |
+
NPY_FLOAT
|
| 112 |
+
NPY_DOUBLE
|
| 113 |
+
NPY_LONGDOUBLE
|
| 114 |
+
NPY_CFLOAT
|
| 115 |
+
NPY_CDOUBLE
|
| 116 |
+
NPY_CLONGDOUBLE
|
| 117 |
+
NPY_OBJECT
|
| 118 |
+
NPY_STRING
|
| 119 |
+
NPY_UNICODE
|
| 120 |
+
NPY_VOID
|
| 121 |
+
NPY_DATETIME
|
| 122 |
+
NPY_TIMEDELTA
|
| 123 |
+
NPY_NTYPES_LEGACY
|
| 124 |
+
NPY_NOTYPE
|
| 125 |
+
|
| 126 |
+
NPY_INT8
|
| 127 |
+
NPY_INT16
|
| 128 |
+
NPY_INT32
|
| 129 |
+
NPY_INT64
|
| 130 |
+
NPY_INT128
|
| 131 |
+
NPY_INT256
|
| 132 |
+
NPY_UINT8
|
| 133 |
+
NPY_UINT16
|
| 134 |
+
NPY_UINT32
|
| 135 |
+
NPY_UINT64
|
| 136 |
+
NPY_UINT128
|
| 137 |
+
NPY_UINT256
|
| 138 |
+
NPY_FLOAT16
|
| 139 |
+
NPY_FLOAT32
|
| 140 |
+
NPY_FLOAT64
|
| 141 |
+
NPY_FLOAT80
|
| 142 |
+
NPY_FLOAT96
|
| 143 |
+
NPY_FLOAT128
|
| 144 |
+
NPY_FLOAT256
|
| 145 |
+
NPY_COMPLEX32
|
| 146 |
+
NPY_COMPLEX64
|
| 147 |
+
NPY_COMPLEX128
|
| 148 |
+
NPY_COMPLEX160
|
| 149 |
+
NPY_COMPLEX192
|
| 150 |
+
NPY_COMPLEX256
|
| 151 |
+
NPY_COMPLEX512
|
| 152 |
+
|
| 153 |
+
NPY_INTP
|
| 154 |
+
NPY_UINTP
|
| 155 |
+
NPY_DEFAULT_INT # Not a compile time constant (normally)!
|
| 156 |
+
|
| 157 |
+
ctypedef enum NPY_ORDER:
|
| 158 |
+
NPY_ANYORDER
|
| 159 |
+
NPY_CORDER
|
| 160 |
+
NPY_FORTRANORDER
|
| 161 |
+
NPY_KEEPORDER
|
| 162 |
+
|
| 163 |
+
ctypedef enum NPY_CASTING:
|
| 164 |
+
NPY_NO_CASTING
|
| 165 |
+
NPY_EQUIV_CASTING
|
| 166 |
+
NPY_SAFE_CASTING
|
| 167 |
+
NPY_SAME_KIND_CASTING
|
| 168 |
+
NPY_UNSAFE_CASTING
|
| 169 |
+
|
| 170 |
+
ctypedef enum NPY_CLIPMODE:
|
| 171 |
+
NPY_CLIP
|
| 172 |
+
NPY_WRAP
|
| 173 |
+
NPY_RAISE
|
| 174 |
+
|
| 175 |
+
ctypedef enum NPY_SCALARKIND:
|
| 176 |
+
NPY_NOSCALAR,
|
| 177 |
+
NPY_BOOL_SCALAR,
|
| 178 |
+
NPY_INTPOS_SCALAR,
|
| 179 |
+
NPY_INTNEG_SCALAR,
|
| 180 |
+
NPY_FLOAT_SCALAR,
|
| 181 |
+
NPY_COMPLEX_SCALAR,
|
| 182 |
+
NPY_OBJECT_SCALAR
|
| 183 |
+
|
| 184 |
+
ctypedef enum NPY_SORTKIND:
|
| 185 |
+
NPY_QUICKSORT
|
| 186 |
+
NPY_HEAPSORT
|
| 187 |
+
NPY_MERGESORT
|
| 188 |
+
|
| 189 |
+
ctypedef enum NPY_SEARCHSIDE:
|
| 190 |
+
NPY_SEARCHLEFT
|
| 191 |
+
NPY_SEARCHRIGHT
|
| 192 |
+
|
| 193 |
+
enum:
|
| 194 |
+
# DEPRECATED since NumPy 1.7 ! Do not use in new code!
|
| 195 |
+
NPY_C_CONTIGUOUS
|
| 196 |
+
NPY_F_CONTIGUOUS
|
| 197 |
+
NPY_CONTIGUOUS
|
| 198 |
+
NPY_FORTRAN
|
| 199 |
+
NPY_OWNDATA
|
| 200 |
+
NPY_FORCECAST
|
| 201 |
+
NPY_ENSURECOPY
|
| 202 |
+
NPY_ENSUREARRAY
|
| 203 |
+
NPY_ELEMENTSTRIDES
|
| 204 |
+
NPY_ALIGNED
|
| 205 |
+
NPY_NOTSWAPPED
|
| 206 |
+
NPY_WRITEABLE
|
| 207 |
+
NPY_ARR_HAS_DESCR
|
| 208 |
+
|
| 209 |
+
NPY_BEHAVED
|
| 210 |
+
NPY_BEHAVED_NS
|
| 211 |
+
NPY_CARRAY
|
| 212 |
+
NPY_CARRAY_RO
|
| 213 |
+
NPY_FARRAY
|
| 214 |
+
NPY_FARRAY_RO
|
| 215 |
+
NPY_DEFAULT
|
| 216 |
+
|
| 217 |
+
NPY_IN_ARRAY
|
| 218 |
+
NPY_OUT_ARRAY
|
| 219 |
+
NPY_INOUT_ARRAY
|
| 220 |
+
NPY_IN_FARRAY
|
| 221 |
+
NPY_OUT_FARRAY
|
| 222 |
+
NPY_INOUT_FARRAY
|
| 223 |
+
|
| 224 |
+
NPY_UPDATE_ALL
|
| 225 |
+
|
| 226 |
+
enum:
|
| 227 |
+
# Added in NumPy 1.7 to replace the deprecated enums above.
|
| 228 |
+
NPY_ARRAY_C_CONTIGUOUS
|
| 229 |
+
NPY_ARRAY_F_CONTIGUOUS
|
| 230 |
+
NPY_ARRAY_OWNDATA
|
| 231 |
+
NPY_ARRAY_FORCECAST
|
| 232 |
+
NPY_ARRAY_ENSURECOPY
|
| 233 |
+
NPY_ARRAY_ENSUREARRAY
|
| 234 |
+
NPY_ARRAY_ELEMENTSTRIDES
|
| 235 |
+
NPY_ARRAY_ALIGNED
|
| 236 |
+
NPY_ARRAY_NOTSWAPPED
|
| 237 |
+
NPY_ARRAY_WRITEABLE
|
| 238 |
+
NPY_ARRAY_WRITEBACKIFCOPY
|
| 239 |
+
|
| 240 |
+
NPY_ARRAY_BEHAVED
|
| 241 |
+
NPY_ARRAY_BEHAVED_NS
|
| 242 |
+
NPY_ARRAY_CARRAY
|
| 243 |
+
NPY_ARRAY_CARRAY_RO
|
| 244 |
+
NPY_ARRAY_FARRAY
|
| 245 |
+
NPY_ARRAY_FARRAY_RO
|
| 246 |
+
NPY_ARRAY_DEFAULT
|
| 247 |
+
|
| 248 |
+
NPY_ARRAY_IN_ARRAY
|
| 249 |
+
NPY_ARRAY_OUT_ARRAY
|
| 250 |
+
NPY_ARRAY_INOUT_ARRAY
|
| 251 |
+
NPY_ARRAY_IN_FARRAY
|
| 252 |
+
NPY_ARRAY_OUT_FARRAY
|
| 253 |
+
NPY_ARRAY_INOUT_FARRAY
|
| 254 |
+
|
| 255 |
+
NPY_ARRAY_UPDATE_ALL
|
| 256 |
+
|
| 257 |
+
cdef enum:
|
| 258 |
+
NPY_MAXDIMS # 64 on NumPy 2.x and 32 on NumPy 1.x
|
| 259 |
+
NPY_RAVEL_AXIS # Used for functions like PyArray_Mean
|
| 260 |
+
|
| 261 |
+
ctypedef void (*PyArray_VectorUnaryFunc)(void *, void *, npy_intp, void *, void *)
|
| 262 |
+
|
| 263 |
+
ctypedef struct PyArray_ArrayDescr:
|
| 264 |
+
# shape is a tuple, but Cython doesn't support "tuple shape"
|
| 265 |
+
# inside a non-PyObject declaration, so we have to declare it
|
| 266 |
+
# as just a PyObject*.
|
| 267 |
+
PyObject* shape
|
| 268 |
+
|
| 269 |
+
ctypedef struct PyArray_Descr:
|
| 270 |
+
pass
|
| 271 |
+
|
| 272 |
+
ctypedef class numpy.dtype [object PyArray_Descr, check_size ignore]:
|
| 273 |
+
# Use PyDataType_* macros when possible, however there are no macros
|
| 274 |
+
# for accessing some of the fields, so some are defined.
|
| 275 |
+
cdef PyTypeObject* typeobj
|
| 276 |
+
cdef char kind
|
| 277 |
+
cdef char type
|
| 278 |
+
# Numpy sometimes mutates this without warning (e.g. it'll
|
| 279 |
+
# sometimes change "|" to "<" in shared dtype objects on
|
| 280 |
+
# little-endian machines). If this matters to you, use
|
| 281 |
+
# PyArray_IsNativeByteOrder(dtype.byteorder) instead of
|
| 282 |
+
# directly accessing this field.
|
| 283 |
+
cdef char byteorder
|
| 284 |
+
cdef int type_num
|
| 285 |
+
|
| 286 |
+
@property
|
| 287 |
+
cdef inline npy_intp itemsize(self) noexcept nogil:
|
| 288 |
+
return PyDataType_ELSIZE(self)
|
| 289 |
+
|
| 290 |
+
@property
|
| 291 |
+
cdef inline npy_intp alignment(self) noexcept nogil:
|
| 292 |
+
return PyDataType_ALIGNMENT(self)
|
| 293 |
+
|
| 294 |
+
# Use fields/names with care as they may be NULL. You must check
|
| 295 |
+
# for this using PyDataType_HASFIELDS.
|
| 296 |
+
@property
|
| 297 |
+
cdef inline object fields(self):
|
| 298 |
+
return <object>PyDataType_FIELDS(self)
|
| 299 |
+
|
| 300 |
+
@property
|
| 301 |
+
cdef inline tuple names(self):
|
| 302 |
+
return <tuple>PyDataType_NAMES(self)
|
| 303 |
+
|
| 304 |
+
# Use PyDataType_HASSUBARRAY to test whether this field is
|
| 305 |
+
# valid (the pointer can be NULL). Most users should access
|
| 306 |
+
# this field via the inline helper method PyDataType_SHAPE.
|
| 307 |
+
@property
|
| 308 |
+
cdef inline PyArray_ArrayDescr* subarray(self) noexcept nogil:
|
| 309 |
+
return PyDataType_SUBARRAY(self)
|
| 310 |
+
|
| 311 |
+
@property
|
| 312 |
+
cdef inline npy_uint64 flags(self) noexcept nogil:
|
| 313 |
+
"""The data types flags."""
|
| 314 |
+
return PyDataType_FLAGS(self)
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
ctypedef class numpy.flatiter [object PyArrayIterObject, check_size ignore]:
|
| 318 |
+
# Use through macros
|
| 319 |
+
pass
|
| 320 |
+
|
| 321 |
+
ctypedef class numpy.broadcast [object PyArrayMultiIterObject, check_size ignore]:
    # Wrapper for NumPy's multi-iterator (PyArrayMultiIterObject).
    # Direct struct field access is avoided; every property below simply
    # forwards to the corresponding PyArray_MultiIter_* C-API macro.

    @property
    cdef inline int numiter(self) noexcept nogil:
        """The number of arrays that need to be broadcast to the same shape."""
        return PyArray_MultiIter_NUMITER(self)

    @property
    cdef inline npy_intp size(self) noexcept nogil:
        """The total broadcasted size."""
        return PyArray_MultiIter_SIZE(self)

    @property
    cdef inline npy_intp index(self) noexcept nogil:
        """The current (1-d) index into the broadcasted result."""
        return PyArray_MultiIter_INDEX(self)

    @property
    cdef inline int nd(self) noexcept nogil:
        """The number of dimensions in the broadcasted result."""
        return PyArray_MultiIter_NDIM(self)

    @property
    cdef inline npy_intp* dimensions(self) noexcept nogil:
        """The shape of the broadcasted result."""
        # Pointer into the iterator's own storage; length is `nd`.
        return PyArray_MultiIter_DIMS(self)

    @property
    cdef inline void** iters(self) noexcept nogil:
        """An array of iterator objects that holds the iterators for the arrays to be broadcast together.
        On return, the iterators are adjusted for broadcasting."""
        return PyArray_MultiIter_ITERS(self)
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
ctypedef struct PyArrayObject:
    # Opaque struct tag only (no fields declared).
    # For use in situations where ndarray can't replace PyArrayObject*,
    # like PyArrayObject**.
    pass
|
| 359 |
+
|
| 360 |
+
ctypedef class numpy.ndarray [object PyArrayObject, check_size ignore]:
    # Default buffer-protocol mode for typed memoryview/buffer access.
    cdef __cythonbufferdefaults__ = {"mode": "strided"}

    # NOTE: no field declarations since direct access is deprecated since NumPy 1.7
    # Instead, we use properties that map to the corresponding C-API functions.

    @property
    cdef inline PyObject* base(self) noexcept nogil:
        """Returns a borrowed reference to the object owning the data/memory.
        """
        return PyArray_BASE(self)

    @property
    cdef inline dtype descr(self):
        """Returns an owned reference to the dtype of the array.
        """
        # PyArray_DESCR returns a borrowed reference (see its declaration
        # below); the <dtype> object cast hands the reference to Cython's
        # reference management, yielding an owned reference for the caller.
        return <dtype>PyArray_DESCR(self)

    @property
    cdef inline int ndim(self) noexcept nogil:
        """Returns the number of dimensions in the array.
        """
        return PyArray_NDIM(self)

    @property
    cdef inline npy_intp *shape(self) noexcept nogil:
        """Returns a pointer to the dimensions/shape of the array.
        The number of elements matches the number of dimensions of the array (ndim).
        Can return NULL for 0-dimensional arrays.
        """
        return PyArray_DIMS(self)

    @property
    cdef inline npy_intp *strides(self) noexcept nogil:
        """Returns a pointer to the strides of the array.
        The number of elements matches the number of dimensions of the array (ndim).
        """
        return PyArray_STRIDES(self)

    @property
    cdef inline npy_intp size(self) noexcept nogil:
        """Returns the total size (in number of elements) of the array.
        """
        return PyArray_SIZE(self)

    @property
    cdef inline char* data(self) noexcept nogil:
        """The pointer to the data buffer as a char*.
        This is provided for legacy reasons to avoid direct struct field access.
        For new code that needs this access, you probably want to cast the result
        of `PyArray_DATA()` instead, which returns a 'void*'.
        """
        return PyArray_BYTES(self)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
# C-level initializer for the NumPy array C-API; must be called (via the
# import_array() helper below) before any PyArray_* function is used.
int _import_array() except -1
# A second definition so _import_array isn't marked as used when we use it here.
# Do not use - subject to change any time.
int __pyx_import_array "_import_array"() except -1
|
| 419 |
+
|
| 420 |
+
#
# Macros from ndarrayobject.h
#
# C macros/inline functions re-exposed to Cython as regular function
# declarations; they operate on the ndarray/dtype wrapper classes above.
bint PyArray_CHKFLAGS(ndarray m, int flags) nogil
bint PyArray_IS_C_CONTIGUOUS(ndarray arr) nogil
bint PyArray_IS_F_CONTIGUOUS(ndarray arr) nogil
bint PyArray_ISCONTIGUOUS(ndarray m) nogil
bint PyArray_ISWRITEABLE(ndarray m) nogil
bint PyArray_ISALIGNED(ndarray m) nogil

int PyArray_NDIM(ndarray) nogil
bint PyArray_ISONESEGMENT(ndarray) nogil
bint PyArray_ISFORTRAN(ndarray) nogil
int PyArray_FORTRANIF(ndarray) nogil

void* PyArray_DATA(ndarray) nogil
char* PyArray_BYTES(ndarray) nogil

npy_intp* PyArray_DIMS(ndarray) nogil
npy_intp* PyArray_STRIDES(ndarray) nogil
npy_intp PyArray_DIM(ndarray, size_t) nogil
npy_intp PyArray_STRIDE(ndarray, size_t) nogil

PyObject *PyArray_BASE(ndarray) nogil  # returns borrowed reference!
PyArray_Descr *PyArray_DESCR(ndarray) nogil  # returns borrowed reference to dtype!
PyArray_Descr *PyArray_DTYPE(ndarray) nogil  # returns borrowed reference to dtype! NP 1.7+ alias for descr.
int PyArray_FLAGS(ndarray) nogil
void PyArray_CLEARFLAGS(ndarray, int flags) nogil  # Added in NumPy 1.7
void PyArray_ENABLEFLAGS(ndarray, int flags) nogil  # Added in NumPy 1.7
npy_intp PyArray_ITEMSIZE(ndarray) nogil
int PyArray_TYPE(ndarray arr) nogil

object PyArray_GETITEM(ndarray arr, void *itemptr)
int PyArray_SETITEM(ndarray arr, void *itemptr, object obj) except -1

# Predicates on raw type numbers (int typecodes).
bint PyTypeNum_ISBOOL(int) nogil
bint PyTypeNum_ISUNSIGNED(int) nogil
bint PyTypeNum_ISSIGNED(int) nogil
bint PyTypeNum_ISINTEGER(int) nogil
bint PyTypeNum_ISFLOAT(int) nogil
bint PyTypeNum_ISNUMBER(int) nogil
bint PyTypeNum_ISSTRING(int) nogil
bint PyTypeNum_ISCOMPLEX(int) nogil
bint PyTypeNum_ISFLEXIBLE(int) nogil
bint PyTypeNum_ISUSERDEF(int) nogil
bint PyTypeNum_ISEXTENDED(int) nogil
bint PyTypeNum_ISOBJECT(int) nogil

# Accessors for dtype (PyArray_Descr) internals.
npy_intp PyDataType_ELSIZE(dtype) nogil
npy_intp PyDataType_ALIGNMENT(dtype) nogil
PyObject* PyDataType_METADATA(dtype) nogil
PyArray_ArrayDescr* PyDataType_SUBARRAY(dtype) nogil
PyObject* PyDataType_NAMES(dtype) nogil
PyObject* PyDataType_FIELDS(dtype) nogil

# Predicates on dtype objects.
bint PyDataType_ISBOOL(dtype) nogil
bint PyDataType_ISUNSIGNED(dtype) nogil
bint PyDataType_ISSIGNED(dtype) nogil
bint PyDataType_ISINTEGER(dtype) nogil
bint PyDataType_ISFLOAT(dtype) nogil
bint PyDataType_ISNUMBER(dtype) nogil
bint PyDataType_ISSTRING(dtype) nogil
bint PyDataType_ISCOMPLEX(dtype) nogil
bint PyDataType_ISFLEXIBLE(dtype) nogil
bint PyDataType_ISUSERDEF(dtype) nogil
bint PyDataType_ISEXTENDED(dtype) nogil
bint PyDataType_ISOBJECT(dtype) nogil
bint PyDataType_HASFIELDS(dtype) nogil
bint PyDataType_HASSUBARRAY(dtype) nogil
npy_uint64 PyDataType_FLAGS(dtype) nogil

# Predicates on arrays (dispatch on the array's dtype).
bint PyArray_ISBOOL(ndarray) nogil
bint PyArray_ISUNSIGNED(ndarray) nogil
bint PyArray_ISSIGNED(ndarray) nogil
bint PyArray_ISINTEGER(ndarray) nogil
bint PyArray_ISFLOAT(ndarray) nogil
bint PyArray_ISNUMBER(ndarray) nogil
bint PyArray_ISSTRING(ndarray) nogil
bint PyArray_ISCOMPLEX(ndarray) nogil
bint PyArray_ISFLEXIBLE(ndarray) nogil
bint PyArray_ISUSERDEF(ndarray) nogil
bint PyArray_ISEXTENDED(ndarray) nogil
bint PyArray_ISOBJECT(ndarray) nogil
bint PyArray_HASFIELDS(ndarray) nogil

bint PyArray_ISVARIABLE(ndarray) nogil

bint PyArray_SAFEALIGNEDCOPY(ndarray) nogil
bint PyArray_ISNBO(char) nogil  # works on ndarray.byteorder
bint PyArray_IsNativeByteOrder(char) nogil  # works on ndarray.byteorder
bint PyArray_ISNOTSWAPPED(ndarray) nogil
bint PyArray_ISBYTESWAPPED(ndarray) nogil

bint PyArray_FLAGSWAP(ndarray, int) nogil

bint PyArray_ISCARRAY(ndarray) nogil
bint PyArray_ISCARRAY_RO(ndarray) nogil
bint PyArray_ISFARRAY(ndarray) nogil
bint PyArray_ISFARRAY_RO(ndarray) nogil
bint PyArray_ISBEHAVED(ndarray) nogil
bint PyArray_ISBEHAVED_RO(ndarray) nogil


bint PyDataType_ISNOTSWAPPED(dtype) nogil
bint PyDataType_ISBYTESWAPPED(dtype) nogil

bint PyArray_DescrCheck(object)

bint PyArray_Check(object)
bint PyArray_CheckExact(object)

# Cannot be supported due to out arg:
# bint PyArray_HasArrayInterfaceType(object, dtype, object, object&)
# bint PyArray_HasArrayInterface(op, out)


bint PyArray_IsZeroDim(object)
# Cannot be supported due to ## ## in macro:
# bint PyArray_IsScalar(object, verbatim work)
bint PyArray_CheckScalar(object)
bint PyArray_IsPythonNumber(object)
bint PyArray_IsPythonScalar(object)
bint PyArray_IsAnyScalar(object)
bint PyArray_CheckAnyScalar(object)

ndarray PyArray_GETCONTIGUOUS(ndarray)
bint PyArray_SAMESHAPE(ndarray, ndarray) nogil
npy_intp PyArray_SIZE(ndarray) nogil
npy_intp PyArray_NBYTES(ndarray) nogil

# Array construction/conversion convenience macros.
object PyArray_FROM_O(object)
object PyArray_FROM_OF(object m, int flags)
object PyArray_FROM_OT(object m, int type)
object PyArray_FROM_OTF(object m, int type, int flags)
object PyArray_FROMANY(object m, int type, int min, int max, int flags)
object PyArray_ZEROS(int nd, npy_intp* dims, int type, int fortran)
object PyArray_EMPTY(int nd, npy_intp* dims, int type, int fortran)
void PyArray_FILLWBYTE(ndarray, int val)
object PyArray_ContiguousFromAny(op, int, int min_depth, int max_depth)
unsigned char PyArray_EquivArrTypes(ndarray a1, ndarray a2)
bint PyArray_EquivByteorders(int b1, int b2) nogil
object PyArray_SimpleNew(int nd, npy_intp* dims, int typenum)
object PyArray_SimpleNewFromData(int nd, npy_intp* dims, int typenum, void* data)
#object PyArray_SimpleNewFromDescr(int nd, npy_intp* dims, dtype descr)
object PyArray_ToScalar(void* data, ndarray arr)

# Fast element-pointer access for 1- to 4-dimensional arrays.
void* PyArray_GETPTR1(ndarray m, npy_intp i) nogil
void* PyArray_GETPTR2(ndarray m, npy_intp i, npy_intp j) nogil
void* PyArray_GETPTR3(ndarray m, npy_intp i, npy_intp j, npy_intp k) nogil
void* PyArray_GETPTR4(ndarray m, npy_intp i, npy_intp j, npy_intp k, npy_intp l) nogil

# Cannot be supported due to out arg
# void PyArray_DESCR_REPLACE(descr)


object PyArray_Copy(ndarray)
object PyArray_FromObject(object op, int type, int min_depth, int max_depth)
object PyArray_ContiguousFromObject(object op, int type, int min_depth, int max_depth)
object PyArray_CopyFromObject(object op, int type, int min_depth, int max_depth)

object PyArray_Cast(ndarray mp, int type_num)
object PyArray_Take(ndarray ap, object items, int axis)
object PyArray_Put(ndarray ap, object items, object values)

# Flat-iterator (numpy.flatiter) macros.
void PyArray_ITER_RESET(flatiter it) nogil
void PyArray_ITER_NEXT(flatiter it) nogil
void PyArray_ITER_GOTO(flatiter it, npy_intp* destination) nogil
void PyArray_ITER_GOTO1D(flatiter it, npy_intp ind) nogil
void* PyArray_ITER_DATA(flatiter it) nogil
bint PyArray_ITER_NOTDONE(flatiter it) nogil

# Multi-iterator (numpy.broadcast) macros backing the class properties above.
void PyArray_MultiIter_RESET(broadcast multi) nogil
void PyArray_MultiIter_NEXT(broadcast multi) nogil
void PyArray_MultiIter_GOTO(broadcast multi, npy_intp dest) nogil
void PyArray_MultiIter_GOTO1D(broadcast multi, npy_intp ind) nogil
void* PyArray_MultiIter_DATA(broadcast multi, npy_intp i) nogil
void PyArray_MultiIter_NEXTi(broadcast multi, npy_intp i) nogil
bint PyArray_MultiIter_NOTDONE(broadcast multi) nogil
npy_intp PyArray_MultiIter_SIZE(broadcast multi) nogil
int PyArray_MultiIter_NDIM(broadcast multi) nogil
npy_intp PyArray_MultiIter_INDEX(broadcast multi) nogil
int PyArray_MultiIter_NUMITER(broadcast multi) nogil
npy_intp* PyArray_MultiIter_DIMS(broadcast multi) nogil
void** PyArray_MultiIter_ITERS(broadcast multi) nogil
|
| 604 |
+
|
| 605 |
+
# Functions from __multiarray_api.h

# Functions taking dtype and returning object/ndarray are disabled
# for now as they steal dtype references. I'm conservative and disable
# more than is probably needed until it can be checked further.
# (Commented-out declarations below are exactly those disabled entries.)
int PyArray_INCREF (ndarray) except *  # uses PyArray_Item_INCREF...
int PyArray_XDECREF (ndarray) except *  # uses PyArray_Item_DECREF...
dtype PyArray_DescrFromType (int)
object PyArray_TypeObjectFromType (int)
char * PyArray_Zero (ndarray)
char * PyArray_One (ndarray)
#object PyArray_CastToType (ndarray, dtype, int)
int PyArray_CanCastSafely (int, int)  # writes errors
npy_bool PyArray_CanCastTo (dtype, dtype)  # writes errors
int PyArray_ObjectType (object, int) except 0
dtype PyArray_DescrFromObject (object, dtype)
#ndarray* PyArray_ConvertToCommonType (object, int *)
dtype PyArray_DescrFromScalar (object)
dtype PyArray_DescrFromTypeObject (object)
npy_intp PyArray_Size (object)
#object PyArray_Scalar (void *, dtype, object)
#object PyArray_FromScalar (object, dtype)
void PyArray_ScalarAsCtype (object, void *)
#int PyArray_CastScalarToCtype (object, void *, dtype)
#int PyArray_CastScalarDirect (object, dtype, void *, int)
#PyArray_VectorUnaryFunc * PyArray_GetCastFunc (dtype, int)
#object PyArray_FromAny (object, dtype, int, int, int, object)
object PyArray_EnsureArray (object)
object PyArray_EnsureAnyArray (object)
#object PyArray_FromFile (stdio.FILE *, dtype, npy_intp, char *)
#object PyArray_FromString (char *, npy_intp, dtype, npy_intp, char *)
#object PyArray_FromBuffer (object, dtype, npy_intp, npy_intp)
#object PyArray_FromIter (object, dtype, npy_intp)
object PyArray_Return (ndarray)
#object PyArray_GetField (ndarray, dtype, int)
#int PyArray_SetField (ndarray, dtype, int, object) except -1
object PyArray_Byteswap (ndarray, npy_bool)
object PyArray_Resize (ndarray, PyArray_Dims *, int, NPY_ORDER)
int PyArray_CopyInto (ndarray, ndarray) except -1
int PyArray_CopyAnyInto (ndarray, ndarray) except -1
int PyArray_CopyObject (ndarray, object) except -1
object PyArray_NewCopy (ndarray, NPY_ORDER)
object PyArray_ToList (ndarray)
object PyArray_ToString (ndarray, NPY_ORDER)
int PyArray_ToFile (ndarray, stdio.FILE *, char *, char *) except -1
int PyArray_Dump (object, object, int) except -1
object PyArray_Dumps (object, int)
int PyArray_ValidType (int)  # Cannot error
void PyArray_UpdateFlags (ndarray, int)
object PyArray_New (type, int, npy_intp *, int, npy_intp *, void *, int, int, object)
#object PyArray_NewFromDescr (type, dtype, int, npy_intp *, npy_intp *, void *, int, object)
#dtype PyArray_DescrNew (dtype)
dtype PyArray_DescrNewFromType (int)
double PyArray_GetPriority (object, double)  # clears errors as of 1.25
object PyArray_IterNew (object)
object PyArray_MultiIterNew (int, ...)

int PyArray_PyIntAsInt (object) except? -1
npy_intp PyArray_PyIntAsIntp (object)
int PyArray_Broadcast (broadcast) except -1
int PyArray_FillWithScalar (ndarray, object) except -1
npy_bool PyArray_CheckStrides (int, int, npy_intp, npy_intp, npy_intp *, npy_intp *)
dtype PyArray_DescrNewByteorder (dtype, char)
object PyArray_IterAllButAxis (object, int *)
#object PyArray_CheckFromAny (object, dtype, int, int, int, object)
#object PyArray_FromArray (ndarray, dtype, int)
object PyArray_FromInterface (object)
object PyArray_FromStructInterface (object)
#object PyArray_FromArrayAttr (object, dtype, object)
#NPY_SCALARKIND PyArray_ScalarKind (int, ndarray*)
int PyArray_CanCoerceScalar (int, int, NPY_SCALARKIND)
npy_bool PyArray_CanCastScalar (type, type)
int PyArray_RemoveSmallest (broadcast) except -1
int PyArray_ElementStrides (object)
void PyArray_Item_INCREF (char *, dtype) except *
void PyArray_Item_XDECREF (char *, dtype) except *
object PyArray_Transpose (ndarray, PyArray_Dims *)
object PyArray_TakeFrom (ndarray, object, int, ndarray, NPY_CLIPMODE)
object PyArray_PutTo (ndarray, object, object, NPY_CLIPMODE)
object PyArray_PutMask (ndarray, object, object)
object PyArray_Repeat (ndarray, object, int)
object PyArray_Choose (ndarray, object, ndarray, NPY_CLIPMODE)
int PyArray_Sort (ndarray, int, NPY_SORTKIND) except -1
object PyArray_ArgSort (ndarray, int, NPY_SORTKIND)
object PyArray_SearchSorted (ndarray, object, NPY_SEARCHSIDE, PyObject *)
object PyArray_ArgMax (ndarray, int, ndarray)
object PyArray_ArgMin (ndarray, int, ndarray)
object PyArray_Reshape (ndarray, object)
object PyArray_Newshape (ndarray, PyArray_Dims *, NPY_ORDER)
object PyArray_Squeeze (ndarray)
#object PyArray_View (ndarray, dtype, type)
object PyArray_SwapAxes (ndarray, int, int)
object PyArray_Max (ndarray, int, ndarray)
object PyArray_Min (ndarray, int, ndarray)
object PyArray_Ptp (ndarray, int, ndarray)
object PyArray_Mean (ndarray, int, int, ndarray)
object PyArray_Trace (ndarray, int, int, int, int, ndarray)
object PyArray_Diagonal (ndarray, int, int, int)
object PyArray_Clip (ndarray, object, object, ndarray)
object PyArray_Conjugate (ndarray, ndarray)
object PyArray_Nonzero (ndarray)
object PyArray_Std (ndarray, int, int, ndarray, int)
object PyArray_Sum (ndarray, int, int, ndarray)
object PyArray_CumSum (ndarray, int, int, ndarray)
object PyArray_Prod (ndarray, int, int, ndarray)
object PyArray_CumProd (ndarray, int, int, ndarray)
object PyArray_All (ndarray, int, ndarray)
object PyArray_Any (ndarray, int, ndarray)
object PyArray_Compress (ndarray, object, int, ndarray)
object PyArray_Flatten (ndarray, NPY_ORDER)
object PyArray_Ravel (ndarray, NPY_ORDER)
npy_intp PyArray_MultiplyList (npy_intp *, int)
int PyArray_MultiplyIntList (int *, int)
void * PyArray_GetPtr (ndarray, npy_intp*)
int PyArray_CompareLists (npy_intp *, npy_intp *, int)
#int PyArray_AsCArray (object*, void *, npy_intp *, int, dtype)
int PyArray_Free (object, void *)
#int PyArray_Converter (object, object*)
int PyArray_IntpFromSequence (object, npy_intp *, int) except -1
object PyArray_Concatenate (object, int)
object PyArray_InnerProduct (object, object)
object PyArray_MatrixProduct (object, object)
object PyArray_Correlate (object, object, int)
#int PyArray_DescrConverter (object, dtype*) except 0
#int PyArray_DescrConverter2 (object, dtype*) except 0
int PyArray_IntpConverter (object, PyArray_Dims *) except 0
#int PyArray_BufferConverter (object, chunk) except 0
int PyArray_AxisConverter (object, int *) except 0
int PyArray_BoolConverter (object, npy_bool *) except 0
int PyArray_ByteorderConverter (object, char *) except 0
int PyArray_OrderConverter (object, NPY_ORDER *) except 0
unsigned char PyArray_EquivTypes (dtype, dtype)  # clears errors
#object PyArray_Zeros (int, npy_intp *, dtype, int)
#object PyArray_Empty (int, npy_intp *, dtype, int)
object PyArray_Where (object, object, object)
object PyArray_Arange (double, double, double, int)
#object PyArray_ArangeObj (object, object, object, dtype)
int PyArray_SortkindConverter (object, NPY_SORTKIND *) except 0
object PyArray_LexSort (object, int)
object PyArray_Round (ndarray, int, ndarray)
unsigned char PyArray_EquivTypenums (int, int)
int PyArray_RegisterDataType (dtype) except -1
int PyArray_RegisterCastFunc (dtype, int, PyArray_VectorUnaryFunc *) except -1
int PyArray_RegisterCanCast (dtype, int, NPY_SCALARKIND) except -1
#void PyArray_InitArrFuncs (PyArray_ArrFuncs *)
object PyArray_IntTupleFromIntp (int, npy_intp *)
int PyArray_ClipmodeConverter (object, NPY_CLIPMODE *) except 0
#int PyArray_OutputConverter (object, ndarray*) except 0
object PyArray_BroadcastToShape (object, npy_intp *, int)
#int PyArray_DescrAlignConverter (object, dtype*) except 0
#int PyArray_DescrAlignConverter2 (object, dtype*) except 0
int PyArray_SearchsideConverter (object, void *) except 0
object PyArray_CheckAxis (ndarray, int *, int)
npy_intp PyArray_OverflowMultiplyList (npy_intp *, int)
int PyArray_SetBaseObject(ndarray, base) except -1  # NOTE: steals a reference to base! Use "set_array_base()" instead.

# The memory handler functions require the NumPy 1.22 API
# and may require defining NPY_TARGET_VERSION
ctypedef struct PyDataMemAllocator:
    void *ctx  # opaque context pointer passed back to every allocator callback
    void* (*malloc) (void *ctx, size_t size)
    void* (*calloc) (void *ctx, size_t nelem, size_t elsize)
    void* (*realloc) (void *ctx, void *ptr, size_t new_size)
    void (*free) (void *ctx, void *ptr, size_t size)

ctypedef struct PyDataMem_Handler:
    char* name
    npy_uint8 version
    PyDataMemAllocator allocator

object PyDataMem_SetHandler(object handler)
object PyDataMem_GetHandler()

# additional datetime related functions are defined below
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
# Typedefs that matches the runtime dtype objects in
# the numpy module.

# The ones that are commented out needs an IFDEF function
# in Cython to enable them only on the right systems.

ctypedef npy_int8 int8_t
ctypedef npy_int16 int16_t
ctypedef npy_int32 int32_t
ctypedef npy_int64 int64_t
#ctypedef npy_int96 int96_t
#ctypedef npy_int128 int128_t

ctypedef npy_uint8 uint8_t
ctypedef npy_uint16 uint16_t
ctypedef npy_uint32 uint32_t
ctypedef npy_uint64 uint64_t
#ctypedef npy_uint96 uint96_t
#ctypedef npy_uint128 uint128_t

ctypedef npy_float32 float32_t
ctypedef npy_float64 float64_t
#ctypedef npy_float80 float80_t
#ctypedef npy_float128 float128_t

ctypedef float complex complex64_t
ctypedef double complex complex128_t

ctypedef npy_longlong longlong_t
ctypedef npy_ulonglong ulonglong_t

ctypedef npy_intp intp_t
ctypedef npy_uintp uintp_t

# NOTE: float_t and double_t are BOTH aliases of npy_double (C double),
# matching NumPy's runtime where np.float_ was double precision.
ctypedef npy_double float_t
ctypedef npy_double double_t
ctypedef npy_longdouble longdouble_t

ctypedef float complex cfloat_t
ctypedef double complex cdouble_t
ctypedef double complex complex_t
ctypedef long double complex clongdouble_t
|
| 823 |
+
|
| 824 |
+
cdef inline object PyArray_MultiIterNew1(a):
    # Typed convenience wrapper around the variadic PyArray_MultiIterNew
    # for exactly one input array.
    return PyArray_MultiIterNew(1, <void*>a)
|
| 826 |
+
|
| 827 |
+
cdef inline object PyArray_MultiIterNew2(a, b):
    # Convenience wrapper: broadcast iterator over two inputs.
    return PyArray_MultiIterNew(2, <void*>a, <void*>b)
|
| 829 |
+
|
| 830 |
+
cdef inline object PyArray_MultiIterNew3(a, b, c):
    # Convenience wrapper: broadcast iterator over three inputs.
    return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)
|
| 832 |
+
|
| 833 |
+
cdef inline object PyArray_MultiIterNew4(a, b, c, d):
    # Convenience wrapper: broadcast iterator over four inputs.
    return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)
|
| 835 |
+
|
| 836 |
+
cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
    # Convenience wrapper: broadcast iterator over five inputs.
    return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)
|
| 838 |
+
|
| 839 |
+
cdef inline tuple PyDataType_SHAPE(dtype d):
    # Return the subarray shape of a dtype as a tuple, or the empty tuple
    # when the dtype has no subarray.  Checking PyDataType_HASSUBARRAY
    # first is required because the subarray pointer can be NULL.
    if PyDataType_HASSUBARRAY(d):
        return <tuple>d.subarray.shape
    else:
        return ()
|
| 844 |
+
|
| 845 |
+
|
| 846 |
+
cdef extern from "numpy/ndarrayobject.h":
    # Scalar type objects plus the 64-bit integer typedefs that back
    # NumPy's datetime64/timedelta64 values.
    PyTypeObject PyTimedeltaArrType_Type
    PyTypeObject PyDatetimeArrType_Type
    ctypedef int64_t npy_timedelta
    ctypedef int64_t npy_datetime
|
| 851 |
+
|
| 852 |
+
cdef extern from "numpy/ndarraytypes.h":
    ctypedef struct PyArray_DatetimeMetaData:
        NPY_DATETIMEUNIT base  # time unit of the stored integer value
        int64_t num  # presumably a multiplier on the base unit — confirm against NumPy datetime docs

    # Broken-down datetime representation used by the conversion
    # functions declared further below.
    ctypedef struct npy_datetimestruct:
        int64_t year
        int32_t month, day, hour, min, sec, us, ps, as
|
| 860 |
+
|
| 861 |
+
|
| 862 |
+
cdef extern from "numpy/arrayscalars.h":

    # abstract types
    # (declared as empty object wrappers; instances are only ever created
    # by NumPy itself, these exist for isinstance-style type checks)
    ctypedef class numpy.generic [object PyObject]:
        pass
    ctypedef class numpy.number [object PyObject]:
        pass
    ctypedef class numpy.integer [object PyObject]:
        pass
    ctypedef class numpy.signedinteger [object PyObject]:
        pass
    ctypedef class numpy.unsignedinteger [object PyObject]:
        pass
    ctypedef class numpy.inexact [object PyObject]:
        pass
    ctypedef class numpy.floating [object PyObject]:
        pass
    ctypedef class numpy.complexfloating [object PyObject]:
        pass
    ctypedef class numpy.flexible [object PyObject]:
        pass
    ctypedef class numpy.character [object PyObject]:
        pass

    # C layout of datetime64/timedelta64 scalar objects: the raw integer
    # value plus its unit metadata.
    ctypedef struct PyDatetimeScalarObject:
        # PyObject_HEAD
        npy_datetime obval
        PyArray_DatetimeMetaData obmeta

    ctypedef struct PyTimedeltaScalarObject:
        # PyObject_HEAD
        npy_timedelta obval
        PyArray_DatetimeMetaData obmeta

    # Datetime units from years (Y) down to attoseconds (as);
    # NPY_FR_GENERIC is the unit-less case.
    ctypedef enum NPY_DATETIMEUNIT:
        NPY_FR_Y
        NPY_FR_M
        NPY_FR_W
        NPY_FR_D
        NPY_FR_B
        NPY_FR_h
        NPY_FR_m
        NPY_FR_s
        NPY_FR_ms
        NPY_FR_us
        NPY_FR_ns
        NPY_FR_ps
        NPY_FR_fs
        NPY_FR_as
        NPY_FR_GENERIC
|
| 912 |
+
|
| 913 |
+
|
| 914 |
+
cdef extern from "numpy/arrayobject.h":
    # These are part of the C-API defined in `__multiarray_api.h`
    # The quoted string after each Cython name below is the actual C
    # symbol (Cython's extern renaming syntax).

    # NumPy internal definitions in datetime_strings.c:
    int get_datetime_iso_8601_strlen "NpyDatetime_GetDatetimeISO8601StrLen" (
            int local, NPY_DATETIMEUNIT base)
    int make_iso_8601_datetime "NpyDatetime_MakeISO8601Datetime" (
            npy_datetimestruct *dts, char *outstr, npy_intp outlen,
            int local, int utc, NPY_DATETIMEUNIT base, int tzoffset,
            NPY_CASTING casting) except -1

    # NumPy internal definition in datetime.c:
    # May return 1 to indicate that object does not appear to be a datetime
    # (returns 0 on success).
    int convert_pydatetime_to_datetimestruct "NpyDatetime_ConvertPyDateTimeToDatetimeStruct" (
            PyObject *obj, npy_datetimestruct *out,
            NPY_DATETIMEUNIT *out_bestunit, int apply_tzinfo) except -1
    int convert_datetime64_to_datetimestruct "NpyDatetime_ConvertDatetime64ToDatetimeStruct" (
            PyArray_DatetimeMetaData *meta, npy_datetime dt,
            npy_datetimestruct *out) except -1
    int convert_datetimestruct_to_datetime64 "NpyDatetime_ConvertDatetimeStructToDatetime64"(
            PyArray_DatetimeMetaData *meta, const npy_datetimestruct *dts,
            npy_datetime *out) except -1
|
| 937 |
+
|
| 938 |
+
|
| 939 |
+
#
|
| 940 |
+
# ufunc API
|
| 941 |
+
#
|
| 942 |
+
|
| 943 |
+
cdef extern from "numpy/ufuncobject.h":

    # Signature of a 1-d inner-loop function: data pointers, dimension
    # counts, strides, and an opaque extra-data pointer.
    ctypedef void (*PyUFuncGenericFunction) (char **, npy_intp *, npy_intp *, void *)

    ctypedef class numpy.ufunc [object PyUFuncObject, check_size ignore]:
        cdef:
            int nin, nout, nargs
            int identity
            PyUFuncGenericFunction *functions
            void **data
            int ntypes
            int check_return
            char *name
            char *types
            char *doc
            void *ptr
            PyObject *obj
            PyObject *userloops

    cdef enum:
        PyUFunc_Zero
        PyUFunc_One
        PyUFunc_None
        UFUNC_FPE_DIVIDEBYZERO
        UFUNC_FPE_OVERFLOW
        UFUNC_FPE_UNDERFLOW
        UFUNC_FPE_INVALID

    object PyUFunc_FromFuncAndData(PyUFuncGenericFunction *,
          void **, char *, int, int, int, int, char *, char *, int)
    int PyUFunc_RegisterLoopForType(ufunc, int,
                                    PyUFuncGenericFunction, int *, void *) except -1
    # Generic inner loops provided by NumPy; the suffix encodes the
    # typecode signature (e.g. f_f: float in, float out; dd_d: two
    # doubles in, double out; _As_ variants compute in the wider type).
    void PyUFunc_f_f_As_d_d \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_d_d \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_f_f \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_g_g \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_F_F_As_D_D \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_F_F \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_D_D \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_G_G \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_O_O \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_ff_f_As_dd_d \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_ff_f \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_dd_d \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_gg_g \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_FF_F_As_DD_D \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_DD_D \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_FF_F \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_GG_G \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_OO_O \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_O_O_method \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_OO_O_method \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_On_Om \
         (char **, npy_intp *, npy_intp *, void *)
    void PyUFunc_clearfperr()
    int PyUFunc_getfperr()
    int PyUFunc_ReplaceLoopBySignature \
        (ufunc, PyUFuncGenericFunction, int *, PyUFuncGenericFunction *)
    object PyUFunc_FromFuncAndDataAndSignature \
             (PyUFuncGenericFunction *, void **, char *, int, int, int,
              int, char *, char *, int, char *)

    # Initializer for the ufunc C-API; wrapped by import_umath()/import_ufunc() below.
    int _import_umath() except -1
|
| 1026 |
+
|
| 1027 |
+
cdef inline void set_array_base(ndarray arr, object base) except *:
|
| 1028 |
+
Py_INCREF(base) # important to do this before stealing the reference below!
|
| 1029 |
+
PyArray_SetBaseObject(arr, base)
|
| 1030 |
+
|
| 1031 |
+
cdef inline object get_array_base(ndarray arr):
|
| 1032 |
+
base = PyArray_BASE(arr)
|
| 1033 |
+
if base is NULL:
|
| 1034 |
+
return None
|
| 1035 |
+
return <object>base
|
| 1036 |
+
|
| 1037 |
+
# Versions of the import_* functions which are more suitable for
|
| 1038 |
+
# Cython code.
|
| 1039 |
+
cdef inline int import_array() except -1:
|
| 1040 |
+
try:
|
| 1041 |
+
__pyx_import_array()
|
| 1042 |
+
except Exception:
|
| 1043 |
+
raise ImportError("numpy._core.multiarray failed to import")
|
| 1044 |
+
|
| 1045 |
+
cdef inline int import_umath() except -1:
|
| 1046 |
+
try:
|
| 1047 |
+
_import_umath()
|
| 1048 |
+
except Exception:
|
| 1049 |
+
raise ImportError("numpy._core.umath failed to import")
|
| 1050 |
+
|
| 1051 |
+
cdef inline int import_ufunc() except -1:
|
| 1052 |
+
try:
|
| 1053 |
+
_import_umath()
|
| 1054 |
+
except Exception:
|
| 1055 |
+
raise ImportError("numpy._core.umath failed to import")
|
| 1056 |
+
|
| 1057 |
+
|
| 1058 |
+
cdef inline bint is_timedelta64_object(object obj) noexcept:
|
| 1059 |
+
"""
|
| 1060 |
+
Cython equivalent of `isinstance(obj, np.timedelta64)`
|
| 1061 |
+
|
| 1062 |
+
Parameters
|
| 1063 |
+
----------
|
| 1064 |
+
obj : object
|
| 1065 |
+
|
| 1066 |
+
Returns
|
| 1067 |
+
-------
|
| 1068 |
+
bool
|
| 1069 |
+
"""
|
| 1070 |
+
return PyObject_TypeCheck(obj, &PyTimedeltaArrType_Type)
|
| 1071 |
+
|
| 1072 |
+
|
| 1073 |
+
cdef inline bint is_datetime64_object(object obj) noexcept:
|
| 1074 |
+
"""
|
| 1075 |
+
Cython equivalent of `isinstance(obj, np.datetime64)`
|
| 1076 |
+
|
| 1077 |
+
Parameters
|
| 1078 |
+
----------
|
| 1079 |
+
obj : object
|
| 1080 |
+
|
| 1081 |
+
Returns
|
| 1082 |
+
-------
|
| 1083 |
+
bool
|
| 1084 |
+
"""
|
| 1085 |
+
return PyObject_TypeCheck(obj, &PyDatetimeArrType_Type)
|
| 1086 |
+
|
| 1087 |
+
|
| 1088 |
+
cdef inline npy_datetime get_datetime64_value(object obj) noexcept nogil:
|
| 1089 |
+
"""
|
| 1090 |
+
returns the int64 value underlying scalar numpy datetime64 object
|
| 1091 |
+
|
| 1092 |
+
Note that to interpret this as a datetime, the corresponding unit is
|
| 1093 |
+
also needed. That can be found using `get_datetime64_unit`.
|
| 1094 |
+
"""
|
| 1095 |
+
return (<PyDatetimeScalarObject*>obj).obval
|
| 1096 |
+
|
| 1097 |
+
|
| 1098 |
+
cdef inline npy_timedelta get_timedelta64_value(object obj) noexcept nogil:
|
| 1099 |
+
"""
|
| 1100 |
+
returns the int64 value underlying scalar numpy timedelta64 object
|
| 1101 |
+
"""
|
| 1102 |
+
return (<PyTimedeltaScalarObject*>obj).obval
|
| 1103 |
+
|
| 1104 |
+
|
| 1105 |
+
cdef inline NPY_DATETIMEUNIT get_datetime64_unit(object obj) noexcept nogil:
|
| 1106 |
+
"""
|
| 1107 |
+
returns the unit part of the dtype for a numpy datetime64 object.
|
| 1108 |
+
"""
|
| 1109 |
+
return <NPY_DATETIMEUNIT>(<PyDatetimeScalarObject*>obj).obmeta.base
|
| 1110 |
+
|
| 1111 |
+
|
| 1112 |
+
# Iterator API added in v1.6
|
| 1113 |
+
ctypedef int (*NpyIter_IterNextFunc)(NpyIter* it) noexcept nogil
|
| 1114 |
+
ctypedef void (*NpyIter_GetMultiIndexFunc)(NpyIter* it, npy_intp* outcoords) noexcept nogil
|
| 1115 |
+
|
| 1116 |
+
cdef extern from "numpy/arrayobject.h":
|
| 1117 |
+
|
| 1118 |
+
ctypedef struct NpyIter:
|
| 1119 |
+
pass
|
| 1120 |
+
|
| 1121 |
+
cdef enum:
|
| 1122 |
+
NPY_FAIL
|
| 1123 |
+
NPY_SUCCEED
|
| 1124 |
+
|
| 1125 |
+
cdef enum:
|
| 1126 |
+
# Track an index representing C order
|
| 1127 |
+
NPY_ITER_C_INDEX
|
| 1128 |
+
# Track an index representing Fortran order
|
| 1129 |
+
NPY_ITER_F_INDEX
|
| 1130 |
+
# Track a multi-index
|
| 1131 |
+
NPY_ITER_MULTI_INDEX
|
| 1132 |
+
# User code external to the iterator does the 1-dimensional innermost loop
|
| 1133 |
+
NPY_ITER_EXTERNAL_LOOP
|
| 1134 |
+
# Convert all the operands to a common data type
|
| 1135 |
+
NPY_ITER_COMMON_DTYPE
|
| 1136 |
+
# Operands may hold references, requiring API access during iteration
|
| 1137 |
+
NPY_ITER_REFS_OK
|
| 1138 |
+
# Zero-sized operands should be permitted, iteration checks IterSize for 0
|
| 1139 |
+
NPY_ITER_ZEROSIZE_OK
|
| 1140 |
+
# Permits reductions (size-0 stride with dimension size > 1)
|
| 1141 |
+
NPY_ITER_REDUCE_OK
|
| 1142 |
+
# Enables sub-range iteration
|
| 1143 |
+
NPY_ITER_RANGED
|
| 1144 |
+
# Enables buffering
|
| 1145 |
+
NPY_ITER_BUFFERED
|
| 1146 |
+
# When buffering is enabled, grows the inner loop if possible
|
| 1147 |
+
NPY_ITER_GROWINNER
|
| 1148 |
+
# Delay allocation of buffers until first Reset* call
|
| 1149 |
+
NPY_ITER_DELAY_BUFALLOC
|
| 1150 |
+
# When NPY_KEEPORDER is specified, disable reversing negative-stride axes
|
| 1151 |
+
NPY_ITER_DONT_NEGATE_STRIDES
|
| 1152 |
+
NPY_ITER_COPY_IF_OVERLAP
|
| 1153 |
+
# The operand will be read from and written to
|
| 1154 |
+
NPY_ITER_READWRITE
|
| 1155 |
+
# The operand will only be read from
|
| 1156 |
+
NPY_ITER_READONLY
|
| 1157 |
+
# The operand will only be written to
|
| 1158 |
+
NPY_ITER_WRITEONLY
|
| 1159 |
+
# The operand's data must be in native byte order
|
| 1160 |
+
NPY_ITER_NBO
|
| 1161 |
+
# The operand's data must be aligned
|
| 1162 |
+
NPY_ITER_ALIGNED
|
| 1163 |
+
# The operand's data must be contiguous (within the inner loop)
|
| 1164 |
+
NPY_ITER_CONTIG
|
| 1165 |
+
# The operand may be copied to satisfy requirements
|
| 1166 |
+
NPY_ITER_COPY
|
| 1167 |
+
# The operand may be copied with WRITEBACKIFCOPY to satisfy requirements
|
| 1168 |
+
NPY_ITER_UPDATEIFCOPY
|
| 1169 |
+
# Allocate the operand if it is NULL
|
| 1170 |
+
NPY_ITER_ALLOCATE
|
| 1171 |
+
# If an operand is allocated, don't use any subtype
|
| 1172 |
+
NPY_ITER_NO_SUBTYPE
|
| 1173 |
+
# This is a virtual array slot, operand is NULL but temporary data is there
|
| 1174 |
+
NPY_ITER_VIRTUAL
|
| 1175 |
+
# Require that the dimension match the iterator dimensions exactly
|
| 1176 |
+
NPY_ITER_NO_BROADCAST
|
| 1177 |
+
# A mask is being used on this array, affects buffer -> array copy
|
| 1178 |
+
NPY_ITER_WRITEMASKED
|
| 1179 |
+
# This array is the mask for all WRITEMASKED operands
|
| 1180 |
+
NPY_ITER_ARRAYMASK
|
| 1181 |
+
# Assume iterator order data access for COPY_IF_OVERLAP
|
| 1182 |
+
NPY_ITER_OVERLAP_ASSUME_ELEMENTWISE
|
| 1183 |
+
|
| 1184 |
+
# construction and destruction functions
|
| 1185 |
+
NpyIter* NpyIter_New(ndarray arr, npy_uint32 flags, NPY_ORDER order,
|
| 1186 |
+
NPY_CASTING casting, dtype datatype) except NULL
|
| 1187 |
+
NpyIter* NpyIter_MultiNew(npy_intp nop, PyArrayObject** op, npy_uint32 flags,
|
| 1188 |
+
NPY_ORDER order, NPY_CASTING casting, npy_uint32*
|
| 1189 |
+
op_flags, PyArray_Descr** op_dtypes) except NULL
|
| 1190 |
+
NpyIter* NpyIter_AdvancedNew(npy_intp nop, PyArrayObject** op,
|
| 1191 |
+
npy_uint32 flags, NPY_ORDER order,
|
| 1192 |
+
NPY_CASTING casting, npy_uint32* op_flags,
|
| 1193 |
+
PyArray_Descr** op_dtypes, int oa_ndim,
|
| 1194 |
+
int** op_axes, const npy_intp* itershape,
|
| 1195 |
+
npy_intp buffersize) except NULL
|
| 1196 |
+
NpyIter* NpyIter_Copy(NpyIter* it) except NULL
|
| 1197 |
+
int NpyIter_RemoveAxis(NpyIter* it, int axis) except NPY_FAIL
|
| 1198 |
+
int NpyIter_RemoveMultiIndex(NpyIter* it) except NPY_FAIL
|
| 1199 |
+
int NpyIter_EnableExternalLoop(NpyIter* it) except NPY_FAIL
|
| 1200 |
+
int NpyIter_Deallocate(NpyIter* it) except NPY_FAIL
|
| 1201 |
+
int NpyIter_Reset(NpyIter* it, char** errmsg) except NPY_FAIL
|
| 1202 |
+
int NpyIter_ResetToIterIndexRange(NpyIter* it, npy_intp istart,
|
| 1203 |
+
npy_intp iend, char** errmsg) except NPY_FAIL
|
| 1204 |
+
int NpyIter_ResetBasePointers(NpyIter* it, char** baseptrs, char** errmsg) except NPY_FAIL
|
| 1205 |
+
int NpyIter_GotoMultiIndex(NpyIter* it, const npy_intp* multi_index) except NPY_FAIL
|
| 1206 |
+
int NpyIter_GotoIndex(NpyIter* it, npy_intp index) except NPY_FAIL
|
| 1207 |
+
npy_intp NpyIter_GetIterSize(NpyIter* it) nogil
|
| 1208 |
+
npy_intp NpyIter_GetIterIndex(NpyIter* it) nogil
|
| 1209 |
+
void NpyIter_GetIterIndexRange(NpyIter* it, npy_intp* istart,
|
| 1210 |
+
npy_intp* iend) nogil
|
| 1211 |
+
int NpyIter_GotoIterIndex(NpyIter* it, npy_intp iterindex) except NPY_FAIL
|
| 1212 |
+
npy_bool NpyIter_HasDelayedBufAlloc(NpyIter* it) nogil
|
| 1213 |
+
npy_bool NpyIter_HasExternalLoop(NpyIter* it) nogil
|
| 1214 |
+
npy_bool NpyIter_HasMultiIndex(NpyIter* it) nogil
|
| 1215 |
+
npy_bool NpyIter_HasIndex(NpyIter* it) nogil
|
| 1216 |
+
npy_bool NpyIter_RequiresBuffering(NpyIter* it) nogil
|
| 1217 |
+
npy_bool NpyIter_IsBuffered(NpyIter* it) nogil
|
| 1218 |
+
npy_bool NpyIter_IsGrowInner(NpyIter* it) nogil
|
| 1219 |
+
npy_intp NpyIter_GetBufferSize(NpyIter* it) nogil
|
| 1220 |
+
int NpyIter_GetNDim(NpyIter* it) nogil
|
| 1221 |
+
int NpyIter_GetNOp(NpyIter* it) nogil
|
| 1222 |
+
npy_intp* NpyIter_GetAxisStrideArray(NpyIter* it, int axis) except NULL
|
| 1223 |
+
int NpyIter_GetShape(NpyIter* it, npy_intp* outshape) nogil
|
| 1224 |
+
PyArray_Descr** NpyIter_GetDescrArray(NpyIter* it)
|
| 1225 |
+
PyArrayObject** NpyIter_GetOperandArray(NpyIter* it)
|
| 1226 |
+
ndarray NpyIter_GetIterView(NpyIter* it, npy_intp i)
|
| 1227 |
+
void NpyIter_GetReadFlags(NpyIter* it, char* outreadflags)
|
| 1228 |
+
void NpyIter_GetWriteFlags(NpyIter* it, char* outwriteflags)
|
| 1229 |
+
int NpyIter_CreateCompatibleStrides(NpyIter* it, npy_intp itemsize,
|
| 1230 |
+
npy_intp* outstrides) except NPY_FAIL
|
| 1231 |
+
npy_bool NpyIter_IsFirstVisit(NpyIter* it, int iop) nogil
|
| 1232 |
+
# functions for iterating an NpyIter object
|
| 1233 |
+
NpyIter_IterNextFunc* NpyIter_GetIterNext(NpyIter* it, char** errmsg) except NULL
|
| 1234 |
+
NpyIter_GetMultiIndexFunc* NpyIter_GetGetMultiIndex(NpyIter* it,
|
| 1235 |
+
char** errmsg) except NULL
|
| 1236 |
+
char** NpyIter_GetDataPtrArray(NpyIter* it) nogil
|
| 1237 |
+
char** NpyIter_GetInitialDataPtrArray(NpyIter* it) nogil
|
| 1238 |
+
npy_intp* NpyIter_GetIndexPtr(NpyIter* it)
|
| 1239 |
+
npy_intp* NpyIter_GetInnerStrideArray(NpyIter* it) nogil
|
| 1240 |
+
npy_intp* NpyIter_GetInnerLoopSizePtr(NpyIter* it) nogil
|
| 1241 |
+
void NpyIter_GetInnerFixedStrideArray(NpyIter* it, npy_intp* outstrides) nogil
|
| 1242 |
+
npy_bool NpyIter_IterationNeedsAPI(NpyIter* it) nogil
|
| 1243 |
+
void NpyIter_DebugPrint(NpyIter* it)
|
mantis_evalkit/lib/python3.10/site-packages/numpy/__init__.py
ADDED
|
@@ -0,0 +1,547 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
NumPy
|
| 3 |
+
=====
|
| 4 |
+
|
| 5 |
+
Provides
|
| 6 |
+
1. An array object of arbitrary homogeneous items
|
| 7 |
+
2. Fast mathematical operations over arrays
|
| 8 |
+
3. Linear Algebra, Fourier Transforms, Random Number Generation
|
| 9 |
+
|
| 10 |
+
How to use the documentation
|
| 11 |
+
----------------------------
|
| 12 |
+
Documentation is available in two forms: docstrings provided
|
| 13 |
+
with the code, and a loose standing reference guide, available from
|
| 14 |
+
`the NumPy homepage <https://numpy.org>`_.
|
| 15 |
+
|
| 16 |
+
We recommend exploring the docstrings using
|
| 17 |
+
`IPython <https://ipython.org>`_, an advanced Python shell with
|
| 18 |
+
TAB-completion and introspection capabilities. See below for further
|
| 19 |
+
instructions.
|
| 20 |
+
|
| 21 |
+
The docstring examples assume that `numpy` has been imported as ``np``::
|
| 22 |
+
|
| 23 |
+
>>> import numpy as np
|
| 24 |
+
|
| 25 |
+
Code snippets are indicated by three greater-than signs::
|
| 26 |
+
|
| 27 |
+
>>> x = 42
|
| 28 |
+
>>> x = x + 1
|
| 29 |
+
|
| 30 |
+
Use the built-in ``help`` function to view a function's docstring::
|
| 31 |
+
|
| 32 |
+
>>> help(np.sort)
|
| 33 |
+
... # doctest: +SKIP
|
| 34 |
+
|
| 35 |
+
For some objects, ``np.info(obj)`` may provide additional help. This is
|
| 36 |
+
particularly true if you see the line "Help on ufunc object:" at the top
|
| 37 |
+
of the help() page. Ufuncs are implemented in C, not Python, for speed.
|
| 38 |
+
The native Python help() does not know how to view their help, but our
|
| 39 |
+
np.info() function does.
|
| 40 |
+
|
| 41 |
+
Available subpackages
|
| 42 |
+
---------------------
|
| 43 |
+
lib
|
| 44 |
+
Basic functions used by several sub-packages.
|
| 45 |
+
random
|
| 46 |
+
Core Random Tools
|
| 47 |
+
linalg
|
| 48 |
+
Core Linear Algebra Tools
|
| 49 |
+
fft
|
| 50 |
+
Core FFT routines
|
| 51 |
+
polynomial
|
| 52 |
+
Polynomial tools
|
| 53 |
+
testing
|
| 54 |
+
NumPy testing tools
|
| 55 |
+
distutils
|
| 56 |
+
Enhancements to distutils with support for
|
| 57 |
+
Fortran compilers support and more (for Python <= 3.11)
|
| 58 |
+
|
| 59 |
+
Utilities
|
| 60 |
+
---------
|
| 61 |
+
test
|
| 62 |
+
Run numpy unittests
|
| 63 |
+
show_config
|
| 64 |
+
Show numpy build configuration
|
| 65 |
+
__version__
|
| 66 |
+
NumPy version string
|
| 67 |
+
|
| 68 |
+
Viewing documentation using IPython
|
| 69 |
+
-----------------------------------
|
| 70 |
+
|
| 71 |
+
Start IPython and import `numpy` usually under the alias ``np``: `import
|
| 72 |
+
numpy as np`. Then, directly past or use the ``%cpaste`` magic to paste
|
| 73 |
+
examples into the shell. To see which functions are available in `numpy`,
|
| 74 |
+
type ``np.<TAB>`` (where ``<TAB>`` refers to the TAB key), or use
|
| 75 |
+
``np.*cos*?<ENTER>`` (where ``<ENTER>`` refers to the ENTER key) to narrow
|
| 76 |
+
down the list. To view the docstring for a function, use
|
| 77 |
+
``np.cos?<ENTER>`` (to view the docstring) and ``np.cos??<ENTER>`` (to view
|
| 78 |
+
the source code).
|
| 79 |
+
|
| 80 |
+
Copies vs. in-place operation
|
| 81 |
+
-----------------------------
|
| 82 |
+
Most of the functions in `numpy` return a copy of the array argument
|
| 83 |
+
(e.g., `np.sort`). In-place versions of these functions are often
|
| 84 |
+
available as array methods, i.e. ``x = np.array([1,2,3]); x.sort()``.
|
| 85 |
+
Exceptions to this rule are documented.
|
| 86 |
+
|
| 87 |
+
"""
|
| 88 |
+
import os
|
| 89 |
+
import sys
|
| 90 |
+
import warnings
|
| 91 |
+
|
| 92 |
+
from ._globals import _NoValue, _CopyMode
|
| 93 |
+
from ._expired_attrs_2_0 import __expired_attributes__
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# If a version with git hash was stored, use that instead
|
| 97 |
+
from . import version
|
| 98 |
+
from .version import __version__
|
| 99 |
+
|
| 100 |
+
# We first need to detect if we're being called as part of the numpy setup
|
| 101 |
+
# procedure itself in a reliable manner.
|
| 102 |
+
try:
|
| 103 |
+
__NUMPY_SETUP__
|
| 104 |
+
except NameError:
|
| 105 |
+
__NUMPY_SETUP__ = False
|
| 106 |
+
|
| 107 |
+
if __NUMPY_SETUP__:
|
| 108 |
+
sys.stderr.write('Running from numpy source directory.\n')
|
| 109 |
+
else:
|
| 110 |
+
# Allow distributors to run custom init code before importing numpy._core
|
| 111 |
+
from . import _distributor_init
|
| 112 |
+
|
| 113 |
+
try:
|
| 114 |
+
from numpy.__config__ import show_config
|
| 115 |
+
except ImportError as e:
|
| 116 |
+
msg = """Error importing numpy: you should not try to import numpy from
|
| 117 |
+
its source directory; please exit the numpy source tree, and relaunch
|
| 118 |
+
your python interpreter from there."""
|
| 119 |
+
raise ImportError(msg) from e
|
| 120 |
+
|
| 121 |
+
from . import _core
|
| 122 |
+
from ._core import (
|
| 123 |
+
False_, ScalarType, True_,
|
| 124 |
+
abs, absolute, acos, acosh, add, all, allclose,
|
| 125 |
+
amax, amin, any, arange, arccos, arccosh, arcsin, arcsinh,
|
| 126 |
+
arctan, arctan2, arctanh, argmax, argmin, argpartition, argsort,
|
| 127 |
+
argwhere, around, array, array2string, array_equal, array_equiv,
|
| 128 |
+
array_repr, array_str, asanyarray, asarray, ascontiguousarray,
|
| 129 |
+
asfortranarray, asin, asinh, atan, atanh, atan2, astype, atleast_1d,
|
| 130 |
+
atleast_2d, atleast_3d, base_repr, binary_repr, bitwise_and,
|
| 131 |
+
bitwise_count, bitwise_invert, bitwise_left_shift, bitwise_not,
|
| 132 |
+
bitwise_or, bitwise_right_shift, bitwise_xor, block, bool, bool_,
|
| 133 |
+
broadcast, busday_count, busday_offset, busdaycalendar, byte, bytes_,
|
| 134 |
+
can_cast, cbrt, cdouble, ceil, character, choose, clip, clongdouble,
|
| 135 |
+
complex128, complex64, complexfloating, compress, concat, concatenate,
|
| 136 |
+
conj, conjugate, convolve, copysign, copyto, correlate, cos, cosh,
|
| 137 |
+
count_nonzero, cross, csingle, cumprod, cumsum, cumulative_prod,
|
| 138 |
+
cumulative_sum, datetime64, datetime_as_string, datetime_data,
|
| 139 |
+
deg2rad, degrees, diagonal, divide, divmod, dot, double, dtype, e,
|
| 140 |
+
einsum, einsum_path, empty, empty_like, equal, errstate, euler_gamma,
|
| 141 |
+
exp, exp2, expm1, fabs, finfo, flatiter, flatnonzero, flexible,
|
| 142 |
+
float16, float32, float64, float_power, floating, floor, floor_divide,
|
| 143 |
+
fmax, fmin, fmod, format_float_positional, format_float_scientific,
|
| 144 |
+
frexp, from_dlpack, frombuffer, fromfile, fromfunction, fromiter,
|
| 145 |
+
frompyfunc, fromstring, full, full_like, gcd, generic, geomspace,
|
| 146 |
+
get_printoptions, getbufsize, geterr, geterrcall, greater,
|
| 147 |
+
greater_equal, half, heaviside, hstack, hypot, identity, iinfo,
|
| 148 |
+
indices, inexact, inf, inner, int16, int32, int64, int8, int_, intc,
|
| 149 |
+
integer, intp, invert, is_busday, isclose, isdtype, isfinite,
|
| 150 |
+
isfortran, isinf, isnan, isnat, isscalar, issubdtype, lcm, ldexp,
|
| 151 |
+
left_shift, less, less_equal, lexsort, linspace, little_endian, log,
|
| 152 |
+
log10, log1p, log2, logaddexp, logaddexp2, logical_and, logical_not,
|
| 153 |
+
logical_or, logical_xor, logspace, long, longdouble, longlong, matmul,
|
| 154 |
+
matvec, matrix_transpose, max, maximum, may_share_memory, mean, memmap,
|
| 155 |
+
min, min_scalar_type, minimum, mod, modf, moveaxis, multiply, nan,
|
| 156 |
+
ndarray, ndim, nditer, negative, nested_iters, newaxis, nextafter,
|
| 157 |
+
nonzero, not_equal, number, object_, ones, ones_like, outer, partition,
|
| 158 |
+
permute_dims, pi, positive, pow, power, printoptions, prod,
|
| 159 |
+
promote_types, ptp, put, putmask, rad2deg, radians, ravel, recarray,
|
| 160 |
+
reciprocal, record, remainder, repeat, require, reshape, resize,
|
| 161 |
+
result_type, right_shift, rint, roll, rollaxis, round, sctypeDict,
|
| 162 |
+
searchsorted, set_printoptions, setbufsize, seterr, seterrcall, shape,
|
| 163 |
+
shares_memory, short, sign, signbit, signedinteger, sin, single, sinh,
|
| 164 |
+
size, sort, spacing, sqrt, square, squeeze, stack, std,
|
| 165 |
+
str_, subtract, sum, swapaxes, take, tan, tanh, tensordot,
|
| 166 |
+
timedelta64, trace, transpose, true_divide, trunc, typecodes, ubyte,
|
| 167 |
+
ufunc, uint, uint16, uint32, uint64, uint8, uintc, uintp, ulong,
|
| 168 |
+
ulonglong, unsignedinteger, unstack, ushort, var, vdot, vecdot,
|
| 169 |
+
vecmat, void, vstack, where, zeros, zeros_like
|
| 170 |
+
)
|
| 171 |
+
|
| 172 |
+
# NOTE: It's still under discussion whether these aliases
|
| 173 |
+
# should be removed.
|
| 174 |
+
for ta in ["float96", "float128", "complex192", "complex256"]:
|
| 175 |
+
try:
|
| 176 |
+
globals()[ta] = getattr(_core, ta)
|
| 177 |
+
except AttributeError:
|
| 178 |
+
pass
|
| 179 |
+
del ta
|
| 180 |
+
|
| 181 |
+
from . import lib
|
| 182 |
+
from .lib import scimath as emath
|
| 183 |
+
from .lib._histograms_impl import (
|
| 184 |
+
histogram, histogram_bin_edges, histogramdd
|
| 185 |
+
)
|
| 186 |
+
from .lib._nanfunctions_impl import (
|
| 187 |
+
nanargmax, nanargmin, nancumprod, nancumsum, nanmax, nanmean,
|
| 188 |
+
nanmedian, nanmin, nanpercentile, nanprod, nanquantile, nanstd,
|
| 189 |
+
nansum, nanvar
|
| 190 |
+
)
|
| 191 |
+
from .lib._function_base_impl import (
|
| 192 |
+
select, piecewise, trim_zeros, copy, iterable, percentile, diff,
|
| 193 |
+
gradient, angle, unwrap, sort_complex, flip, rot90, extract, place,
|
| 194 |
+
vectorize, asarray_chkfinite, average, bincount, digitize, cov,
|
| 195 |
+
corrcoef, median, sinc, hamming, hanning, bartlett, blackman,
|
| 196 |
+
kaiser, trapezoid, trapz, i0, meshgrid, delete, insert, append,
|
| 197 |
+
interp, quantile
|
| 198 |
+
)
|
| 199 |
+
from .lib._twodim_base_impl import (
|
| 200 |
+
diag, diagflat, eye, fliplr, flipud, tri, triu, tril, vander,
|
| 201 |
+
histogram2d, mask_indices, tril_indices, tril_indices_from,
|
| 202 |
+
triu_indices, triu_indices_from
|
| 203 |
+
)
|
| 204 |
+
from .lib._shape_base_impl import (
|
| 205 |
+
apply_over_axes, apply_along_axis, array_split, column_stack, dsplit,
|
| 206 |
+
dstack, expand_dims, hsplit, kron, put_along_axis, row_stack, split,
|
| 207 |
+
take_along_axis, tile, vsplit
|
| 208 |
+
)
|
| 209 |
+
from .lib._type_check_impl import (
|
| 210 |
+
iscomplexobj, isrealobj, imag, iscomplex, isreal, nan_to_num, real,
|
| 211 |
+
real_if_close, typename, mintypecode, common_type
|
| 212 |
+
)
|
| 213 |
+
from .lib._arraysetops_impl import (
|
| 214 |
+
ediff1d, in1d, intersect1d, isin, setdiff1d, setxor1d, union1d,
|
| 215 |
+
unique, unique_all, unique_counts, unique_inverse, unique_values
|
| 216 |
+
)
|
| 217 |
+
from .lib._ufunclike_impl import fix, isneginf, isposinf
|
| 218 |
+
from .lib._arraypad_impl import pad
|
| 219 |
+
from .lib._utils_impl import (
|
| 220 |
+
show_runtime, get_include, info
|
| 221 |
+
)
|
| 222 |
+
from .lib._stride_tricks_impl import (
|
| 223 |
+
broadcast_arrays, broadcast_shapes, broadcast_to
|
| 224 |
+
)
|
| 225 |
+
from .lib._polynomial_impl import (
|
| 226 |
+
poly, polyint, polyder, polyadd, polysub, polymul, polydiv, polyval,
|
| 227 |
+
polyfit, poly1d, roots
|
| 228 |
+
)
|
| 229 |
+
from .lib._npyio_impl import (
|
| 230 |
+
savetxt, loadtxt, genfromtxt, load, save, savez, packbits,
|
| 231 |
+
savez_compressed, unpackbits, fromregex
|
| 232 |
+
)
|
| 233 |
+
from .lib._index_tricks_impl import (
|
| 234 |
+
diag_indices_from, diag_indices, fill_diagonal, ndindex, ndenumerate,
|
| 235 |
+
ix_, c_, r_, s_, ogrid, mgrid, unravel_index, ravel_multi_index,
|
| 236 |
+
index_exp
|
| 237 |
+
)
|
| 238 |
+
|
| 239 |
+
from . import matrixlib as _mat
|
| 240 |
+
from .matrixlib import (
|
| 241 |
+
asmatrix, bmat, matrix
|
| 242 |
+
)
|
| 243 |
+
|
| 244 |
+
# public submodules are imported lazily, therefore are accessible from
|
| 245 |
+
# __getattr__. Note that `distutils` (deprecated) and `array_api`
|
| 246 |
+
# (experimental label) are not added here, because `from numpy import *`
|
| 247 |
+
# must not raise any warnings - that's too disruptive.
|
| 248 |
+
__numpy_submodules__ = {
|
| 249 |
+
"linalg", "fft", "dtypes", "random", "polynomial", "ma",
|
| 250 |
+
"exceptions", "lib", "ctypeslib", "testing", "typing",
|
| 251 |
+
"f2py", "test", "rec", "char", "core", "strings",
|
| 252 |
+
}
|
| 253 |
+
|
| 254 |
+
# We build warning messages for former attributes
|
| 255 |
+
_msg = (
|
| 256 |
+
"module 'numpy' has no attribute '{n}'.\n"
|
| 257 |
+
"`np.{n}` was a deprecated alias for the builtin `{n}`. "
|
| 258 |
+
"To avoid this error in existing code, use `{n}` by itself. "
|
| 259 |
+
"Doing this will not modify any behavior and is safe. {extended_msg}\n"
|
| 260 |
+
"The aliases was originally deprecated in NumPy 1.20; for more "
|
| 261 |
+
"details and guidance see the original release note at:\n"
|
| 262 |
+
" https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations")
|
| 263 |
+
|
| 264 |
+
_specific_msg = (
|
| 265 |
+
"If you specifically wanted the numpy scalar type, use `np.{}` here.")
|
| 266 |
+
|
| 267 |
+
_int_extended_msg = (
|
| 268 |
+
"When replacing `np.{}`, you may wish to use e.g. `np.int64` "
|
| 269 |
+
"or `np.int32` to specify the precision. If you wish to review "
|
| 270 |
+
"your current use, check the release note link for "
|
| 271 |
+
"additional information.")
|
| 272 |
+
|
| 273 |
+
_type_info = [
|
| 274 |
+
("object", ""), # The NumPy scalar only exists by name.
|
| 275 |
+
("float", _specific_msg.format("float64")),
|
| 276 |
+
("complex", _specific_msg.format("complex128")),
|
| 277 |
+
("str", _specific_msg.format("str_")),
|
| 278 |
+
("int", _int_extended_msg.format("int"))]
|
| 279 |
+
|
| 280 |
+
__former_attrs__ = {
|
| 281 |
+
n: _msg.format(n=n, extended_msg=extended_msg)
|
| 282 |
+
for n, extended_msg in _type_info
|
| 283 |
+
}
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
# Some of these could be defined right away, but most were aliases to
|
| 287 |
+
# the Python objects and only removed in NumPy 1.24. Defining them should
|
| 288 |
+
# probably wait for NumPy 1.26 or 2.0.
|
| 289 |
+
# When defined, these should possibly not be added to `__all__` to avoid
|
| 290 |
+
# import with `from numpy import *`.
|
| 291 |
+
__future_scalars__ = {"str", "bytes", "object"}
|
| 292 |
+
|
| 293 |
+
__array_api_version__ = "2023.12"
|
| 294 |
+
|
| 295 |
+
from ._array_api_info import __array_namespace_info__
|
| 296 |
+
|
| 297 |
+
# now that numpy core module is imported, can initialize limits
|
| 298 |
+
_core.getlimits._register_known_types()
|
| 299 |
+
|
| 300 |
+
__all__ = list(
|
| 301 |
+
__numpy_submodules__ |
|
| 302 |
+
set(_core.__all__) |
|
| 303 |
+
set(_mat.__all__) |
|
| 304 |
+
set(lib._histograms_impl.__all__) |
|
| 305 |
+
set(lib._nanfunctions_impl.__all__) |
|
| 306 |
+
set(lib._function_base_impl.__all__) |
|
| 307 |
+
set(lib._twodim_base_impl.__all__) |
|
| 308 |
+
set(lib._shape_base_impl.__all__) |
|
| 309 |
+
set(lib._type_check_impl.__all__) |
|
| 310 |
+
set(lib._arraysetops_impl.__all__) |
|
| 311 |
+
set(lib._ufunclike_impl.__all__) |
|
| 312 |
+
set(lib._arraypad_impl.__all__) |
|
| 313 |
+
set(lib._utils_impl.__all__) |
|
| 314 |
+
set(lib._stride_tricks_impl.__all__) |
|
| 315 |
+
set(lib._polynomial_impl.__all__) |
|
| 316 |
+
set(lib._npyio_impl.__all__) |
|
| 317 |
+
set(lib._index_tricks_impl.__all__) |
|
| 318 |
+
{"emath", "show_config", "__version__", "__array_namespace_info__"}
|
| 319 |
+
)
|
| 320 |
+
|
| 321 |
+
# Filter out Cython harmless warnings
|
| 322 |
+
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
|
| 323 |
+
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
|
| 324 |
+
warnings.filterwarnings("ignore", message="numpy.ndarray size changed")
|
| 325 |
+
|
| 326 |
+
def __getattr__(attr):
|
| 327 |
+
# Warn for expired attributes
|
| 328 |
+
import warnings
|
| 329 |
+
|
| 330 |
+
if attr == "linalg":
|
| 331 |
+
import numpy.linalg as linalg
|
| 332 |
+
return linalg
|
| 333 |
+
elif attr == "fft":
|
| 334 |
+
import numpy.fft as fft
|
| 335 |
+
return fft
|
| 336 |
+
elif attr == "dtypes":
|
| 337 |
+
import numpy.dtypes as dtypes
|
| 338 |
+
return dtypes
|
| 339 |
+
elif attr == "random":
|
| 340 |
+
import numpy.random as random
|
| 341 |
+
return random
|
| 342 |
+
elif attr == "polynomial":
|
| 343 |
+
import numpy.polynomial as polynomial
|
| 344 |
+
return polynomial
|
| 345 |
+
elif attr == "ma":
|
| 346 |
+
import numpy.ma as ma
|
| 347 |
+
return ma
|
| 348 |
+
elif attr == "ctypeslib":
|
| 349 |
+
import numpy.ctypeslib as ctypeslib
|
| 350 |
+
return ctypeslib
|
| 351 |
+
elif attr == "exceptions":
|
| 352 |
+
import numpy.exceptions as exceptions
|
| 353 |
+
return exceptions
|
| 354 |
+
elif attr == "testing":
|
| 355 |
+
import numpy.testing as testing
|
| 356 |
+
return testing
|
| 357 |
+
elif attr == "matlib":
|
| 358 |
+
import numpy.matlib as matlib
|
| 359 |
+
return matlib
|
| 360 |
+
elif attr == "f2py":
|
| 361 |
+
import numpy.f2py as f2py
|
| 362 |
+
return f2py
|
| 363 |
+
elif attr == "typing":
|
| 364 |
+
import numpy.typing as typing
|
| 365 |
+
return typing
|
| 366 |
+
elif attr == "rec":
|
| 367 |
+
import numpy.rec as rec
|
| 368 |
+
return rec
|
| 369 |
+
elif attr == "char":
|
| 370 |
+
import numpy.char as char
|
| 371 |
+
return char
|
| 372 |
+
elif attr == "array_api":
|
| 373 |
+
raise AttributeError("`numpy.array_api` is not available from "
|
| 374 |
+
"numpy 2.0 onwards", name=None)
|
| 375 |
+
elif attr == "core":
|
| 376 |
+
import numpy.core as core
|
| 377 |
+
return core
|
| 378 |
+
elif attr == "strings":
|
| 379 |
+
import numpy.strings as strings
|
| 380 |
+
return strings
|
| 381 |
+
elif attr == "distutils":
|
| 382 |
+
if 'distutils' in __numpy_submodules__:
|
| 383 |
+
import numpy.distutils as distutils
|
| 384 |
+
return distutils
|
| 385 |
+
else:
|
| 386 |
+
raise AttributeError("`numpy.distutils` is not available from "
|
| 387 |
+
"Python 3.12 onwards", name=None)
|
| 388 |
+
|
| 389 |
+
if attr in __future_scalars__:
|
| 390 |
+
# And future warnings for those that will change, but also give
|
| 391 |
+
# the AttributeError
|
| 392 |
+
warnings.warn(
|
| 393 |
+
f"In the future `np.{attr}` will be defined as the "
|
| 394 |
+
"corresponding NumPy scalar.", FutureWarning, stacklevel=2)
|
| 395 |
+
|
| 396 |
+
if attr in __former_attrs__:
|
| 397 |
+
raise AttributeError(__former_attrs__[attr], name=None)
|
| 398 |
+
|
| 399 |
+
if attr in __expired_attributes__:
|
| 400 |
+
raise AttributeError(
|
| 401 |
+
f"`np.{attr}` was removed in the NumPy 2.0 release. "
|
| 402 |
+
f"{__expired_attributes__[attr]}",
|
| 403 |
+
name=None
|
| 404 |
+
)
|
| 405 |
+
|
| 406 |
+
if attr == "chararray":
|
| 407 |
+
warnings.warn(
|
| 408 |
+
"`np.chararray` is deprecated and will be removed from "
|
| 409 |
+
"the main namespace in the future. Use an array with a string "
|
| 410 |
+
"or bytes dtype instead.", DeprecationWarning, stacklevel=2)
|
| 411 |
+
import numpy.char as char
|
| 412 |
+
return char.chararray
|
| 413 |
+
|
| 414 |
+
raise AttributeError("module {!r} has no attribute "
|
| 415 |
+
"{!r}".format(__name__, attr))
|
| 416 |
+
|
| 417 |
+
def __dir__():
|
| 418 |
+
public_symbols = (
|
| 419 |
+
globals().keys() | __numpy_submodules__
|
| 420 |
+
)
|
| 421 |
+
public_symbols -= {
|
| 422 |
+
"matrixlib", "matlib", "tests", "conftest", "version",
|
| 423 |
+
"compat", "distutils", "array_api"
|
| 424 |
+
}
|
| 425 |
+
return list(public_symbols)
|
| 426 |
+
|
| 427 |
+
# Pytest testing
|
| 428 |
+
from numpy._pytesttester import PytestTester
|
| 429 |
+
test = PytestTester(__name__)
|
| 430 |
+
del PytestTester
|
| 431 |
+
|
| 432 |
+
def _sanity_check():
|
| 433 |
+
"""
|
| 434 |
+
Quick sanity checks for common bugs caused by environment.
|
| 435 |
+
There are some cases e.g. with wrong BLAS ABI that cause wrong
|
| 436 |
+
results under specific runtime conditions that are not necessarily
|
| 437 |
+
achieved during test suite runs, and it is useful to catch those early.
|
| 438 |
+
|
| 439 |
+
See https://github.com/numpy/numpy/issues/8577 and other
|
| 440 |
+
similar bug reports.
|
| 441 |
+
|
| 442 |
+
"""
|
| 443 |
+
try:
|
| 444 |
+
x = ones(2, dtype=float32)
|
| 445 |
+
if not abs(x.dot(x) - float32(2.0)) < 1e-5:
|
| 446 |
+
raise AssertionError
|
| 447 |
+
except AssertionError:
|
| 448 |
+
msg = ("The current Numpy installation ({!r}) fails to "
|
| 449 |
+
"pass simple sanity checks. This can be caused for example "
|
| 450 |
+
"by incorrect BLAS library being linked in, or by mixing "
|
| 451 |
+
"package managers (pip, conda, apt, ...). Search closed "
|
| 452 |
+
"numpy issues for similar problems.")
|
| 453 |
+
raise RuntimeError(msg.format(__file__)) from None
|
| 454 |
+
|
| 455 |
+
_sanity_check()
|
| 456 |
+
del _sanity_check
|
| 457 |
+
|
| 458 |
+
def _mac_os_check():
|
| 459 |
+
"""
|
| 460 |
+
Quick Sanity check for Mac OS look for accelerate build bugs.
|
| 461 |
+
Testing numpy polyfit calls init_dgelsd(LAPACK)
|
| 462 |
+
"""
|
| 463 |
+
try:
|
| 464 |
+
c = array([3., 2., 1.])
|
| 465 |
+
x = linspace(0, 2, 5)
|
| 466 |
+
y = polyval(c, x)
|
| 467 |
+
_ = polyfit(x, y, 2, cov=True)
|
| 468 |
+
except ValueError:
|
| 469 |
+
pass
|
| 470 |
+
|
| 471 |
+
if sys.platform == "darwin":
|
| 472 |
+
from . import exceptions
|
| 473 |
+
with warnings.catch_warnings(record=True) as w:
|
| 474 |
+
_mac_os_check()
|
| 475 |
+
# Throw runtime error, if the test failed Check for warning and error_message
|
| 476 |
+
if len(w) > 0:
|
| 477 |
+
for _wn in w:
|
| 478 |
+
if _wn.category is exceptions.RankWarning:
|
| 479 |
+
# Ignore other warnings, they may not be relevant (see gh-25433).
|
| 480 |
+
error_message = (
|
| 481 |
+
f"{_wn.category.__name__}: {_wn.message}"
|
| 482 |
+
)
|
| 483 |
+
msg = (
|
| 484 |
+
"Polyfit sanity test emitted a warning, most likely due "
|
| 485 |
+
"to using a buggy Accelerate backend."
|
| 486 |
+
"\nIf you compiled yourself, more information is available at:"
|
| 487 |
+
"\nhttps://numpy.org/devdocs/building/index.html"
|
| 488 |
+
"\nOtherwise report this to the vendor "
|
| 489 |
+
"that provided NumPy.\n\n{}\n".format(error_message))
|
| 490 |
+
raise RuntimeError(msg)
|
| 491 |
+
del _wn
|
| 492 |
+
del w
|
| 493 |
+
del _mac_os_check
|
| 494 |
+
|
| 495 |
+
def hugepage_setup():
|
| 496 |
+
"""
|
| 497 |
+
We usually use madvise hugepages support, but on some old kernels it
|
| 498 |
+
is slow and thus better avoided. Specifically kernel version 4.6
|
| 499 |
+
had a bug fix which probably fixed this:
|
| 500 |
+
https://github.com/torvalds/linux/commit/7cf91a98e607c2f935dbcc177d70011e95b8faff
|
| 501 |
+
"""
|
| 502 |
+
use_hugepage = os.environ.get("NUMPY_MADVISE_HUGEPAGE", None)
|
| 503 |
+
if sys.platform == "linux" and use_hugepage is None:
|
| 504 |
+
# If there is an issue with parsing the kernel version,
|
| 505 |
+
# set use_hugepage to 0. Usage of LooseVersion will handle
|
| 506 |
+
# the kernel version parsing better, but avoided since it
|
| 507 |
+
# will increase the import time.
|
| 508 |
+
# See: #16679 for related discussion.
|
| 509 |
+
try:
|
| 510 |
+
use_hugepage = 1
|
| 511 |
+
kernel_version = os.uname().release.split(".")[:2]
|
| 512 |
+
kernel_version = tuple(int(v) for v in kernel_version)
|
| 513 |
+
if kernel_version < (4, 6):
|
| 514 |
+
use_hugepage = 0
|
| 515 |
+
except ValueError:
|
| 516 |
+
use_hugepage = 0
|
| 517 |
+
elif use_hugepage is None:
|
| 518 |
+
# This is not Linux, so it should not matter, just enable anyway
|
| 519 |
+
use_hugepage = 1
|
| 520 |
+
else:
|
| 521 |
+
use_hugepage = int(use_hugepage)
|
| 522 |
+
return use_hugepage
|
| 523 |
+
|
| 524 |
+
# Note that this will currently only make a difference on Linux
|
| 525 |
+
_core.multiarray._set_madvise_hugepage(hugepage_setup())
|
| 526 |
+
del hugepage_setup
|
| 527 |
+
|
| 528 |
+
# Give a warning if NumPy is reloaded or imported on a sub-interpreter
|
| 529 |
+
# We do this from python, since the C-module may not be reloaded and
|
| 530 |
+
# it is tidier organized.
|
| 531 |
+
_core.multiarray._multiarray_umath._reload_guard()
|
| 532 |
+
|
| 533 |
+
# TODO: Remove the environment variable entirely now that it is "weak"
|
| 534 |
+
if (os.environ.get("NPY_PROMOTION_STATE", "weak") != "weak"):
|
| 535 |
+
warnings.warn(
|
| 536 |
+
"NPY_PROMOTION_STATE was a temporary feature for NumPy 2.0 "
|
| 537 |
+
"transition and is ignored after NumPy 2.2.",
|
| 538 |
+
UserWarning, stacklevel=2)
|
| 539 |
+
|
| 540 |
+
# Tell PyInstaller where to find hook-numpy.py
|
| 541 |
+
def _pyinstaller_hooks_dir():
|
| 542 |
+
from pathlib import Path
|
| 543 |
+
return [str(Path(__file__).with_name("_pyinstaller").resolve())]
|
| 544 |
+
|
| 545 |
+
|
| 546 |
+
# Remove symbols imported for internal use
|
| 547 |
+
del os, sys, warnings
|
mantis_evalkit/lib/python3.10/site-packages/numpy/_configtool.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
import sys
|
| 4 |
+
|
| 5 |
+
from .version import __version__
|
| 6 |
+
from .lib._utils_impl import get_include
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def main() -> None:
|
| 10 |
+
parser = argparse.ArgumentParser()
|
| 11 |
+
parser.add_argument(
|
| 12 |
+
"--version",
|
| 13 |
+
action="version",
|
| 14 |
+
version=__version__,
|
| 15 |
+
help="Print the version and exit.",
|
| 16 |
+
)
|
| 17 |
+
parser.add_argument(
|
| 18 |
+
"--cflags",
|
| 19 |
+
action="store_true",
|
| 20 |
+
help="Compile flag needed when using the NumPy headers.",
|
| 21 |
+
)
|
| 22 |
+
parser.add_argument(
|
| 23 |
+
"--pkgconfigdir",
|
| 24 |
+
action="store_true",
|
| 25 |
+
help=("Print the pkgconfig directory in which `numpy.pc` is stored "
|
| 26 |
+
"(useful for setting $PKG_CONFIG_PATH)."),
|
| 27 |
+
)
|
| 28 |
+
args = parser.parse_args()
|
| 29 |
+
if not sys.argv[1:]:
|
| 30 |
+
parser.print_help()
|
| 31 |
+
if args.cflags:
|
| 32 |
+
print("-I" + get_include())
|
| 33 |
+
if args.pkgconfigdir:
|
| 34 |
+
_path = Path(get_include()) / '..' / 'lib' / 'pkgconfig'
|
| 35 |
+
print(_path.resolve())
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
if __name__ == "__main__":
|
| 39 |
+
main()
|
mantis_evalkit/lib/python3.10/site-packages/numpy/_expired_attrs_2_0.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Dict of expired attributes that are discontinued since 2.0 release.
|
| 3 |
+
Each item is associated with a migration note.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
__expired_attributes__ = {
|
| 7 |
+
"geterrobj": "Use the np.errstate context manager instead.",
|
| 8 |
+
"seterrobj": "Use the np.errstate context manager instead.",
|
| 9 |
+
"cast": "Use `np.asarray(arr, dtype=dtype)` instead.",
|
| 10 |
+
"source": "Use `inspect.getsource` instead.",
|
| 11 |
+
"lookfor": "Search NumPy's documentation directly.",
|
| 12 |
+
"who": "Use an IDE variable explorer or `locals()` instead.",
|
| 13 |
+
"fastCopyAndTranspose": "Use `arr.T.copy()` instead.",
|
| 14 |
+
"set_numeric_ops":
|
| 15 |
+
"For the general case, use `PyUFunc_ReplaceLoopBySignature`. "
|
| 16 |
+
"For ndarray subclasses, define the ``__array_ufunc__`` method "
|
| 17 |
+
"and override the relevant ufunc.",
|
| 18 |
+
"NINF": "Use `-np.inf` instead.",
|
| 19 |
+
"PINF": "Use `np.inf` instead.",
|
| 20 |
+
"NZERO": "Use `-0.0` instead.",
|
| 21 |
+
"PZERO": "Use `0.0` instead.",
|
| 22 |
+
"add_newdoc":
|
| 23 |
+
"It's still available as `np.lib.add_newdoc`.",
|
| 24 |
+
"add_docstring":
|
| 25 |
+
"It's still available as `np.lib.add_docstring`.",
|
| 26 |
+
"add_newdoc_ufunc":
|
| 27 |
+
"It's an internal function and doesn't have a replacement.",
|
| 28 |
+
"compat": "There's no replacement, as Python 2 is no longer supported.",
|
| 29 |
+
"safe_eval": "Use `ast.literal_eval` instead.",
|
| 30 |
+
"float_": "Use `np.float64` instead.",
|
| 31 |
+
"complex_": "Use `np.complex128` instead.",
|
| 32 |
+
"longfloat": "Use `np.longdouble` instead.",
|
| 33 |
+
"singlecomplex": "Use `np.complex64` instead.",
|
| 34 |
+
"cfloat": "Use `np.complex128` instead.",
|
| 35 |
+
"longcomplex": "Use `np.clongdouble` instead.",
|
| 36 |
+
"clongfloat": "Use `np.clongdouble` instead.",
|
| 37 |
+
"string_": "Use `np.bytes_` instead.",
|
| 38 |
+
"unicode_": "Use `np.str_` instead.",
|
| 39 |
+
"Inf": "Use `np.inf` instead.",
|
| 40 |
+
"Infinity": "Use `np.inf` instead.",
|
| 41 |
+
"NaN": "Use `np.nan` instead.",
|
| 42 |
+
"infty": "Use `np.inf` instead.",
|
| 43 |
+
"issctype": "Use `issubclass(rep, np.generic)` instead.",
|
| 44 |
+
"maximum_sctype":
|
| 45 |
+
"Use a specific dtype instead. You should avoid relying "
|
| 46 |
+
"on any implicit mechanism and select the largest dtype of "
|
| 47 |
+
"a kind explicitly in the code.",
|
| 48 |
+
"obj2sctype": "Use `np.dtype(obj).type` instead.",
|
| 49 |
+
"sctype2char": "Use `np.dtype(obj).char` instead.",
|
| 50 |
+
"sctypes": "Access dtypes explicitly instead.",
|
| 51 |
+
"issubsctype": "Use `np.issubdtype` instead.",
|
| 52 |
+
"set_string_function":
|
| 53 |
+
"Use `np.set_printoptions` instead with a formatter for "
|
| 54 |
+
"custom printing of NumPy objects.",
|
| 55 |
+
"asfarray": "Use `np.asarray` with a proper dtype instead.",
|
| 56 |
+
"issubclass_": "Use `issubclass` builtin instead.",
|
| 57 |
+
"tracemalloc_domain": "It's now available from `np.lib`.",
|
| 58 |
+
"mat": "Use `np.asmatrix` instead.",
|
| 59 |
+
"recfromcsv": "Use `np.genfromtxt` with comma delimiter instead.",
|
| 60 |
+
"recfromtxt": "Use `np.genfromtxt` instead.",
|
| 61 |
+
"deprecate": "Emit `DeprecationWarning` with `warnings.warn` directly, "
|
| 62 |
+
"or use `typing.deprecated`.",
|
| 63 |
+
"deprecate_with_doc": "Emit `DeprecationWarning` with `warnings.warn` "
|
| 64 |
+
"directly, or use `typing.deprecated`.",
|
| 65 |
+
"disp": "Use your own printing function instead.",
|
| 66 |
+
"find_common_type":
|
| 67 |
+
"Use `numpy.promote_types` or `numpy.result_type` instead. "
|
| 68 |
+
"To achieve semantics for the `scalar_types` argument, use "
|
| 69 |
+
"`numpy.result_type` and pass the Python values `0`, `0.0`, or `0j`.",
|
| 70 |
+
"round_": "Use `np.round` instead.",
|
| 71 |
+
"get_array_wrap": "",
|
| 72 |
+
"DataSource": "It's still available as `np.lib.npyio.DataSource`.",
|
| 73 |
+
"nbytes": "Use `np.dtype(<dtype>).itemsize` instead.",
|
| 74 |
+
"byte_bounds": "Now it's available under `np.lib.array_utils.byte_bounds`",
|
| 75 |
+
"compare_chararrays":
|
| 76 |
+
"It's still available as `np.char.compare_chararrays`.",
|
| 77 |
+
"format_parser": "It's still available as `np.rec.format_parser`.",
|
| 78 |
+
"alltrue": "Use `np.all` instead.",
|
| 79 |
+
"sometrue": "Use `np.any` instead.",
|
| 80 |
+
}
|
mantis_evalkit/lib/python3.10/site-packages/numpy/_globals.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Module defining global singleton classes.
|
| 3 |
+
|
| 4 |
+
This module raises a RuntimeError if an attempt to reload it is made. In that
|
| 5 |
+
way the identities of the classes defined here are fixed and will remain so
|
| 6 |
+
even if numpy itself is reloaded. In particular, a function like the following
|
| 7 |
+
will still work correctly after numpy is reloaded::
|
| 8 |
+
|
| 9 |
+
def foo(arg=np._NoValue):
|
| 10 |
+
if arg is np._NoValue:
|
| 11 |
+
...
|
| 12 |
+
|
| 13 |
+
That was not the case when the singleton classes were defined in the numpy
|
| 14 |
+
``__init__.py`` file. See gh-7844 for a discussion of the reload problem that
|
| 15 |
+
motivated this module.
|
| 16 |
+
|
| 17 |
+
"""
|
| 18 |
+
import enum
|
| 19 |
+
|
| 20 |
+
from ._utils import set_module as _set_module
|
| 21 |
+
|
| 22 |
+
__all__ = ['_NoValue', '_CopyMode']
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# Disallow reloading this module so as to preserve the identities of the
|
| 26 |
+
# classes defined here.
|
| 27 |
+
if '_is_loaded' in globals():
|
| 28 |
+
raise RuntimeError('Reloading numpy._globals is not allowed')
|
| 29 |
+
_is_loaded = True
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class _NoValueType:
|
| 33 |
+
"""Special keyword value.
|
| 34 |
+
|
| 35 |
+
The instance of this class may be used as the default value assigned to a
|
| 36 |
+
keyword if no other obvious default (e.g., `None`) is suitable,
|
| 37 |
+
|
| 38 |
+
Common reasons for using this keyword are:
|
| 39 |
+
|
| 40 |
+
- A new keyword is added to a function, and that function forwards its
|
| 41 |
+
inputs to another function or method which can be defined outside of
|
| 42 |
+
NumPy. For example, ``np.std(x)`` calls ``x.std``, so when a ``keepdims``
|
| 43 |
+
keyword was added that could only be forwarded if the user explicitly
|
| 44 |
+
specified ``keepdims``; downstream array libraries may not have added
|
| 45 |
+
the same keyword, so adding ``x.std(..., keepdims=keepdims)``
|
| 46 |
+
unconditionally could have broken previously working code.
|
| 47 |
+
- A keyword is being deprecated, and a deprecation warning must only be
|
| 48 |
+
emitted when the keyword is used.
|
| 49 |
+
|
| 50 |
+
"""
|
| 51 |
+
__instance = None
|
| 52 |
+
def __new__(cls):
|
| 53 |
+
# ensure that only one instance exists
|
| 54 |
+
if not cls.__instance:
|
| 55 |
+
cls.__instance = super().__new__(cls)
|
| 56 |
+
return cls.__instance
|
| 57 |
+
|
| 58 |
+
def __repr__(self):
|
| 59 |
+
return "<no value>"
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
_NoValue = _NoValueType()
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
@_set_module("numpy")
|
| 66 |
+
class _CopyMode(enum.Enum):
|
| 67 |
+
"""
|
| 68 |
+
An enumeration for the copy modes supported
|
| 69 |
+
by numpy.copy() and numpy.array(). The following three modes are supported,
|
| 70 |
+
|
| 71 |
+
- ALWAYS: This means that a deep copy of the input
|
| 72 |
+
array will always be taken.
|
| 73 |
+
- IF_NEEDED: This means that a deep copy of the input
|
| 74 |
+
array will be taken only if necessary.
|
| 75 |
+
- NEVER: This means that the deep copy will never be taken.
|
| 76 |
+
If a copy cannot be avoided then a `ValueError` will be
|
| 77 |
+
raised.
|
| 78 |
+
|
| 79 |
+
Note that the buffer-protocol could in theory do copies. NumPy currently
|
| 80 |
+
assumes an object exporting the buffer protocol will never do this.
|
| 81 |
+
"""
|
| 82 |
+
|
| 83 |
+
ALWAYS = True
|
| 84 |
+
NEVER = False
|
| 85 |
+
IF_NEEDED = 2
|
| 86 |
+
|
| 87 |
+
def __bool__(self):
|
| 88 |
+
# For backwards compatibility
|
| 89 |
+
if self == _CopyMode.ALWAYS:
|
| 90 |
+
return True
|
| 91 |
+
|
| 92 |
+
if self == _CopyMode.NEVER:
|
| 93 |
+
return False
|
| 94 |
+
|
| 95 |
+
raise ValueError(f"{self} is neither True nor False.")
|
mantis_evalkit/lib/python3.10/site-packages/numpy/exceptions.py
ADDED
|
@@ -0,0 +1,247 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Exceptions and Warnings (:mod:`numpy.exceptions`)
|
| 3 |
+
=================================================
|
| 4 |
+
|
| 5 |
+
General exceptions used by NumPy. Note that some exceptions may be module
|
| 6 |
+
specific, such as linear algebra errors.
|
| 7 |
+
|
| 8 |
+
.. versionadded:: NumPy 1.25
|
| 9 |
+
|
| 10 |
+
The exceptions module is new in NumPy 1.25. Older exceptions remain
|
| 11 |
+
available through the main NumPy namespace for compatibility.
|
| 12 |
+
|
| 13 |
+
.. currentmodule:: numpy.exceptions
|
| 14 |
+
|
| 15 |
+
Warnings
|
| 16 |
+
--------
|
| 17 |
+
.. autosummary::
|
| 18 |
+
:toctree: generated/
|
| 19 |
+
|
| 20 |
+
ComplexWarning Given when converting complex to real.
|
| 21 |
+
VisibleDeprecationWarning Same as a DeprecationWarning, but more visible.
|
| 22 |
+
RankWarning Issued when the design matrix is rank deficient.
|
| 23 |
+
|
| 24 |
+
Exceptions
|
| 25 |
+
----------
|
| 26 |
+
.. autosummary::
|
| 27 |
+
:toctree: generated/
|
| 28 |
+
|
| 29 |
+
AxisError Given when an axis was invalid.
|
| 30 |
+
DTypePromotionError Given when no common dtype could be found.
|
| 31 |
+
TooHardError Error specific to `numpy.shares_memory`.
|
| 32 |
+
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
__all__ = [
|
| 37 |
+
"ComplexWarning", "VisibleDeprecationWarning", "ModuleDeprecationWarning",
|
| 38 |
+
"TooHardError", "AxisError", "DTypePromotionError"]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# Disallow reloading this module so as to preserve the identities of the
|
| 42 |
+
# classes defined here.
|
| 43 |
+
if '_is_loaded' in globals():
|
| 44 |
+
raise RuntimeError('Reloading numpy._globals is not allowed')
|
| 45 |
+
_is_loaded = True
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class ComplexWarning(RuntimeWarning):
|
| 49 |
+
"""
|
| 50 |
+
The warning raised when casting a complex dtype to a real dtype.
|
| 51 |
+
|
| 52 |
+
As implemented, casting a complex number to a real discards its imaginary
|
| 53 |
+
part, but this behavior may not be what the user actually wants.
|
| 54 |
+
|
| 55 |
+
"""
|
| 56 |
+
pass
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class ModuleDeprecationWarning(DeprecationWarning):
|
| 60 |
+
"""Module deprecation warning.
|
| 61 |
+
|
| 62 |
+
.. warning::
|
| 63 |
+
|
| 64 |
+
This warning should not be used, since nose testing is not relevant
|
| 65 |
+
anymore.
|
| 66 |
+
|
| 67 |
+
The nose tester turns ordinary Deprecation warnings into test failures.
|
| 68 |
+
That makes it hard to deprecate whole modules, because they get
|
| 69 |
+
imported by default. So this is a special Deprecation warning that the
|
| 70 |
+
nose tester will let pass without making tests fail.
|
| 71 |
+
|
| 72 |
+
"""
|
| 73 |
+
pass
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class VisibleDeprecationWarning(UserWarning):
|
| 77 |
+
"""Visible deprecation warning.
|
| 78 |
+
|
| 79 |
+
By default, python will not show deprecation warnings, so this class
|
| 80 |
+
can be used when a very visible warning is helpful, for example because
|
| 81 |
+
the usage is most likely a user bug.
|
| 82 |
+
|
| 83 |
+
"""
|
| 84 |
+
pass
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class RankWarning(RuntimeWarning):
|
| 88 |
+
"""Matrix rank warning.
|
| 89 |
+
|
| 90 |
+
Issued by polynomial functions when the design matrix is rank deficient.
|
| 91 |
+
|
| 92 |
+
"""
|
| 93 |
+
pass
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# Exception used in shares_memory()
|
| 97 |
+
class TooHardError(RuntimeError):
|
| 98 |
+
"""max_work was exceeded.
|
| 99 |
+
|
| 100 |
+
This is raised whenever the maximum number of candidate solutions
|
| 101 |
+
to consider specified by the ``max_work`` parameter is exceeded.
|
| 102 |
+
Assigning a finite number to max_work may have caused the operation
|
| 103 |
+
to fail.
|
| 104 |
+
|
| 105 |
+
"""
|
| 106 |
+
pass
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class AxisError(ValueError, IndexError):
|
| 110 |
+
"""Axis supplied was invalid.
|
| 111 |
+
|
| 112 |
+
This is raised whenever an ``axis`` parameter is specified that is larger
|
| 113 |
+
than the number of array dimensions.
|
| 114 |
+
For compatibility with code written against older numpy versions, which
|
| 115 |
+
raised a mixture of :exc:`ValueError` and :exc:`IndexError` for this
|
| 116 |
+
situation, this exception subclasses both to ensure that
|
| 117 |
+
``except ValueError`` and ``except IndexError`` statements continue
|
| 118 |
+
to catch ``AxisError``.
|
| 119 |
+
|
| 120 |
+
Parameters
|
| 121 |
+
----------
|
| 122 |
+
axis : int or str
|
| 123 |
+
The out of bounds axis or a custom exception message.
|
| 124 |
+
If an axis is provided, then `ndim` should be specified as well.
|
| 125 |
+
ndim : int, optional
|
| 126 |
+
The number of array dimensions.
|
| 127 |
+
msg_prefix : str, optional
|
| 128 |
+
A prefix for the exception message.
|
| 129 |
+
|
| 130 |
+
Attributes
|
| 131 |
+
----------
|
| 132 |
+
axis : int, optional
|
| 133 |
+
The out of bounds axis or ``None`` if a custom exception
|
| 134 |
+
message was provided. This should be the axis as passed by
|
| 135 |
+
the user, before any normalization to resolve negative indices.
|
| 136 |
+
|
| 137 |
+
.. versionadded:: 1.22
|
| 138 |
+
ndim : int, optional
|
| 139 |
+
The number of array dimensions or ``None`` if a custom exception
|
| 140 |
+
message was provided.
|
| 141 |
+
|
| 142 |
+
.. versionadded:: 1.22
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
Examples
|
| 146 |
+
--------
|
| 147 |
+
>>> import numpy as np
|
| 148 |
+
>>> array_1d = np.arange(10)
|
| 149 |
+
>>> np.cumsum(array_1d, axis=1)
|
| 150 |
+
Traceback (most recent call last):
|
| 151 |
+
...
|
| 152 |
+
numpy.exceptions.AxisError: axis 1 is out of bounds for array of dimension 1
|
| 153 |
+
|
| 154 |
+
Negative axes are preserved:
|
| 155 |
+
|
| 156 |
+
>>> np.cumsum(array_1d, axis=-2)
|
| 157 |
+
Traceback (most recent call last):
|
| 158 |
+
...
|
| 159 |
+
numpy.exceptions.AxisError: axis -2 is out of bounds for array of dimension 1
|
| 160 |
+
|
| 161 |
+
The class constructor generally takes the axis and arrays'
|
| 162 |
+
dimensionality as arguments:
|
| 163 |
+
|
| 164 |
+
>>> print(np.exceptions.AxisError(2, 1, msg_prefix='error'))
|
| 165 |
+
error: axis 2 is out of bounds for array of dimension 1
|
| 166 |
+
|
| 167 |
+
Alternatively, a custom exception message can be passed:
|
| 168 |
+
|
| 169 |
+
>>> print(np.exceptions.AxisError('Custom error message'))
|
| 170 |
+
Custom error message
|
| 171 |
+
|
| 172 |
+
"""
|
| 173 |
+
|
| 174 |
+
__slots__ = ("axis", "ndim", "_msg")
|
| 175 |
+
|
| 176 |
+
def __init__(self, axis, ndim=None, msg_prefix=None):
|
| 177 |
+
if ndim is msg_prefix is None:
|
| 178 |
+
# single-argument form: directly set the error message
|
| 179 |
+
self._msg = axis
|
| 180 |
+
self.axis = None
|
| 181 |
+
self.ndim = None
|
| 182 |
+
else:
|
| 183 |
+
self._msg = msg_prefix
|
| 184 |
+
self.axis = axis
|
| 185 |
+
self.ndim = ndim
|
| 186 |
+
|
| 187 |
+
def __str__(self):
|
| 188 |
+
axis = self.axis
|
| 189 |
+
ndim = self.ndim
|
| 190 |
+
|
| 191 |
+
if axis is ndim is None:
|
| 192 |
+
return self._msg
|
| 193 |
+
else:
|
| 194 |
+
msg = f"axis {axis} is out of bounds for array of dimension {ndim}"
|
| 195 |
+
if self._msg is not None:
|
| 196 |
+
msg = f"{self._msg}: {msg}"
|
| 197 |
+
return msg
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class DTypePromotionError(TypeError):
|
| 201 |
+
"""Multiple DTypes could not be converted to a common one.
|
| 202 |
+
|
| 203 |
+
This exception derives from ``TypeError`` and is raised whenever dtypes
|
| 204 |
+
cannot be converted to a single common one. This can be because they
|
| 205 |
+
are of a different category/class or incompatible instances of the same
|
| 206 |
+
one (see Examples).
|
| 207 |
+
|
| 208 |
+
Notes
|
| 209 |
+
-----
|
| 210 |
+
Many functions will use promotion to find the correct result and
|
| 211 |
+
implementation. For these functions the error will typically be chained
|
| 212 |
+
with a more specific error indicating that no implementation was found
|
| 213 |
+
for the input dtypes.
|
| 214 |
+
|
| 215 |
+
Typically promotion should be considered "invalid" between the dtypes of
|
| 216 |
+
two arrays when `arr1 == arr2` can safely return all ``False`` because the
|
| 217 |
+
dtypes are fundamentally different.
|
| 218 |
+
|
| 219 |
+
Examples
|
| 220 |
+
--------
|
| 221 |
+
Datetimes and complex numbers are incompatible classes and cannot be
|
| 222 |
+
promoted:
|
| 223 |
+
|
| 224 |
+
>>> import numpy as np
|
| 225 |
+
>>> np.result_type(np.dtype("M8[s]"), np.complex128) # doctest: +IGNORE_EXCEPTION_DETAIL
|
| 226 |
+
Traceback (most recent call last):
|
| 227 |
+
...
|
| 228 |
+
DTypePromotionError: The DType <class 'numpy.dtype[datetime64]'> could not
|
| 229 |
+
be promoted by <class 'numpy.dtype[complex128]'>. This means that no common
|
| 230 |
+
DType exists for the given inputs. For example they cannot be stored in a
|
| 231 |
+
single array unless the dtype is `object`. The full list of DTypes is:
|
| 232 |
+
(<class 'numpy.dtype[datetime64]'>, <class 'numpy.dtype[complex128]'>)
|
| 233 |
+
|
| 234 |
+
For example for structured dtypes, the structure can mismatch and the
|
| 235 |
+
same ``DTypePromotionError`` is given when two structured dtypes with
|
| 236 |
+
a mismatch in their number of fields is given:
|
| 237 |
+
|
| 238 |
+
>>> dtype1 = np.dtype([("field1", np.float64), ("field2", np.int64)])
|
| 239 |
+
>>> dtype2 = np.dtype([("field1", np.float64)])
|
| 240 |
+
>>> np.promote_types(dtype1, dtype2) # doctest: +IGNORE_EXCEPTION_DETAIL
|
| 241 |
+
Traceback (most recent call last):
|
| 242 |
+
...
|
| 243 |
+
DTypePromotionError: field names `('field1', 'field2')` and `('field1',)`
|
| 244 |
+
mismatch.
|
| 245 |
+
|
| 246 |
+
""" # NOQA
|
| 247 |
+
pass
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.12 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/_impl.cpython-310.pyc
ADDED
|
Binary file (2.23 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/_utils.cpython-310.pyc
ADDED
|
Binary file (25.5 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (26.4 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/basecontainer.cpython-310.pyc
ADDED
|
Binary file (20.7 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/dictconfig.cpython-310.pyc
ADDED
|
Binary file (19.5 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/errors.cpython-310.pyc
ADDED
|
Binary file (5.11 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/grammar_parser.cpython-310.pyc
ADDED
|
Binary file (3.44 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/grammar_visitor.cpython-310.pyc
ADDED
|
Binary file (10.9 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/listconfig.cpython-310.pyc
ADDED
|
Binary file (17.9 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/nodes.cpython-310.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/omegaconf.cpython-310.pyc
ADDED
|
Binary file (30.4 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/__pycache__/version.cpython-310.pyc
ADDED
|
Binary file (585 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/base.py
ADDED
|
@@ -0,0 +1,962 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import copy
|
| 2 |
+
import sys
|
| 3 |
+
from abc import ABC, abstractmethod
|
| 4 |
+
from collections import defaultdict
|
| 5 |
+
from dataclasses import dataclass, field
|
| 6 |
+
from enum import Enum
|
| 7 |
+
from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, Type, Union
|
| 8 |
+
|
| 9 |
+
from antlr4 import ParserRuleContext
|
| 10 |
+
|
| 11 |
+
from ._utils import (
|
| 12 |
+
_DEFAULT_MARKER_,
|
| 13 |
+
NoneType,
|
| 14 |
+
ValueKind,
|
| 15 |
+
_get_value,
|
| 16 |
+
_is_interpolation,
|
| 17 |
+
_is_missing_value,
|
| 18 |
+
_is_special,
|
| 19 |
+
format_and_raise,
|
| 20 |
+
get_value_kind,
|
| 21 |
+
is_union_annotation,
|
| 22 |
+
is_valid_value_annotation,
|
| 23 |
+
split_key,
|
| 24 |
+
type_str,
|
| 25 |
+
)
|
| 26 |
+
from .errors import (
|
| 27 |
+
ConfigKeyError,
|
| 28 |
+
ConfigTypeError,
|
| 29 |
+
InterpolationKeyError,
|
| 30 |
+
InterpolationResolutionError,
|
| 31 |
+
InterpolationToMissingValueError,
|
| 32 |
+
InterpolationValidationError,
|
| 33 |
+
MissingMandatoryValue,
|
| 34 |
+
UnsupportedInterpolationType,
|
| 35 |
+
ValidationError,
|
| 36 |
+
)
|
| 37 |
+
from .grammar.gen.OmegaConfGrammarParser import OmegaConfGrammarParser
|
| 38 |
+
from .grammar_parser import parse
|
| 39 |
+
from .grammar_visitor import GrammarVisitor
|
| 40 |
+
|
| 41 |
+
DictKeyType = Union[str, bytes, int, Enum, float, bool]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@dataclass
class Metadata:
    """Per-node bookkeeping: type information, key, optionality, and flags."""

    # Declared (reference) type of the node; `Any` when untyped.
    ref_type: Union[Type[Any], Any]

    # Runtime type of the underlying object (e.g. the structured config class).
    object_type: Union[Type[Any], Any]

    # Whether the node accepts `None` as a value.
    optional: bool

    # Key of this node within its parent container.
    key: Any

    # Flags have 3 modes:
    # unset : inherit from parent (None if no parent specifies)
    # set to true: flag is true
    # set to false: flag is false
    flags: Optional[Dict[str, bool]] = None

    # If True, when checking the value of a flag, if the flag is not set None is returned
    # otherwise, the parent node is queried.
    flags_root: bool = False

    # Per-resolver cache of resolved values, keyed by resolver name.
    resolver_cache: Dict[str, Any] = field(default_factory=lambda: defaultdict(dict))

    def __post_init__(self) -> None:
        # Normalize a missing flags dict to an empty one so lookups never
        # have to handle None.
        if self.flags is None:
            self.flags = {}

    @property
    def type_hint(self) -> Union[Type[Any], Any]:
        """Compute `type_hint` from `self.optional` and `self.ref_type`"""
        # For compatibility with pickled OmegaConf objects created using older
        # versions of OmegaConf, we store `ref_type` and `object_type`
        # separately (rather than storing `type_hint` directly).
        if self.optional:
            return Optional[self.ref_type]
        else:
            return self.ref_type
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@dataclass
class ContainerMetadata(Metadata):
    """Metadata for container nodes, adding key/element type information."""

    # Type of the container's keys (e.g. str for DictConfig); None/Any when untyped.
    key_type: Any = None
    # Type of the container's values; None means "not specified".
    element_type: Any = None

    def __post_init__(self) -> None:
        # Containers default to an `Any` reference type when none was given.
        if self.ref_type is None:
            self.ref_type = Any
        assert self.key_type is Any or isinstance(self.key_type, type)
        if self.element_type is not None:
            # Reject element types that OmegaConf cannot represent.
            if not is_valid_value_annotation(self.element_type):
                raise ValidationError(
                    f"Unsupported value type: '{type_str(self.element_type, include_module_name=True)}'"
                )

        # Same normalization as Metadata.__post_init__ (which this overrides).
        if self.flags is None:
            self.flags = {}
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class Node(ABC):
    """Abstract base class for every OmegaConf node (scalar values and containers)."""

    # Metadata describing this node (types, key, optionality, flags).
    _metadata: Metadata

    # Containing node, or None for a root node.
    _parent: Optional["Box"]
    # Lazily built cache of resolved flag lookups; None means "not built yet".
    _flags_cache: Optional[Dict[str, Optional[bool]]]

    def __init__(self, parent: Optional["Box"], metadata: Metadata):
        # Assign through __dict__ to bypass __setattr__ overrides in subclasses.
        self.__dict__["_metadata"] = metadata
        self.__dict__["_parent"] = parent
        self.__dict__["_flags_cache"] = None

    def __getstate__(self) -> Dict[str, Any]:
        # Overridden to ensure that the flags cache is cleared on serialization.
        state_dict = copy.copy(self.__dict__)
        del state_dict["_flags_cache"]
        return state_dict

    def __setstate__(self, state_dict: Dict[str, Any]) -> None:
        """Restore pickled state and re-create an empty flags cache."""
        self.__dict__.update(state_dict)
        self.__dict__["_flags_cache"] = None

    def _set_parent(self, parent: Optional["Box"]) -> None:
        assert parent is None or isinstance(parent, Box)
        self.__dict__["_parent"] = parent
        # Flags are inherited through the parent chain, so cached flag values
        # may be stale once the parent changes.
        self._invalidate_flags_cache()

    def _invalidate_flags_cache(self) -> None:
        # Drop the cache entirely; it is rebuilt lazily by _get_flag.
        self.__dict__["_flags_cache"] = None

    def _get_parent(self) -> Optional["Box"]:
        parent = self.__dict__["_parent"]
        assert parent is None or isinstance(parent, Box)
        return parent

    def _get_parent_container(self) -> Optional["Container"]:
        """
        Like _get_parent, but returns the grandparent
        in the case where `self` is wrapped by a UnionNode.
        """
        parent = self.__dict__["_parent"]
        assert parent is None or isinstance(parent, Box)

        if isinstance(parent, UnionNode):
            grandparent = parent.__dict__["_parent"]
            assert grandparent is None or isinstance(grandparent, Container)
            return grandparent
        else:
            assert parent is None or isinstance(parent, Container)
            return parent

    def _set_flag(
        self,
        flags: Union[List[str], str],
        values: Union[List[Optional[bool]], Optional[bool]],
    ) -> "Node":
        """
        Set (True/False) or clear (None) one or more flags on this node.

        :param flags: a single flag name or a list of flag names.
        :param values: a single value or a list of values; a single value is
            broadcast to all given flags.
        :return: self, to allow chaining.
        """
        if isinstance(flags, str):
            flags = [flags]

        if values is None or isinstance(values, bool):
            values = [values]

        if len(values) == 1:
            # Broadcast a single value across all flags.
            values = len(flags) * values

        if len(flags) != len(values):
            raise ValueError("Inconsistent lengths of input flag names and values")

        for idx, flag in enumerate(flags):
            value = values[idx]
            if value is None:
                # Clearing a flag makes it inherit from the parent chain again.
                assert self._metadata.flags is not None
                if flag in self._metadata.flags:
                    del self._metadata.flags[flag]
            else:
                assert self._metadata.flags is not None
                self._metadata.flags[flag] = value
        self._invalidate_flags_cache()
        return self

    def _get_node_flag(self, flag: str) -> Optional[bool]:
        """
        :param flag: flag to inspect
        :return: the state of the flag on this node only (no parent lookup).
        """
        assert self._metadata.flags is not None
        return self._metadata.flags.get(flag)

    def _get_flag(self, flag: str) -> Optional[bool]:
        # Cached wrapper around _get_flag_no_cache; the cache maps
        # flag name -> resolved value (which may legitimately be None).
        cache = self.__dict__["_flags_cache"]
        if cache is None:
            cache = self.__dict__["_flags_cache"] = {}

        # A sentinel distinguishes "cached None" from "not cached".
        ret = cache.get(flag, _DEFAULT_MARKER_)
        if ret is _DEFAULT_MARKER_:
            ret = self._get_flag_no_cache(flag)
            cache[flag] = ret
        assert ret is None or isinstance(ret, bool)
        return ret

    def _get_flag_no_cache(self, flag: str) -> Optional[bool]:
        """
        Returns the effective value of the flag for this node:
        the value set on this node itself, or the value inherited from
        the closest ancestor that sets it.
        :return: True/False if set somewhere in the chain, otherwise None.
        """
        flags = self._metadata.flags
        assert flags is not None
        if flag in flags and flags[flag] is not None:
            return flags[flag]

        if self._is_flags_root():
            # A flags root stops inheritance from the parent chain.
            return None

        parent = self._get_parent()
        if parent is None:
            return None
        else:
            # noinspection PyProtectedMember
            return parent._get_flag(flag)

    def _format_and_raise(
        self,
        key: Any,
        value: Any,
        cause: Exception,
        msg: Optional[str] = None,
        type_override: Any = None,
    ) -> None:
        """Delegate to format_and_raise with this node as context; never returns."""
        format_and_raise(
            node=self,
            key=key,
            value=value,
            msg=str(cause) if msg is None else msg,
            cause=cause,
            type_override=type_override,
        )
        # format_and_raise always raises; this is unreachable.
        assert False

    @abstractmethod
    def _get_full_key(self, key: Optional[Union[DictKeyType, int]]) -> str:
        """Return the full dotted path of `key` from the root of the config."""
        ...

    def _dereference_node(self) -> "Node":
        """Resolve this node if it is an interpolation; raises on failure."""
        node = self._dereference_node_impl(throw_on_resolution_failure=True)
        assert node is not None
        return node

    def _maybe_dereference_node(
        self,
        throw_on_resolution_failure: bool = False,
        memo: Optional[Set[int]] = None,
    ) -> Optional["Node"]:
        """Like _dereference_node, but may return None instead of raising."""
        return self._dereference_node_impl(
            throw_on_resolution_failure=throw_on_resolution_failure,
            memo=memo,
        )

    def _dereference_node_impl(
        self,
        throw_on_resolution_failure: bool,
        memo: Optional[Set[int]] = None,
    ) -> Optional["Node"]:
        # Non-interpolation nodes dereference to themselves.
        if not self._is_interpolation():
            return self

        parent = self._get_parent_container()
        if parent is None:
            # An interpolation cannot be resolved without a surrounding config.
            if throw_on_resolution_failure:
                raise InterpolationResolutionError(
                    "Cannot resolve interpolation for a node without a parent"
                )
            return None
        assert parent is not None
        key = self._key()
        return parent._resolve_interpolation_from_parse_tree(
            parent=parent,
            key=key,
            value=self,
            parse_tree=parse(_get_value(self)),
            throw_on_resolution_failure=throw_on_resolution_failure,
            memo=memo,
        )

    def _get_root(self) -> "Container":
        """Walk the parent chain and return the root Container."""
        root: Optional[Box] = self._get_parent()
        if root is None:
            # A node without a parent is its own root.
            assert isinstance(self, Container)
            return self
        assert root is not None and isinstance(root, Box)
        while root._get_parent() is not None:
            root = root._get_parent()
            assert root is not None and isinstance(root, Box)
        assert root is not None and isinstance(root, Container)
        return root

    def _is_missing(self) -> bool:
        """
        Check if the node's value is `???` (does *not* resolve interpolations).
        """
        return _is_missing_value(self)

    def _is_none(self) -> bool:
        """
        Check if the node's value is `None` (does *not* resolve interpolations).
        """
        return self._value() is None

    @abstractmethod
    def __eq__(self, other: Any) -> bool:
        ...

    @abstractmethod
    def __ne__(self, other: Any) -> bool:
        ...

    @abstractmethod
    def __hash__(self) -> int:
        ...

    @abstractmethod
    def _value(self) -> Any:
        """Return the raw underlying value of this node."""
        ...

    @abstractmethod
    def _set_value(self, value: Any, flags: Optional[Dict[str, bool]] = None) -> None:
        """Replace the underlying value of this node."""
        ...

    @abstractmethod
    def _is_optional(self) -> bool:
        ...

    @abstractmethod
    def _is_interpolation(self) -> bool:
        ...

    def _key(self) -> Any:
        return self._metadata.key

    def _set_key(self, key: Any) -> None:
        self._metadata.key = key

    def _is_flags_root(self) -> bool:
        return self._metadata.flags_root

    def _set_flags_root(self, flags_root: bool) -> None:
        # Only invalidate the cache when the value actually changes.
        if self._metadata.flags_root != flags_root:
            self._metadata.flags_root = flags_root
            self._invalidate_flags_cache()

    def _has_ref_type(self) -> bool:
        return self._metadata.ref_type is not Any
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
class Box(Node):
    """
    Base class for nodes that can contain other nodes.
    Concrete subclasses include DictConfig, ListConfig, and UnionNode.
    """

    # The contained payload; concrete shape depends on the subclass
    # (dict for DictConfig, list for ListConfig, Node for UnionNode).
    _content: Any

    def __init__(self, parent: Optional["Box"], metadata: Metadata):
        super().__init__(parent=parent, metadata=metadata)
        self.__dict__["_content"] = None

    def __copy__(self) -> Any:
        # real shallow copy is impossible because of the reference to the parent.
        return copy.deepcopy(self)

    def _re_parent(self) -> None:
        # Local imports to avoid a circular dependency at module load time.
        from .dictconfig import DictConfig
        from .listconfig import ListConfig

        # update parents of first level Config nodes to self

        if isinstance(self, DictConfig):
            content = self.__dict__["_content"]
            if isinstance(content, dict):
                for _key, value in self.__dict__["_content"].items():
                    if value is not None:
                        value._set_parent(self)
                    if isinstance(value, Box):
                        value._re_parent()
        elif isinstance(self, ListConfig):
            content = self.__dict__["_content"]
            if isinstance(content, list):
                for item in self.__dict__["_content"]:
                    if item is not None:
                        item._set_parent(self)
                    if isinstance(item, Box):
                        item._re_parent()
        elif isinstance(self, UnionNode):
            content = self.__dict__["_content"]
            if isinstance(content, Node):
                content._set_parent(self)
                if isinstance(content, Box):  # pragma: no cover
                    # No coverage here as support for containers inside
                    # UnionNode is not yet implemented
                    content._re_parent()
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
class Container(Box):
|
| 405 |
+
"""
|
| 406 |
+
Container tagging interface
|
| 407 |
+
"""
|
| 408 |
+
|
| 409 |
+
_metadata: ContainerMetadata
|
| 410 |
+
|
| 411 |
+
    @abstractmethod
    def _get_child(
        self,
        key: Any,
        validate_access: bool = True,
        validate_key: bool = True,
        throw_on_missing_value: bool = False,
        throw_on_missing_key: bool = False,
    ) -> Union[Optional[Node], List[Optional[Node]]]:
        """Return the child node(s) stored under `key` without resolving
        interpolations. Exact validation semantics are subclass-defined."""
        ...
|
| 421 |
+
|
| 422 |
+
    @abstractmethod
    def _get_node(
        self,
        key: Any,
        validate_access: bool = True,
        validate_key: bool = True,
        throw_on_missing_value: bool = False,
        throw_on_missing_key: bool = False,
    ) -> Union[Optional[Node], List[Optional[Node]]]:
        """Return the node stored under `key`. Same signature as `_get_child`;
        the behavioral difference is subclass-defined (not visible here)."""
        ...
|
| 432 |
+
|
| 433 |
+
    @abstractmethod
    def __delitem__(self, key: Any) -> None:
        """Remove the entry stored under `key`."""
        ...
|
| 436 |
+
|
| 437 |
+
    @abstractmethod
    def __setitem__(self, key: Any, value: Any) -> None:
        """Store `value` under `key`."""
        ...
|
| 440 |
+
|
| 441 |
+
    @abstractmethod
    def __iter__(self) -> Iterator[Any]:
        """Iterate over the container's elements (keys or items, per subclass)."""
        ...
|
| 444 |
+
|
| 445 |
+
    @abstractmethod
    def __getitem__(self, key_or_index: Any) -> Any:
        """Return the value stored under `key_or_index`."""
        ...
|
| 448 |
+
|
| 449 |
+
def _resolve_key_and_root(self, key: str) -> Tuple["Container", str]:
|
| 450 |
+
orig = key
|
| 451 |
+
if not key.startswith("."):
|
| 452 |
+
return self._get_root(), key
|
| 453 |
+
else:
|
| 454 |
+
root: Optional[Container] = self
|
| 455 |
+
assert key.startswith(".")
|
| 456 |
+
while True:
|
| 457 |
+
assert root is not None
|
| 458 |
+
key = key[1:]
|
| 459 |
+
if not key.startswith("."):
|
| 460 |
+
break
|
| 461 |
+
root = root._get_parent_container()
|
| 462 |
+
if root is None:
|
| 463 |
+
raise ConfigKeyError(f"Error resolving key '{orig}'")
|
| 464 |
+
|
| 465 |
+
return root, key
|
| 466 |
+
|
| 467 |
+
    def _select_impl(
        self,
        key: str,
        throw_on_missing: bool,
        throw_on_resolution_failure: bool,
        memo: Optional[Set[int]] = None,
    ) -> Tuple[Optional["Container"], Optional[str], Optional[Node]]:
        """
        Select a value using dot separated key sequence

        :param key: dot-separated path, e.g. "a.b.c"; "" selects `self`.
        :param throw_on_missing: raise when an intermediate/final value is `???`.
        :param throw_on_resolution_failure: raise when an interpolation along
            the path cannot be resolved.
        :param memo: set of node ids used to detect recursive interpolations.
        :return: (parent container, last key, selected node); each element may
            be None when selection fails.
        """
        from .omegaconf import _select_one

        if key == "":
            return self, "", self

        split = split_key(key)
        root: Optional[Container] = self
        # Walk all path components except the last, dereferencing
        # interpolations as we go.
        for i in range(len(split) - 1):
            if root is None:
                break

            k = split[i]
            ret, _ = _select_one(
                c=root,
                key=k,
                throw_on_missing=throw_on_missing,
                throw_on_type_error=throw_on_resolution_failure,
            )
            if isinstance(ret, Node):
                ret = ret._maybe_dereference_node(
                    throw_on_resolution_failure=throw_on_resolution_failure,
                    memo=memo,
                )

            if ret is not None and not isinstance(ret, Container):
                # An intermediate component resolved to a leaf: the rest of
                # the path cannot be selected.
                parent_key = ".".join(split[0 : i + 1])
                child_key = split[i + 1]
                raise ConfigTypeError(
                    f"Error trying to access {key}: node `{parent_key}` "
                    f"is not a container and thus cannot contain `{child_key}`"
                )
            root = ret

        if root is None:
            return None, None, None

        last_key = split[-1]
        value, _ = _select_one(
            c=root,
            key=last_key,
            throw_on_missing=throw_on_missing,
            throw_on_type_error=throw_on_resolution_failure,
        )
        if value is None:
            return root, last_key, None

        if memo is not None:
            # Seeing the same node id again while resolving means the
            # interpolation refers (directly or indirectly) to itself.
            vid = id(value)
            if vid in memo:
                raise InterpolationResolutionError("Recursive interpolation detected")
            # push to memo "stack"
            memo.add(vid)

        try:
            value = root._maybe_resolve_interpolation(
                parent=root,
                key=last_key,
                value=value,
                throw_on_resolution_failure=throw_on_resolution_failure,
                memo=memo,
            )
        finally:
            if memo is not None:
                # pop from memo "stack"
                memo.remove(vid)

        return root, last_key, value
|
| 544 |
+
|
| 545 |
+
    def _resolve_interpolation_from_parse_tree(
        self,
        parent: Optional["Container"],
        value: "Node",
        key: Any,
        parse_tree: OmegaConfGrammarParser.ConfigValueContext,
        throw_on_resolution_failure: bool,
        memo: Optional[Set[int]],
    ) -> Optional["Node"]:
        """
        Resolve an interpolation.

        This happens in two steps:
        1. The parse tree is visited, which outputs either a `Node` (e.g.,
           for node interpolations "${foo}"), a string (e.g., for string
           interpolations "hello ${name}", or any other arbitrary value
           (e.g., or custom interpolations "${foo:bar}").
        2. This output is potentially validated and converted when the node
           being resolved (`value`) is typed.

        If an error occurs in one of the above steps, an `InterpolationResolutionError`
        (or a subclass of it) is raised, *unless* `throw_on_resolution_failure` is set
        to `False` (in which case the return value is `None`).

        :param parent: Parent of the node being resolved.
        :param value: Node being resolved.
        :param key: The associated key in the parent.
        :param parse_tree: The parse tree as obtained from `grammar_parser.parse()`.
        :param throw_on_resolution_failure: If `False`, then exceptions raised during
            the resolution of the interpolation are silenced, and instead `None` is
            returned.
        :param memo: Set of node ids used to detect recursive interpolations.

        :return: A `Node` that contains the interpolation result. This may be an existing
            node in the config (in the case of a node interpolation "${foo}"), or a new
            node that is created to wrap the interpolated value. It is `None` if and only if
            `throw_on_resolution_failure` is `False` and an error occurs during resolution.
        """

        try:
            resolved = self.resolve_parse_tree(
                parse_tree=parse_tree, node=value, key=key, memo=memo
            )
        except InterpolationResolutionError:
            if throw_on_resolution_failure:
                raise
            return None

        return self._validate_and_convert_interpolation_result(
            parent=parent,
            value=value,
            key=key,
            resolved=resolved,
            throw_on_resolution_failure=throw_on_resolution_failure,
        )
|
| 599 |
+
|
| 600 |
+
    def _validate_and_convert_interpolation_result(
        self,
        parent: Optional["Container"],
        value: "Node",
        key: Any,
        resolved: Any,
        throw_on_resolution_failure: bool,
    ) -> Optional["Node"]:
        """
        Validate the raw interpolation result `resolved` against the type of
        `value` and wrap it in a node when needed.

        :return: a `Node` holding the result, or None when validation fails
            and `throw_on_resolution_failure` is False.
        """
        from .nodes import AnyNode, InterpolationResultNode, ValueNode

        # If the output is not a Node already (e.g., because it is the output of a
        # custom resolver), then we will need to wrap it within a Node.
        must_wrap = not isinstance(resolved, Node)

        # If the node is typed, validate (and possibly convert) the result.
        if isinstance(value, ValueNode) and not isinstance(value, AnyNode):
            res_value = _get_value(resolved)
            try:
                conv_value = value.validate_and_convert(res_value)
            except ValidationError as e:
                if throw_on_resolution_failure:
                    self._format_and_raise(
                        key=key,
                        value=res_value,
                        cause=e,
                        msg=f"While dereferencing interpolation '{value}': {e}",
                        type_override=InterpolationValidationError,
                    )
                return None

            # If the converted value is of the same type, it means that no conversion
            # was actually needed. As a result, we can keep the original `resolved`
            # (and otherwise, the converted value must be wrapped into a new node).
            if type(conv_value) != type(res_value):
                must_wrap = True
                resolved = conv_value

        if must_wrap:
            return InterpolationResultNode(value=resolved, key=key, parent=parent)
        else:
            assert isinstance(resolved, Node)
            return resolved
|
| 642 |
+
|
| 643 |
+
def _validate_not_dereferencing_to_parent(self, node: Node, target: Node) -> None:
|
| 644 |
+
parent: Optional[Node] = node
|
| 645 |
+
while parent is not None:
|
| 646 |
+
if parent is target:
|
| 647 |
+
raise InterpolationResolutionError(
|
| 648 |
+
"Interpolation to parent node detected"
|
| 649 |
+
)
|
| 650 |
+
parent = parent._get_parent()
|
| 651 |
+
|
| 652 |
+
    def _resolve_node_interpolation(
        self, inter_key: str, memo: Optional[Set[int]]
    ) -> "Node":
        """A node interpolation is of the form `${foo.bar}`

        :param inter_key: the (possibly relative) key inside the braces.
        :param memo: set of node ids used to detect recursive interpolations.
        :raises InterpolationKeyError: when the key cannot be resolved/found.
        :raises InterpolationToMissingValueError: when the key points at `???`.
        """
        try:
            root_node, inter_key = self._resolve_key_and_root(inter_key)
        except ConfigKeyError as exc:
            # Re-raise as an interpolation-specific error, keeping the
            # original traceback.
            raise InterpolationKeyError(
                f"ConfigKeyError while resolving interpolation: {exc}"
            ).with_traceback(sys.exc_info()[2])

        try:
            parent, last_key, value = root_node._select_impl(
                inter_key,
                throw_on_missing=True,
                throw_on_resolution_failure=True,
                memo=memo,
            )
        except MissingMandatoryValue as exc:
            raise InterpolationToMissingValueError(
                f"MissingMandatoryValue while resolving interpolation: {exc}"
            ).with_traceback(sys.exc_info()[2])

        if parent is None or value is None:
            raise InterpolationKeyError(f"Interpolation key '{inter_key}' not found")
        else:
            # Reject interpolations that point at this node or an ancestor.
            self._validate_not_dereferencing_to_parent(node=self, target=value)
            return value
|
| 680 |
+
|
| 681 |
+
    def _evaluate_custom_resolver(
        self,
        key: Any,
        node: Node,
        inter_type: str,
        inter_args: Tuple[Any, ...],
        inter_args_str: Tuple[str, ...],
    ) -> Any:
        """
        Evaluate a custom resolver interpolation like `${name:args}`.

        :param inter_type: the resolver name (text before the colon).
        :param inter_args: the parsed resolver arguments.
        :param inter_args_str: the raw string form of the arguments.
        :raises UnsupportedInterpolationType: if no resolver is registered
            under `inter_type`.
        """
        # Local import to avoid a circular dependency at module load time.
        from omegaconf import OmegaConf

        resolver = OmegaConf._get_resolver(inter_type)
        if resolver is not None:
            root_node = self._get_root()
            return resolver(
                root_node,
                self,
                node,
                inter_args,
                inter_args_str,
            )
        else:
            raise UnsupportedInterpolationType(
                f"Unsupported interpolation type {inter_type}"
            )
|
| 705 |
+
|
| 706 |
+
    def _maybe_resolve_interpolation(
        self,
        parent: Optional["Container"],
        key: Any,
        value: Node,
        throw_on_resolution_failure: bool,
        memo: Optional[Set[int]] = None,
    ) -> Optional[Node]:
        """Resolve `value` if it is an interpolation, otherwise return it as-is.

        :param parent: container holding `value`.
        :param key: key under which `value` is stored in `parent`.
        :param value: the node that may be an interpolation.
        :param throw_on_resolution_failure: if True, resolution errors raise;
            otherwise the failure is reported by the callee's return value.
        :param memo: ids of nodes already visited; a fresh set is created when
            None so each top-level resolution starts clean.
        """
        value_kind = get_value_kind(value)
        if value_kind != ValueKind.INTERPOLATION:
            # Fast path: plain values pass straight through.
            return value

        # Parse the raw interpolation string into a grammar tree, then resolve it.
        parse_tree = parse(_get_value(value))
        return self._resolve_interpolation_from_parse_tree(
            parent=parent,
            value=value,
            key=key,
            parse_tree=parse_tree,
            throw_on_resolution_failure=throw_on_resolution_failure,
            memo=memo if memo is not None else set(),
        )
|
| 727 |
+
|
| 728 |
+
    def resolve_parse_tree(
        self,
        parse_tree: ParserRuleContext,
        node: Node,
        memo: Optional[Set[int]] = None,
        key: Optional[Any] = None,
    ) -> Any:
        """
        Resolve a given parse tree into its value.

        We make no assumption here on the type of the tree's root, so that the
        return value may be of any type.

        :param parse_tree: ANTLR parse tree of the interpolation expression.
        :param node: node whose value produced the parse tree.
        :param memo: ids of nodes already visited (shared with nested resolutions).
        :param key: key of `node` in its parent, forwarded to custom resolvers.
        :raises InterpolationResolutionError: on any failure during visiting;
            unrelated exceptions are wrapped in this type.
        """

        # Callback used by the visitor for `${foo.bar}` node interpolations.
        def node_interpolation_callback(
            inter_key: str, memo: Optional[Set[int]]
        ) -> Optional["Node"]:
            return self._resolve_node_interpolation(inter_key=inter_key, memo=memo)

        # Callback used by the visitor for `${resolver:args}` interpolations.
        def resolver_interpolation_callback(
            name: str, args: Tuple[Any, ...], args_str: Tuple[str, ...]
        ) -> Any:
            return self._evaluate_custom_resolver(
                key=key,
                node=node,
                inter_type=name,
                inter_args=args,
                inter_args_str=args_str,
            )

        visitor = GrammarVisitor(
            node_interpolation_callback=node_interpolation_callback,
            resolver_interpolation_callback=resolver_interpolation_callback,
            memo=memo,
        )
        try:
            return visitor.visit(parse_tree)
        except InterpolationResolutionError:
            # Already the right error type: propagate unchanged.
            raise
        except Exception as exc:
            # Other kinds of exceptions are wrapped in an `InterpolationResolutionError`.
            raise InterpolationResolutionError(
                f"{type(exc).__name__} raised while resolving interpolation: {exc}"
            ).with_traceback(sys.exc_info()[2])
|
| 772 |
+
|
| 773 |
+
def _invalidate_flags_cache(self) -> None:
|
| 774 |
+
from .dictconfig import DictConfig
|
| 775 |
+
from .listconfig import ListConfig
|
| 776 |
+
|
| 777 |
+
# invalidate subtree cache only if the cache is initialized in this node.
|
| 778 |
+
|
| 779 |
+
if self.__dict__["_flags_cache"] is not None:
|
| 780 |
+
self.__dict__["_flags_cache"] = None
|
| 781 |
+
if isinstance(self, DictConfig):
|
| 782 |
+
content = self.__dict__["_content"]
|
| 783 |
+
if isinstance(content, dict):
|
| 784 |
+
for value in self.__dict__["_content"].values():
|
| 785 |
+
value._invalidate_flags_cache()
|
| 786 |
+
elif isinstance(self, ListConfig):
|
| 787 |
+
content = self.__dict__["_content"]
|
| 788 |
+
if isinstance(content, list):
|
| 789 |
+
for item in self.__dict__["_content"]:
|
| 790 |
+
item._invalidate_flags_cache()
|
| 791 |
+
|
| 792 |
+
|
| 793 |
+
class SCMode(Enum):
    """Output mode for structured configs during container conversion
    (see `BaseContainer._to_content`'s `structured_config_mode` parameter)."""

    DICT = 1  # Convert to plain dict
    DICT_CONFIG = 2  # Keep as OmegaConf DictConfig
    INSTANTIATE = 3  # Create a dataclass or attrs class instance
|
| 797 |
+
|
| 798 |
+
|
| 799 |
+
class UnionNode(Box):
    """
    This class handles Union type hints. The `_content` attribute is either a
    child node that is compatible with the given Union ref_type, or it is a
    special value (None or MISSING or interpolation).

    Much of the logic for e.g. value assignment and type validation is
    delegated to the child node. As such, UnionNode functions as a
    "pass-through" node. User apps and downstream libraries should not need to
    know about UnionNode (assuming they only use OmegaConf's public API).
    """

    # Parent container, if any.
    _parent: Optional[Container]
    # Wrapped child node, or a special value (None / MISSING / interpolation string).
    _content: Union[Node, None, str]

    def __init__(
        self,
        content: Any,
        ref_type: Any,
        is_optional: bool = True,
        key: Any = None,
        parent: Optional[Box] = None,
    ) -> None:
        """Wrap `content` under the Union type hint `ref_type`.

        Any failure (bad ref_type, bad parent, incompatible content) is
        reported through `format_and_raise` for consistent error formatting.
        """
        try:
            if not is_union_annotation(ref_type):  # pragma: no cover
                msg = (
                    f"UnionNode got unexpected ref_type {ref_type}. Please file a bug"
                    + " report at https://github.com/omry/omegaconf/issues"
                )
                raise AssertionError(msg)
            if not isinstance(parent, (Container, NoneType)):
                raise ConfigTypeError("Parent type is not omegaconf.Container")
            super().__init__(
                parent=parent,
                metadata=Metadata(
                    ref_type=ref_type,
                    object_type=None,
                    optional=is_optional,
                    key=key,
                    # "convert" is disabled so the union's child keeps its node type.
                    flags={"convert": False},
                ),
            )
            self._set_value(content)
        except Exception as ex:
            format_and_raise(node=None, key=key, value=content, msg=str(ex), cause=ex)

    def _get_full_key(self, key: Optional[Union[DictKeyType, int]]) -> str:
        """Build the dotted key path of this node by walking up the parents."""
        parent = self._get_parent()
        if parent is None:
            if self._metadata.key is None:
                return ""
            else:
                return str(self._metadata.key)
        else:
            return parent._get_full_key(self._metadata.key)

    def __eq__(self, other: Any) -> bool:
        """Equality is delegated to the wrapped content (pass-through node)."""
        content = self.__dict__["_content"]
        if isinstance(content, Node):
            ret = content.__eq__(other)
        elif isinstance(other, Node):
            # Special content (None/str) compared against a Node: let the
            # Node's __eq__ decide.
            ret = other.__eq__(content)
        else:
            ret = content.__eq__(other)
        assert isinstance(ret, (bool, type(NotImplemented)))
        return ret

    def __ne__(self, other: Any) -> bool:
        x = self.__eq__(other)
        if x is NotImplemented:
            return NotImplemented
        return not x

    def __hash__(self) -> int:
        return hash(self.__dict__["_content"])

    def _value(self) -> Union[Node, None, str]:
        """Return the wrapped child node or special value."""
        content = self.__dict__["_content"]
        assert isinstance(content, (Node, NoneType, str))
        return content

    def _set_value(self, value: Any, flags: Optional[Dict[str, bool]] = None) -> None:
        """Assign `value`, rolling back content and metadata on failure."""
        previous_content = self.__dict__["_content"]
        previous_metadata = self.__dict__["_metadata"]
        try:
            self._set_value_impl(value, flags)
        except Exception as e:
            # Restore the previous state so a failed assignment has no effect.
            self.__dict__["_content"] = previous_content
            self.__dict__["_metadata"] = previous_metadata
            raise e

    def _set_value_impl(
        self, value: Any, flags: Optional[Dict[str, bool]] = None
    ) -> None:
        """Try to wrap `value` as a node of one of the Union's member types.

        Special values (None / MISSING / interpolation) are stored directly;
        otherwise each member of `ref_type.__args__` is tried in order until
        `_node_wrap` accepts the value.

        :raises ValidationError: if the value fits none of the member types,
            is None for a non-optional union, or is a container.
        """
        from omegaconf.omegaconf import _node_wrap

        ref_type = self._metadata.ref_type
        type_hint = self._metadata.type_hint

        value = _get_value(value)
        if _is_special(value):
            assert isinstance(value, (str, NoneType))
            if value is None:
                if not self._is_optional():
                    raise ValidationError(
                        f"Value '$VALUE' is incompatible with type hint '{type_str(type_hint)}'"
                    )
            self.__dict__["_content"] = value
        elif isinstance(value, Container):
            raise ValidationError(
                f"Cannot assign container '$VALUE' of type '$VALUE_TYPE' to {type_str(type_hint)}"
            )
        else:
            # First member type that validates wins.
            for candidate_ref_type in ref_type.__args__:
                try:
                    self.__dict__["_content"] = _node_wrap(
                        value=value,
                        ref_type=candidate_ref_type,
                        is_optional=False,
                        key=None,
                        parent=self,
                    )
                    break
                except ValidationError:
                    continue
            else:
                # No member type accepted the value.
                raise ValidationError(
                    f"Value '$VALUE' of type '$VALUE_TYPE' is incompatible with type hint '{type_str(type_hint)}'"
                )

    def _is_optional(self) -> bool:
        return self.__dict__["_metadata"].optional is True

    def _is_interpolation(self) -> bool:
        return _is_interpolation(self.__dict__["_content"])

    def __str__(self) -> str:
        return str(self.__dict__["_content"])

    def __repr__(self) -> str:
        return repr(self.__dict__["_content"])

    def __deepcopy__(self, memo: Dict[int, Any]) -> "UnionNode":
        """Deep-copy this node without copying its parent.

        `_content` and `_parent` are handled specially: the child is copied
        with its parent temporarily detached (to avoid copying the whole
        ancestor chain), and the parent reference is carried over as-is.
        """
        res = object.__new__(type(self))
        for key, value in self.__dict__.items():
            if key not in ("_content", "_parent"):
                res.__dict__[key] = copy.deepcopy(value, memo=memo)

        src_content = self.__dict__["_content"]
        if isinstance(src_content, Node):
            old_parent = src_content.__dict__["_parent"]
            try:
                # Detach so deepcopy doesn't follow the parent chain upward.
                src_content.__dict__["_parent"] = None
                content_copy = copy.deepcopy(src_content, memo=memo)
                content_copy.__dict__["_parent"] = res
            finally:
                # Always restore the source child's parent link.
                src_content.__dict__["_parent"] = old_parent
        else:
            # None and strings can be assigned as is
            content_copy = src_content

        res.__dict__["_content"] = content_copy
        res.__dict__["_parent"] = self.__dict__["_parent"]
        return res
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/basecontainer.py
ADDED
|
@@ -0,0 +1,916 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import copy
|
| 2 |
+
import sys
|
| 3 |
+
from abc import ABC, abstractmethod
|
| 4 |
+
from enum import Enum
|
| 5 |
+
from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Tuple, Union
|
| 6 |
+
|
| 7 |
+
import yaml
|
| 8 |
+
|
| 9 |
+
from ._utils import (
|
| 10 |
+
_DEFAULT_MARKER_,
|
| 11 |
+
ValueKind,
|
| 12 |
+
_ensure_container,
|
| 13 |
+
_get_value,
|
| 14 |
+
_is_interpolation,
|
| 15 |
+
_is_missing_value,
|
| 16 |
+
_is_none,
|
| 17 |
+
_is_special,
|
| 18 |
+
_resolve_optional,
|
| 19 |
+
get_structured_config_data,
|
| 20 |
+
get_type_hint,
|
| 21 |
+
get_value_kind,
|
| 22 |
+
get_yaml_loader,
|
| 23 |
+
is_container_annotation,
|
| 24 |
+
is_dict_annotation,
|
| 25 |
+
is_list_annotation,
|
| 26 |
+
is_primitive_dict,
|
| 27 |
+
is_primitive_type_annotation,
|
| 28 |
+
is_structured_config,
|
| 29 |
+
is_tuple_annotation,
|
| 30 |
+
is_union_annotation,
|
| 31 |
+
)
|
| 32 |
+
from .base import (
|
| 33 |
+
Box,
|
| 34 |
+
Container,
|
| 35 |
+
ContainerMetadata,
|
| 36 |
+
DictKeyType,
|
| 37 |
+
Node,
|
| 38 |
+
SCMode,
|
| 39 |
+
UnionNode,
|
| 40 |
+
)
|
| 41 |
+
from .errors import (
|
| 42 |
+
ConfigCycleDetectedException,
|
| 43 |
+
ConfigTypeError,
|
| 44 |
+
InterpolationResolutionError,
|
| 45 |
+
KeyValidationError,
|
| 46 |
+
MissingMandatoryValue,
|
| 47 |
+
OmegaConfBaseException,
|
| 48 |
+
ReadonlyConfigError,
|
| 49 |
+
ValidationError,
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
if TYPE_CHECKING:
|
| 53 |
+
from .dictconfig import DictConfig # pragma: no cover
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class BaseContainer(Container, ABC):
|
| 57 |
+
_resolvers: ClassVar[Dict[str, Any]] = {}
|
| 58 |
+
|
| 59 |
+
def __init__(self, parent: Optional[Box], metadata: ContainerMetadata):
|
| 60 |
+
if not (parent is None or isinstance(parent, Box)):
|
| 61 |
+
raise ConfigTypeError("Parent type is not omegaconf.Box")
|
| 62 |
+
super().__init__(parent=parent, metadata=metadata)
|
| 63 |
+
|
| 64 |
+
    def _get_child(
        self,
        key: Any,
        validate_access: bool = True,
        validate_key: bool = True,
        throw_on_missing_value: bool = False,
        throw_on_missing_key: bool = False,
    ) -> Union[Optional[Node], List[Optional[Node]]]:
        """Like _get_node, passing through to the nearest concrete Node.

        If the stored child is a UnionNode wrapping a real node, that inner
        node is returned instead, so callers never see the UnionNode wrapper.
        """
        child = self._get_node(
            key=key,
            validate_access=validate_access,
            validate_key=validate_key,
            throw_on_missing_value=throw_on_missing_value,
            throw_on_missing_key=throw_on_missing_key,
        )
        # Unwrap UnionNode unless it holds a special value (None/MISSING/interp).
        if isinstance(child, UnionNode) and not _is_special(child):
            value = child._value()
            assert isinstance(value, Node) and not isinstance(value, UnionNode)
            child = value
        return child
|
| 85 |
+
|
| 86 |
+
    def _resolve_with_default(
        self,
        key: Union[DictKeyType, int],
        value: Node,
        default_value: Any = _DEFAULT_MARKER_,
    ) -> Any:
        """returns the value with the specified key, like obj.key and obj['key']

        Interpolations are resolved; MISSING values return `default_value` when
        one was given, otherwise raise.

        :raises MissingMandatoryValue: if `value` is MISSING and no default was provided.
        """
        if _is_missing_value(value):
            if default_value is not _DEFAULT_MARKER_:
                return default_value
            raise MissingMandatoryValue("Missing mandatory value: $FULL_KEY")

        resolved_node = self._maybe_resolve_interpolation(
            parent=self,
            key=key,
            value=value,
            throw_on_resolution_failure=True,
        )

        # Convert the node into its underlying Python value.
        return _get_value(resolved_node)
|
| 106 |
+
|
| 107 |
+
def __str__(self) -> str:
|
| 108 |
+
return self.__repr__()
|
| 109 |
+
|
| 110 |
+
def __repr__(self) -> str:
|
| 111 |
+
if self.__dict__["_content"] is None:
|
| 112 |
+
return "None"
|
| 113 |
+
elif self._is_interpolation() or self._is_missing():
|
| 114 |
+
v = self.__dict__["_content"]
|
| 115 |
+
return f"'{v}'"
|
| 116 |
+
else:
|
| 117 |
+
return self.__dict__["_content"].__repr__() # type: ignore
|
| 118 |
+
|
| 119 |
+
# Support pickle
|
| 120 |
+
    def __getstate__(self) -> Dict[str, Any]:
        """Pickle support: return a state dict safe to serialize.

        The flags cache is dropped, and typing aliases like Dict[K, V] /
        List[V] in `ref_type` are replaced by their bare generic so the
        pickle does not depend on the concrete parameters (restored in
        `__setstate__`).
        """
        dict_copy = copy.copy(self.__dict__)

        # no need to serialize the flags cache, it can be re-constructed later
        dict_copy.pop("_flags_cache", None)

        # Copy metadata so the ref_type rewrite below doesn't mutate self.
        dict_copy["_metadata"] = copy.copy(dict_copy["_metadata"])
        ref_type = self._metadata.ref_type
        if is_container_annotation(ref_type):
            if is_dict_annotation(ref_type):
                dict_copy["_metadata"].ref_type = Dict
            elif is_list_annotation(ref_type):
                dict_copy["_metadata"].ref_type = List
            else:
                assert False
        if sys.version_info < (3, 7):  # pragma: no cover
            element_type = self._metadata.element_type
            if is_union_annotation(element_type):
                raise OmegaConfBaseException(
                    "Serializing structured configs with `Union` element type requires python >= 3.7"
                )
        return dict_copy
|
| 142 |
+
|
| 143 |
+
# Support pickle
|
| 144 |
+
    def __setstate__(self, d: Dict[str, Any]) -> None:
        """Pickle support: restore state saved by `__getstate__`.

        Reconstructs the parameterized ref_type (Dict[K, V] / List[V]) that
        `__getstate__` erased, fills in defaults for fields missing from
        OmegaConf 2.0 pickles, and resets the flags cache.
        """
        from omegaconf import DictConfig
        from omegaconf._utils import is_generic_dict, is_generic_list

        if isinstance(self, DictConfig):
            key_type = d["_metadata"].key_type

            # backward compatibility to load OmegaConf 2.0 configs
            if key_type is None:
                key_type = Any
                d["_metadata"].key_type = key_type

            element_type = d["_metadata"].element_type

            # backward compatibility to load OmegaConf 2.0 configs
            if element_type is None:
                element_type = Any
                d["_metadata"].element_type = element_type

        ref_type = d["_metadata"].ref_type
        if is_container_annotation(ref_type):
            # Re-parameterize the bare generic stored by __getstate__.
            if is_generic_dict(ref_type):
                d["_metadata"].ref_type = Dict[key_type, element_type]  # type: ignore
            elif is_generic_list(ref_type):
                d["_metadata"].ref_type = List[element_type]  # type: ignore
            else:
                assert False

        # The flags cache is not serialized; start with an empty one.
        d["_flags_cache"] = None
        self.__dict__.update(d)
|
| 174 |
+
|
| 175 |
+
    @abstractmethod
    def __delitem__(self, key: Any) -> None:
        """Delete the child stored under `key`; implemented by concrete subclasses."""
        ...
|
| 178 |
+
|
| 179 |
+
def __len__(self) -> int:
|
| 180 |
+
if self._is_none() or self._is_missing() or self._is_interpolation():
|
| 181 |
+
return 0
|
| 182 |
+
content = self.__dict__["_content"]
|
| 183 |
+
return len(content)
|
| 184 |
+
|
| 185 |
+
def merge_with_cli(self) -> None:
|
| 186 |
+
args_list = sys.argv[1:]
|
| 187 |
+
self.merge_with_dotlist(args_list)
|
| 188 |
+
|
| 189 |
+
    def merge_with_dotlist(self, dotlist: List[str]) -> None:
        """Merge a dotlist (e.g. ["a.b=1", "c=foo"]) into this config.

        Each entry is split on the first '='; the right-hand side is parsed
        with the OmegaConf YAML loader (so "1" becomes an int, "true" a bool,
        etc.). An entry without '=' sets the key to None.

        :raises ValueError: if `dotlist` is not a list/tuple of strings.
        """
        from omegaconf import OmegaConf

        def fail() -> None:
            raise ValueError("Input list must be a list or a tuple of strings")

        if not isinstance(dotlist, (list, tuple)):
            fail()

        for arg in dotlist:
            if not isinstance(arg, str):
                fail()

            idx = arg.find("=")
            if idx == -1:
                # Bare key with no '=': assign None.
                key = arg
                value = None
            else:
                key = arg[0:idx]
                value = arg[idx + 1 :]
                value = yaml.load(value, Loader=get_yaml_loader())

            OmegaConf.update(self, key, value)
|
| 212 |
+
|
| 213 |
+
def is_empty(self) -> bool:
|
| 214 |
+
"""return true if config is empty"""
|
| 215 |
+
return len(self.__dict__["_content"]) == 0
|
| 216 |
+
|
| 217 |
+
    @staticmethod
    def _to_content(
        conf: Container,
        resolve: bool,
        throw_on_missing: bool,
        enum_to_str: bool = False,
        structured_config_mode: SCMode = SCMode.DICT,
    ) -> Union[None, Any, str, Dict[DictKeyType, Any], List[Any]]:
        """Recursively convert `conf` to plain Python containers.

        :param conf: config node to convert.
        :param resolve: if True, dereference interpolations before converting.
        :param throw_on_missing: if True, raise on MISSING values; otherwise
            the MISSING marker is kept in the output.
        :param enum_to_str: if True, convert Enum keys and values to their `.name`.
        :param structured_config_mode: how DictConfigs backed by structured
            configs are emitted (plain dict, kept as DictConfig, or
            instantiated as the underlying object type).
        """
        from omegaconf import MISSING, DictConfig, ListConfig

        # Convert a leaf node to its value, honoring enum_to_str.
        def convert(val: Node) -> Any:
            value = val._value()
            if enum_to_str and isinstance(value, Enum):
                value = f"{value.name}"

            return value

        # Fetch, optionally dereference, and recursively convert one child.
        def get_node_value(key: Union[DictKeyType, int]) -> Any:
            try:
                node = conf._get_child(key, throw_on_missing_value=throw_on_missing)
            except MissingMandatoryValue as e:
                conf._format_and_raise(key=key, value=None, cause=e)
            assert isinstance(node, Node)
            if resolve:
                try:
                    node = node._dereference_node()
                except InterpolationResolutionError as e:
                    conf._format_and_raise(key=key, value=None, cause=e)

            if isinstance(node, Container):
                value = BaseContainer._to_content(
                    node,
                    resolve=resolve,
                    throw_on_missing=throw_on_missing,
                    enum_to_str=enum_to_str,
                    structured_config_mode=structured_config_mode,
                )
            else:
                value = convert(node)
            return value

        # Special top-level states: None, MISSING, unresolved interpolation.
        if conf._is_none():
            return None
        elif conf._is_missing():
            if throw_on_missing:
                conf._format_and_raise(
                    key=None,
                    value=None,
                    cause=MissingMandatoryValue("Missing mandatory value"),
                )
            else:
                return MISSING
        elif not resolve and conf._is_interpolation():
            # Keep the raw interpolation string when not resolving.
            inter = conf._value()
            assert isinstance(inter, str)
            return inter

        if resolve:
            _conf = conf._dereference_node()
            assert isinstance(_conf, Container)
            conf = _conf

        if isinstance(conf, DictConfig):
            # Structured-config handling per structured_config_mode.
            if (
                conf._metadata.object_type not in (dict, None)
                and structured_config_mode == SCMode.DICT_CONFIG
            ):
                return conf
            if structured_config_mode == SCMode.INSTANTIATE and is_structured_config(
                conf._metadata.object_type
            ):
                return conf._to_object()

            retdict: Dict[DictKeyType, Any] = {}
            for key in conf.keys():
                value = get_node_value(key)
                if enum_to_str and isinstance(key, Enum):
                    key = f"{key.name}"

                retdict[key] = value
            return retdict
        elif isinstance(conf, ListConfig):
            retlist: List[Any] = []
            for index in range(len(conf)):
                item = get_node_value(index)
                retlist.append(item)

            return retlist
        assert False
|
| 305 |
+
|
| 306 |
+
    @staticmethod
    def _map_merge(dest: "BaseContainer", src: "BaseContainer") -> None:
        """Merge `src` into `dest` in place; `src` is not modified.

        Both arguments must be DictConfigs. Source None/interpolation replace
        the destination outright; otherwise keys are merged one by one, with
        container children merged recursively and value children validated
        against the destination node's type. Explicit flags on `src` replace
        the corresponding flags on `dest`.
        """
        from omegaconf import AnyNode, DictConfig, ValueNode

        assert isinstance(dest, DictConfig)
        assert isinstance(src, DictConfig)
        src_type = src._metadata.object_type
        src_ref_type = get_type_hint(src)
        assert src_ref_type is not None

        # If source DictConfig is:
        #  - None => set the destination DictConfig to None
        #  - an interpolation => set the destination DictConfig to be the same interpolation
        if src._is_none() or src._is_interpolation():
            dest._set_value(src._value())
            _update_types(node=dest, ref_type=src_ref_type, object_type=src_type)
            return

        dest._validate_merge(value=src)

        # Turn an interpolation/missing/None container into an empty concrete
        # container (or its structured-config prototype) so it can be merged into.
        def expand(node: Container) -> None:
            rt = node._metadata.ref_type
            val: Any
            if rt is not Any:
                if is_dict_annotation(rt):
                    val = {}
                elif is_list_annotation(rt) or is_tuple_annotation(rt):
                    val = []
                else:
                    val = rt
            elif isinstance(node, DictConfig):
                val = {}
            else:
                assert False

            node._set_value(val)

        if (
            src._is_missing()
            and not dest._is_missing()
            and is_structured_config(src_ref_type)
        ):
            # Replace `src` with a prototype of its corresponding structured config
            # whose fields are all missing (to avoid overwriting fields in `dest`).
            assert src_type is None  # src missing, so src's object_type should be None
            src_type = src_ref_type
            src = _create_structured_with_missing_fields(
                ref_type=src_ref_type, object_type=src_type
            )

        if (dest._is_interpolation() or dest._is_missing()) and not src._is_missing():
            expand(dest)

        src_items = list(src) if not src._is_missing() else []
        for key in src_items:
            src_node = src._get_node(key, validate_access=False)
            dest_node = dest._get_node(key, validate_access=False)
            assert isinstance(src_node, Node)
            assert dest_node is None or isinstance(dest_node, Node)
            src_value = _get_value(src_node)

            src_vk = get_value_kind(src_node)
            src_node_missing = src_vk is ValueKind.MANDATORY_MISSING

            if isinstance(dest_node, DictConfig):
                dest_node._validate_merge(value=src_node)

            # A None destination container becomes concrete before merging a
            # real (non-missing, non-None) source into it.
            if (
                isinstance(dest_node, Container)
                and dest_node._is_none()
                and not src_node_missing
                and not _is_none(src_node, resolve=True)
            ):
                expand(dest_node)

            # An interpolated destination pointing at a container is replaced
            # by a copy of its target, which is then merged into.
            if dest_node is not None and dest_node._is_interpolation():
                target_node = dest_node._maybe_dereference_node()
                if isinstance(target_node, Container):
                    dest[key] = target_node
                    dest_node = dest._get_node(key)

            is_optional, et = _resolve_optional(dest._metadata.element_type)
            if dest_node is None and is_structured_config(et) and not src_node_missing:
                # merging into a new node. Use element_type as a base
                dest[key] = DictConfig(
                    et, parent=dest, ref_type=et, is_optional=is_optional
                )
                dest_node = dest._get_node(key)

            if dest_node is not None:
                if isinstance(dest_node, BaseContainer):
                    if isinstance(src_node, BaseContainer):
                        # Container-into-container: recurse.
                        dest_node._merge_with(src_node)
                    elif not src_node_missing:
                        dest.__setitem__(key, src_node)
                else:
                    if isinstance(src_node, BaseContainer):
                        dest.__setitem__(key, src_node)
                    else:
                        assert isinstance(dest_node, (ValueNode, UnionNode))
                        assert isinstance(src_node, (ValueNode, UnionNode))
                        try:
                            if isinstance(dest_node, AnyNode):
                                if src_node_missing:
                                    node = copy.copy(src_node)
                                    # if src node is missing, use the value from the dest_node,
                                    # but validate it against the type of the src node before assigment
                                    node._set_value(dest_node._value())
                                else:
                                    node = src_node
                                dest.__setitem__(key, node)
                            else:
                                if not src_node_missing:
                                    dest_node._set_value(src_value)

                        except (ValidationError, ReadonlyConfigError) as e:
                            dest._format_and_raise(key=key, value=src_value, cause=e)
            else:
                from omegaconf import open_dict

                if is_structured_config(src_type):
                    # verified to be compatible above in _validate_merge
                    with open_dict(dest):
                        dest[key] = src._get_node(key)
                else:
                    dest[key] = src._get_node(key)

        _update_types(node=dest, ref_type=src_ref_type, object_type=src_type)

        # explicit flags on the source config are replacing the flag values in the destination
        flags = src._metadata.flags
        assert flags is not None
        for flag, value in flags.items():
            if value is not None:
                dest._set_flag(flag, value)
|
| 442 |
+
|
| 443 |
+
@staticmethod
def _list_merge(dest: Any, src: Any) -> None:
    """Merge ListConfig ``src`` into ListConfig ``dest`` in place.

    Unlike dict merging, list merging wholesale-replaces the destination's
    content with the source's (except for the None / MISSING / interpolation
    special cases handled first). Explicit flags on ``src`` then override
    the corresponding flags on ``dest``.
    """
    from omegaconf import DictConfig, ListConfig, OmegaConf

    assert isinstance(dest, ListConfig)
    assert isinstance(src, ListConfig)

    if src._is_none():
        dest._set_value(None)
    elif src._is_missing():
        # do not change dest if src is MISSING.
        if dest._metadata.element_type is Any:
            # adopt the source's element type only when dest is untyped
            dest._metadata.element_type = src._metadata.element_type
    elif src._is_interpolation():
        dest._set_value(src._value())
    else:
        # Build the merged content into a temporary list so dest is not
        # left half-modified if an element fails to merge/append.
        temp_target = ListConfig(content=[], parent=dest._get_parent())
        temp_target.__dict__["_metadata"] = copy.deepcopy(
            dest.__dict__["_metadata"]
        )
        is_optional, et = _resolve_optional(dest._metadata.element_type)
        if is_structured_config(et):
            # Merge each source dict element onto a prototype of the
            # structured element type so element typing is enforced.
            prototype = DictConfig(et, ref_type=et, is_optional=is_optional)
            for item in src._iter_ex(resolve=False):
                if isinstance(item, DictConfig):
                    item = OmegaConf.merge(prototype, item)
                temp_target.append(item)
        else:
            for item in src._iter_ex(resolve=False):
                temp_target.append(item)

        dest.__dict__["_content"] = temp_target.__dict__["_content"]

    # explicit flags on the source config are replacing the flag values in the destination
    flags = src._metadata.flags
    assert flags is not None
    for flag, value in flags.items():
        if value is not None:
            dest._set_flag(flag, value)
|
| 482 |
+
|
| 483 |
+
def merge_with(
    self,
    *others: Union[
        "BaseContainer", Dict[str, Any], List[Any], Tuple[Any, ...], Any
    ],
) -> None:
    """Merge ``others`` into this config, in order.

    Public wrapper around :meth:`_merge_with` that re-raises any failure
    through ``_format_and_raise`` so errors carry node context (full key,
    object type, ...).
    """
    try:
        self._merge_with(*others)
    except Exception as e:
        # re-raise with node context attached; does not return
        self._format_and_raise(key=None, value=None, cause=e)
|
| 493 |
+
|
| 494 |
+
def _merge_with(
    self,
    *others: Union[
        "BaseContainer", Dict[str, Any], List[Any], Tuple[Any, ...], Any
    ],
) -> None:
    from .dictconfig import DictConfig
    from .listconfig import ListConfig

    """merge a list of other Config objects into this one, overriding as needed"""
    for other in others:
        if other is None:
            raise ValueError("Cannot merge with a None config")

        # propagate allow_objects so arbitrary objects survive wrapping
        my_flags = {}
        if self._get_flag("allow_objects") is True:
            my_flags = {"allow_objects": True}
        # wrap plain dicts / lists / structured configs into containers
        other = _ensure_container(other, flags=my_flags)

        # dispatch on container kind; mixed dict/list merges are invalid
        if isinstance(self, DictConfig) and isinstance(other, DictConfig):
            BaseContainer._map_merge(self, other)
        elif isinstance(self, ListConfig) and isinstance(other, ListConfig):
            BaseContainer._list_merge(self, other)
        else:
            raise TypeError("Cannot merge DictConfig with ListConfig")

    # recursively correct the parent hierarchy after the merge
    self._re_parent()
|
| 522 |
+
|
| 523 |
+
# noinspection PyProtectedMember
def _set_item_impl(self, key: Any, value: Any) -> None:
    """
    Changes the value of the node key with the desired value. If the node key doesn't
    exist it creates a new one.
    """
    from .nodes import AnyNode, ValueNode

    if isinstance(value, Node):
        do_deepcopy = not self._get_flag("no_deepcopy_set_nodes")
        if not do_deepcopy and isinstance(value, Box):
            # if value is from the same config, perform a deepcopy no matter what.
            if self._get_root() is value._get_root():
                do_deepcopy = True

        if do_deepcopy:
            value = copy.deepcopy(value)
        value._set_parent(None)

        try:
            # temporarily re-key the node so that validation errors point
            # at the destination key; the original key is always restored
            old = value._key()
            value._set_key(key)
            self._validate_set(key, value)
        finally:
            value._set_key(old)
    else:
        self._validate_set(key, value)

    if self._get_flag("readonly"):
        raise ReadonlyConfigError("Cannot change read-only config container")

    input_is_node = isinstance(value, Node)
    target_node_ref = self._get_node(key)
    assert target_node_ref is None or isinstance(target_node_ref, Node)

    # a typed ValueNode (e.g. IntegerNode), as opposed to an untyped AnyNode
    input_is_typed_vnode = isinstance(value, ValueNode) and not isinstance(
        value, AnyNode
    )

    def get_target_type_hint(val: Any) -> Any:
        # type hint of the slot being assigned: the existing node's hint
        # if present, otherwise the container's declared element type
        if not is_structured_config(val):
            type_hint = self._metadata.element_type
        else:
            target = self._get_node(key)
            if target is None:
                type_hint = self._metadata.element_type
            else:
                assert isinstance(target, Node)
                type_hint = target._metadata.type_hint
        return type_hint

    target_type_hint = get_target_type_hint(value)
    _, target_ref_type = _resolve_optional(target_type_hint)

    def assign(value_key: Any, val: Node) -> None:
        # adopt the node as-is (no wrapping); rewire its parent and key
        assert val._get_parent() is None
        v = val
        v._set_parent(self)
        v._set_key(value_key)
        _deep_update_type_hint(node=v, type_hint=self._metadata.element_type)
        self.__dict__["_content"][value_key] = v

    if input_is_typed_vnode and not is_union_annotation(target_ref_type):
        assign(key, value)
    else:
        # input is not a ValueNode, can be primitive or box

        special_value = _is_special(value)
        # We use the `Node._set_value` method if the target node exists and:
        # 1. the target has an explicit ref_type, or
        # 2. the target is an AnyNode and the input is a primitive type.
        should_set_value = target_node_ref is not None and (
            target_node_ref._has_ref_type()
            or (
                isinstance(target_node_ref, AnyNode)
                and is_primitive_type_annotation(value)
            )
        )
        if should_set_value:
            if special_value and isinstance(value, Node):
                # unwrap None/MISSING/interpolation before _set_value
                value = value._value()
            self.__dict__["_content"][key]._set_value(value)
        elif input_is_node:
            if (
                special_value
                and (
                    is_container_annotation(target_ref_type)
                    or is_structured_config(target_ref_type)
                )
                or is_primitive_type_annotation(target_ref_type)
                or is_union_annotation(target_ref_type)
            ):
                # unwrap and rewrap so the new node matches the target hint
                value = _get_value(value)
                self._wrap_value_and_set(key, value, target_type_hint)
            else:
                assign(key, value)
        else:
            self._wrap_value_and_set(key, value, target_type_hint)
|
| 621 |
+
|
| 622 |
+
def _wrap_value_and_set(self, key: Any, val: Any, type_hint: Any) -> None:
    """Wrap raw ``val`` in a Node compatible with ``type_hint`` and store it at ``key``.

    Validation failures are re-raised with node context via _format_and_raise.
    """
    from omegaconf.omegaconf import _maybe_wrap

    is_optional, ref_type = _resolve_optional(type_hint)

    try:
        wrapped = _maybe_wrap(
            ref_type=ref_type,
            key=key,
            value=val,
            is_optional=is_optional,
            parent=self,
        )
    except ValidationError as e:
        # does not return; raises a contextualized error
        self._format_and_raise(key=key, value=val, cause=e)
    self.__dict__["_content"][key] = wrapped
|
| 638 |
+
|
| 639 |
+
@staticmethod
def _item_eq(
    c1: Container,
    k1: Union[DictKeyType, int],
    c2: Container,
    k2: Union[DictKeyType, int],
) -> bool:
    """Compare ``c1[k1]`` and ``c2[k2]`` for equality.

    Interpolation nodes are dereferenced before comparing; None==None and
    MISSING==MISSING compare equal.
    """
    v1 = c1._get_child(k1)
    v2 = c2._get_child(k2)
    assert v1 is not None and v2 is not None

    assert isinstance(v1, Node)
    assert isinstance(v2, Node)

    if v1._is_none() and v2._is_none():
        return True

    if v1._is_missing() and v2._is_missing():
        return True

    v1_inter = v1._is_interpolation()
    v2_inter = v2._is_interpolation()
    dv1: Optional[Node] = v1
    dv2: Optional[Node] = v2

    if v1_inter:
        dv1 = v1._maybe_dereference_node()
    if v2_inter:
        dv2 = v2._maybe_dereference_node()

    if v1_inter and v2_inter:
        if dv1 is None or dv2 is None:
            # at least one interpolation could not be resolved:
            # fall back to comparing the raw nodes
            return v1 == v2
        else:
            # both are not none, if both are containers compare as container
            if isinstance(dv1, Container) and isinstance(dv2, Container):
                if dv1 != dv2:
                    return False
            dv1 = _get_value(dv1)
            dv2 = _get_value(dv2)
            return dv1 == dv2
    elif not v1_inter and not v2_inter:
        # neither side is an interpolation: compare unwrapped values
        v1 = _get_value(v1)
        v2 = _get_value(v2)
        ret = v1 == v2
        assert isinstance(ret, bool)
        return ret
    else:
        # exactly one side is an interpolation: compare resolved values
        dv1 = _get_value(dv1)
        dv2 = _get_value(dv2)
        ret = dv1 == dv2
        assert isinstance(ret, bool)
        return ret
|
| 692 |
+
|
| 693 |
+
def _is_optional(self) -> bool:
|
| 694 |
+
return self.__dict__["_metadata"].optional is True
|
| 695 |
+
|
| 696 |
+
def _is_interpolation(self) -> bool:
    # True when the whole container's content is an interpolation string
    # (delegates to the module-level _is_interpolation from ._utils)
    return _is_interpolation(self.__dict__["_content"])
|
| 698 |
+
|
| 699 |
+
@abstractmethod
def _validate_get(self, key: Any, value: Any = None) -> None:
    # subclasses raise if reading `key` is not permitted
    # (e.g. struct-mode or typed-config violations)
    ...
|
| 702 |
+
|
| 703 |
+
@abstractmethod
def _validate_set(self, key: Any, value: Any) -> None:
    # subclasses raise if assigning `value` at `key` is not permitted
    # (type mismatch, non-optional field set to None, ...)
    ...
|
| 706 |
+
|
| 707 |
+
def _value(self) -> Any:
|
| 708 |
+
return self.__dict__["_content"]
|
| 709 |
+
|
| 710 |
+
def _get_full_key(self, key: Union[DictKeyType, int, slice, None]) -> str:
    """Return the absolute key of ``self[key]`` from the config root.

    The result uses dotted notation for dict levels and ``[i]`` notation for
    list levels (e.g. ``a.b[2].c``). Returns "" for unsupported key types or
    for an unkeyed root.

    :param key: child key to resolve, or None/"" to resolve self's own path.
    :raises ConfigCycleDetectedException: if the parent chain contains a cycle.
    """
    # fix: the local helper was misspelled `prepand`; renamed to `prepend`
    from .listconfig import ListConfig
    from .omegaconf import _select_one

    if not isinstance(key, (int, str, Enum, float, bool, slice, bytes, type(None))):
        return ""

    def _slice_to_str(x: slice) -> str:
        # mirror Python's slice syntax; omit the step when absent
        if x.step is not None:
            return f"{x.start}:{x.stop}:{x.step}"
        else:
            return f"{x.start}:{x.stop}"

    def prepend(
        full_key: str,
        parent_type: Any,
        cur_type: Any,
        key: Optional[Union[DictKeyType, int, slice]],
    ) -> str:
        # Prepend one path component to full_key, choosing `[k]` vs `k.`
        # form based on the parent/current container types.
        if key is None:
            return full_key

        if isinstance(key, slice):
            key = _slice_to_str(key)
        elif isinstance(key, Enum):
            key = key.name
        else:
            key = str(key)

        assert isinstance(key, str)

        if issubclass(parent_type, ListConfig):
            if full_key != "":
                if issubclass(cur_type, ListConfig):
                    full_key = f"[{key}]{full_key}"
                else:
                    full_key = f"[{key}].{full_key}"
            else:
                full_key = f"[{key}]"
        else:
            if full_key == "":
                full_key = key
            else:
                if issubclass(cur_type, ListConfig):
                    full_key = f"{key}{full_key}"
                else:
                    full_key = f"{key}.{full_key}"
        return full_key

    if key is not None and key != "":
        assert isinstance(self, Container)
        cur, _ = _select_one(
            c=self, key=str(key), throw_on_missing=False, throw_on_type_error=False
        )
        if cur is None:
            # key does not exist yet: treat self as the would-be parent
            cur = self
            full_key = prepend("", type(cur), None, key)
            if cur._key() is not None:
                full_key = prepend(
                    full_key, type(cur._get_parent()), type(cur), cur._key()
                )
        else:
            full_key = prepend("", type(cur._get_parent()), type(cur), cur._key())
    else:
        cur = self
        if cur._key() is None:
            return ""
        full_key = self._key()

    assert cur is not None
    memo = {id(cur)}  # remember already visited nodes so as to detect cycles
    while cur._get_parent() is not None:
        cur = cur._get_parent()
        if id(cur) in memo:
            raise ConfigCycleDetectedException(
                f"Cycle when iterating over parents of key `{key!s}`"
            )
        memo.add(id(cur))
        assert cur is not None
        if cur._key() is not None:
            full_key = prepend(
                full_key, type(cur._get_parent()), type(cur), cur._key()
            )

    return full_key
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
def _create_structured_with_missing_fields(
    ref_type: type, object_type: Optional[type] = None
) -> "DictConfig":
    """Build a DictConfig shaped like ``ref_type`` with every field set to MISSING.

    :param ref_type: structured config class providing the field layout and ref type.
    :param object_type: object type to record on the result's metadata (may be None).
    """
    from . import MISSING, DictConfig

    cfg_data = get_structured_config_data(ref_type)
    # blank out every field value; only the structure/typing is kept
    for v in cfg_data.values():
        v._set_value(MISSING)

    cfg = DictConfig(cfg_data)
    cfg._metadata.optional, cfg._metadata.ref_type = _resolve_optional(ref_type)
    cfg._metadata.object_type = object_type

    return cfg
|
| 811 |
+
|
| 812 |
+
|
| 813 |
+
def _update_types(node: Node, ref_type: Any, object_type: Optional[type]) -> None:
    """Propagate merge-source typing onto ``node`` after a merge."""
    # adopt the source's object type unless it is a plain dict
    if object_type is not None and not is_primitive_dict(object_type):
        node._metadata.object_type = object_type

    # only tighten an untyped (Any) ref type; never loosen an explicit one
    if node._metadata.ref_type is Any:
        _deep_update_type_hint(node, ref_type)
|
| 819 |
+
|
| 820 |
+
|
| 821 |
+
def _deep_update_type_hint(node: Node, type_hint: Any) -> None:
    """Ensure node is compatible with type_hint, mutating if necessary."""
    from omegaconf import DictConfig, ListConfig

    from ._utils import get_dict_key_value_types, get_list_element_type

    if type_hint is Any:
        # Any imposes no constraints; nothing to update
        return

    # raises if node cannot possibly satisfy the hint
    _shallow_validate_type_hint(node, type_hint)

    new_is_optional, new_ref_type = _resolve_optional(type_hint)
    node._metadata.ref_type = new_ref_type
    node._metadata.optional = new_is_optional

    if is_list_annotation(new_ref_type) and isinstance(node, ListConfig):
        new_element_type = get_list_element_type(new_ref_type)
        node._metadata.element_type = new_element_type
        # special (None/MISSING/interpolation) lists have no children to recurse into
        if not _is_special(node):
            for i in range(len(node)):
                _deep_update_subnode(node, i, new_element_type)

    if is_dict_annotation(new_ref_type) and isinstance(node, DictConfig):
        new_key_type, new_element_type = get_dict_key_value_types(new_ref_type)
        node._metadata.key_type = new_key_type
        node._metadata.element_type = new_element_type
        if not _is_special(node):
            for key in node:
                # existing keys must already satisfy the new key type
                if new_key_type is not Any and not isinstance(key, new_key_type):
                    raise KeyValidationError(
                        f"Key {key!r} ({type(key).__name__}) is incompatible"
                        + f" with key type hint '{new_key_type.__name__}'"
                    )
                _deep_update_subnode(node, key, new_element_type)
|
| 855 |
+
|
| 856 |
+
|
| 857 |
+
def _deep_update_subnode(node: BaseContainer, key: Any, value_type_hint: Any) -> None:
    """Get node[key] and ensure it is compatible with value_type_hint, mutating if necessary."""
    subnode = node._get_node(key)
    assert isinstance(subnode, Node)
    if _is_special(subnode):
        # Ensure special values are wrapped in a Node subclass that
        # is compatible with the type hint.
        node._wrap_value_and_set(key, subnode._value(), value_type_hint)
        # re-fetch: the wrapping above replaced the stored node
        subnode = node._get_node(key)
    assert isinstance(subnode, Node)
    _deep_update_type_hint(subnode, value_type_hint)
|
| 868 |
+
|
| 869 |
+
|
| 870 |
+
def _shallow_validate_type_hint(node: Node, type_hint: Any) -> None:
    """Error if node's type, content and metadata are not compatible with type_hint."""
    from omegaconf import DictConfig, ListConfig, ValueNode

    is_optional, ref_type = _resolve_optional(type_hint)

    vk = get_value_kind(node)

    if node._is_none():
        # None is only valid against an Optional hint
        if not is_optional:
            value = _get_value(node)
            raise ValidationError(
                f"Value {value!r} ({type(value).__name__})"
                + f" is incompatible with type hint '{ref_type.__name__}'"
            )
        return
    elif vk in (ValueKind.MANDATORY_MISSING, ValueKind.INTERPOLATION):
        # MISSING and interpolations are validated later, on access
        return
    elif vk == ValueKind.VALUE:
        if is_primitive_type_annotation(ref_type) and isinstance(node, ValueNode):
            value = node._value()
            if not isinstance(value, ref_type):
                raise ValidationError(
                    f"Value {value!r} ({type(value).__name__})"
                    + f" is incompatible with type hint '{ref_type.__name__}'"
                )
        elif is_structured_config(ref_type) and isinstance(node, DictConfig):
            return
        elif is_dict_annotation(ref_type) and isinstance(node, DictConfig):
            return
        elif is_list_annotation(ref_type) and isinstance(node, ListConfig):
            return
        else:
            # node kind and hint kind disagree: report with the most
            # specific message available
            if isinstance(node, ValueNode):
                value = node._value()
                raise ValidationError(
                    f"Value {value!r} ({type(value).__name__})"
                    + f" is incompatible with type hint '{ref_type}'"
                )
            else:
                raise ValidationError(
                    f"'{type(node).__name__}' is incompatible"
                    + f" with type hint '{ref_type}'"
                )

    else:
        # get_value_kind is exhaustive over the kinds handled above
        assert False
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/dictconfig.py
ADDED
|
@@ -0,0 +1,776 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import copy
|
| 2 |
+
from enum import Enum
|
| 3 |
+
from typing import (
|
| 4 |
+
Any,
|
| 5 |
+
Dict,
|
| 6 |
+
ItemsView,
|
| 7 |
+
Iterable,
|
| 8 |
+
Iterator,
|
| 9 |
+
KeysView,
|
| 10 |
+
List,
|
| 11 |
+
MutableMapping,
|
| 12 |
+
Optional,
|
| 13 |
+
Sequence,
|
| 14 |
+
Tuple,
|
| 15 |
+
Type,
|
| 16 |
+
Union,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
from ._utils import (
|
| 20 |
+
_DEFAULT_MARKER_,
|
| 21 |
+
ValueKind,
|
| 22 |
+
_get_value,
|
| 23 |
+
_is_interpolation,
|
| 24 |
+
_is_missing_literal,
|
| 25 |
+
_is_missing_value,
|
| 26 |
+
_is_none,
|
| 27 |
+
_resolve_optional,
|
| 28 |
+
_valid_dict_key_annotation_type,
|
| 29 |
+
format_and_raise,
|
| 30 |
+
get_structured_config_data,
|
| 31 |
+
get_structured_config_init_field_names,
|
| 32 |
+
get_type_of,
|
| 33 |
+
get_value_kind,
|
| 34 |
+
is_container_annotation,
|
| 35 |
+
is_dict,
|
| 36 |
+
is_primitive_dict,
|
| 37 |
+
is_structured_config,
|
| 38 |
+
is_structured_config_frozen,
|
| 39 |
+
type_str,
|
| 40 |
+
)
|
| 41 |
+
from .base import Box, Container, ContainerMetadata, DictKeyType, Node
|
| 42 |
+
from .basecontainer import BaseContainer
|
| 43 |
+
from .errors import (
|
| 44 |
+
ConfigAttributeError,
|
| 45 |
+
ConfigKeyError,
|
| 46 |
+
ConfigTypeError,
|
| 47 |
+
InterpolationResolutionError,
|
| 48 |
+
KeyValidationError,
|
| 49 |
+
MissingMandatoryValue,
|
| 50 |
+
OmegaConfBaseException,
|
| 51 |
+
ReadonlyConfigError,
|
| 52 |
+
ValidationError,
|
| 53 |
+
)
|
| 54 |
+
from .nodes import EnumNode, ValueNode
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class DictConfig(BaseContainer, MutableMapping[Any, Any]):
    """Mapping config node: keys map to child Nodes, with optional key/element typing."""

    # container metadata: key/element types, ref/object type, flags, optionality
    _metadata: ContainerMetadata
    # backing store: dict of child nodes, or None (null config),
    # or a string (interpolation / MISSING marker)
    _content: Union[Dict[DictKeyType, Node], None, str]
|
| 61 |
+
|
| 62 |
+
def __init__(
    self,
    content: Union[Dict[DictKeyType, Any], "DictConfig", Any],
    key: Any = None,
    parent: Optional[Box] = None,
    ref_type: Union[Any, Type[Any]] = Any,
    key_type: Union[Any, Type[Any]] = Any,
    element_type: Union[Any, Type[Any]] = Any,
    is_optional: bool = True,
    flags: Optional[Dict[str, bool]] = None,
) -> None:
    """Build a DictConfig from a dict, another DictConfig, or a structured config.

    :param content: initial content (dict, DictConfig, dataclass/attrs instance, ...).
    :param key: this node's key within its parent, if any.
    :param parent: parent container, if any.
    :param ref_type: declared reference type for the container.
    :param key_type: declared type of the dict keys.
    :param element_type: declared type of the dict values.
    :param is_optional: whether the container may be assigned None.
    :param flags: node flags (readonly, struct, ...); inherited from
        ``content`` when it is a DictConfig and flags is None.
    """
    try:
        if isinstance(content, DictConfig):
            if flags is None:
                # inherit the source config's flags
                flags = content._metadata.flags
        super().__init__(
            parent=parent,
            metadata=ContainerMetadata(
                key=key,
                optional=is_optional,
                ref_type=ref_type,
                object_type=dict,
                key_type=key_type,
                element_type=element_type,
                flags=flags,
            ),
        )

        if not _valid_dict_key_annotation_type(key_type):
            raise KeyValidationError(f"Unsupported key type {key_type}")

        if is_structured_config(content) or is_structured_config(ref_type):
            self._set_value(content, flags=flags)
            # frozen structured configs become read-only containers
            if is_structured_config_frozen(content) or is_structured_config_frozen(
                ref_type
            ):
                self._set_flag("readonly", True)

        else:
            if isinstance(content, DictConfig):
                # copy the source metadata, then override with this
                # constructor's explicit typing arguments
                metadata = copy.deepcopy(content._metadata)
                metadata.key = key
                metadata.ref_type = ref_type
                metadata.optional = is_optional
                metadata.element_type = element_type
                metadata.key_type = key_type
                self.__dict__["_metadata"] = metadata
            self._set_value(content, flags=flags)
    except Exception as ex:
        # re-raise with node context attached
        format_and_raise(node=None, key=key, value=None, cause=ex, msg=str(ex))
|
| 112 |
+
|
| 113 |
+
def __deepcopy__(self, memo: Dict[int, Any]) -> "DictConfig":
    """Deep-copy this config; children are copied, the parent link is shared."""
    res = DictConfig(None)
    res.__dict__["_metadata"] = copy.deepcopy(self.__dict__["_metadata"], memo=memo)
    res.__dict__["_flags_cache"] = copy.deepcopy(
        self.__dict__["_flags_cache"], memo=memo
    )

    src_content = self.__dict__["_content"]
    if isinstance(src_content, dict):
        content_copy = {}
        for k, v in src_content.items():
            # detach each child from its parent while copying so the
            # whole parent chain is not dragged into the deepcopy;
            # the original child's parent is always restored
            old_parent = v.__dict__["_parent"]
            try:
                v.__dict__["_parent"] = None
                vc = copy.deepcopy(v, memo=memo)
                vc.__dict__["_parent"] = res
                content_copy[k] = vc
            finally:
                v.__dict__["_parent"] = old_parent
    else:
        # None and strings can be assigned as is
        content_copy = src_content

    res.__dict__["_content"] = content_copy
    # parent is retained, but not copied
    res.__dict__["_parent"] = self.__dict__["_parent"]
    return res
|
| 140 |
+
|
| 141 |
+
def copy(self) -> "DictConfig":
    # shallow copy, mirroring dict.copy(); use copy.deepcopy for a deep copy
    return copy.copy(self)
|
| 143 |
+
|
| 144 |
+
def _is_typed(self) -> bool:
    # typed means backed by a structured config class, not a plain dict or Any
    return self._metadata.object_type not in (Any, None) and not is_dict(
        self._metadata.object_type
    )
|
| 148 |
+
|
| 149 |
+
def _validate_get(self, key: Any, value: Any = None) -> None:
    """Raise ConfigAttributeError when reading a key absent from a typed or struct config."""
    is_typed = self._is_typed()

    is_struct = self._get_flag("struct") is True
    if key not in self.__dict__["_content"]:
        if is_typed:
            # do not raise an exception if struct is explicitly set to False
            if self._get_node_flag("struct") is False:
                return
        if is_typed or is_struct:
            if is_typed:
                assert self._metadata.object_type not in (dict, None)
                msg = f"Key '{key}' not in '{self._metadata.object_type.__name__}'"
            else:
                msg = f"Key '{key}' is not in struct"
            # does not return; raises with node context
            self._format_and_raise(
                key=key, value=value, cause=ConfigAttributeError(msg)
            )
|
| 167 |
+
|
| 168 |
+
def _validate_set(self, key: Any, value: Any) -> None:
    """Raise ValidationError if assigning ``value`` at ``key`` violates the declared types."""
    from omegaconf import OmegaConf

    vk = get_value_kind(value)
    if vk == ValueKind.INTERPOLATION:
        # interpolations are validated lazily, at resolution time
        return
    if _is_none(value):
        self._validate_non_optional(key, value)
        return
    if vk == ValueKind.MANDATORY_MISSING or value is None:
        return

    # key=None means the container itself is being assigned
    target = self._get_node(key) if key is not None else self

    target_has_ref_type = isinstance(
        target, DictConfig
    ) and target._metadata.ref_type not in (Any, dict)
    is_valid_target = target is None or not target_has_ref_type

    if is_valid_target:
        # untyped target accepts anything
        return

    assert isinstance(target, Node)

    target_type = target._metadata.ref_type
    value_type = OmegaConf.get_type(value)

    if is_dict(value_type) and is_dict(target_type):
        return
    if is_container_annotation(target_type) and not is_container_annotation(
        value_type
    ):
        raise ValidationError(
            f"Cannot assign {type_str(value_type)} to {type_str(target_type)}"
        )

    if target_type is not None and value_type is not None:
        # strip generic parameters (e.g. Dict[str, int] -> dict) for issubclass
        origin = getattr(target_type, "__origin__", target_type)
        if not issubclass(value_type, origin):
            self._raise_invalid_value(value, value_type, target_type)
|
| 208 |
+
|
| 209 |
+
def _validate_merge(self, value: Any) -> None:
    """Raise if merging ``value`` into this config would violate typing rules."""
    from omegaconf import OmegaConf

    dest = self
    src = value

    # a None source must be permitted by dest's optionality
    self._validate_non_optional(None, src)

    dest_obj_type = OmegaConf.get_type(dest)
    src_obj_type = OmegaConf.get_type(src)

    if dest._is_missing() and src._metadata.object_type not in (dict, None):
        # dest has no content yet: validate the incoming value wholesale
        self._validate_set(key=None, value=_get_value(src))

    if src._is_missing():
        return

    # merging a typed (non-dict, non-None) source into a structured dest
    # requires the source's object type to subclass the destination's
    validation_error = (
        dest_obj_type is not None
        and src_obj_type is not None
        and is_structured_config(dest_obj_type)
        and not src._is_none()
        and not is_dict(src_obj_type)
        and not issubclass(src_obj_type, dest_obj_type)
    )
    if validation_error:
        msg = (
            f"Merge error: {type_str(src_obj_type)} is not a "
            f"subclass of {type_str(dest_obj_type)}. value: {src}"
        )
        raise ValidationError(msg)
|
| 240 |
+
|
| 241 |
+
def _validate_non_optional(self, key: Optional[DictKeyType], value: Any) -> None:
    """Raise if `value` is None but the addressed field is not Optional."""
    if not _is_none(value, resolve=True, throw_on_resolution_failure=False):
        return

    if key is None:
        field_is_optional = self._is_optional()
    else:
        child = self._get_node(key)
        if child is None:
            # No existing node: fall back to the declared element type.
            field_is_optional, _ = _resolve_optional(self._metadata.element_type)
        else:
            assert isinstance(child, Node)
            field_is_optional = child._is_optional()

    if not field_is_optional:
        self._format_and_raise(
            key=key,
            value=value,
            cause=ValidationError("field '$FULL_KEY' is not Optional"),
        )
|
| 262 |
+
|
| 263 |
+
def _raise_invalid_value(
    self, value: Any, value_type: Any, target_type: Any
) -> None:
    """Always raises: `value_type` is not assignable where `target_type` is expected."""
    assert value_type is not None
    assert target_type is not None
    raise ValidationError(
        f"Invalid type assigned: {type_str(value_type)} is not a "
        f"subclass of {type_str(target_type)}. value: {value}"
    )
|
| 273 |
+
|
| 274 |
+
def _validate_and_normalize_key(self, key: Any) -> DictKeyType:
    """Validate `key` against this config's declared key type and normalize it."""
    declared_key_type = self._metadata.key_type
    return self._s_validate_and_normalize_key(declared_key_type, key)
|
| 276 |
+
|
| 277 |
+
def _s_validate_and_normalize_key(self, key_type: Any, key: Any) -> DictKeyType:
    """Check `key` against `key_type`, coercing where the type allows it."""
    if key_type is Any:
        # Accept any of the union members of DictKeyType.
        for candidate in DictKeyType.__args__:  # type: ignore
            if isinstance(key, candidate):
                return key  # type: ignore
        raise KeyValidationError("Incompatible key type '$KEY_TYPE'")

    if key_type is bool and key in [0, 1]:
        # Python treats True as 1 and False as 0 when used as dict keys
        # (hash(0) == hash(False), hash(1) == hash(True)), so coerce.
        return bool(key)

    if key_type in (str, bytes, int, float, bool):  # primitive type
        if isinstance(key, key_type):
            return key  # type: ignore
        raise KeyValidationError(
            f"Key $KEY ($KEY_TYPE) is incompatible with ({key_type.__name__})"
        )

    if issubclass(key_type, Enum):
        try:
            return EnumNode.validate_and_convert_to_enum(key_type, key)
        except ValidationError:
            valid = ", ".join([x for x in key_type.__members__.keys()])
            raise KeyValidationError(
                f"Key '$KEY' is incompatible with the enum type '{key_type.__name__}', valid: [{valid}]"
            )

    assert False, f"Unsupported key type {key_type}"
|
| 305 |
+
|
| 306 |
+
def __setitem__(self, key: DictKeyType, value: Any) -> None:
    """Map-style assignment: ``cfg[key] = value``."""
    try:
        self.__set_impl(key=key, value=value)
    except AttributeError as e:
        # Attribute errors surface as key errors for dict-style access.
        self._format_and_raise(
            key=key, value=value, type_override=ConfigKeyError, cause=e
        )
    except Exception as e:
        self._format_and_raise(key=key, value=value, cause=e)
|
| 315 |
+
|
| 316 |
+
def __set_impl(self, key: DictKeyType, value: Any) -> None:
    """Validate/normalize the key, then store `value` under it."""
    normalized = self._validate_and_normalize_key(key)
    self._set_item_impl(normalized, value)
|
| 319 |
+
|
| 320 |
+
# hide content while inspecting in debugger
|
| 321 |
+
def __dir__(self) -> Iterable[str]:
    """List member names for introspection; hide content while missing/None."""
    if self._is_missing() or self._is_none():
        return []
    content = self.__dict__["_content"]
    return content.keys()  # type: ignore
|
| 325 |
+
|
| 326 |
+
def __setattr__(self, key: str, value: Any) -> None:
    """
    Attribute-style assignment: ``cfg.key = value``.

    :param key: attribute name used as the config key
    :param value: value to store
    """
    try:
        self.__set_impl(key, value)
    except Exception as e:
        # Already-formatted omegaconf errors are re-raised unchanged.
        if isinstance(e, OmegaConfBaseException) and e._initialized:
            raise e
        self._format_and_raise(key=key, value=value, cause=e)
        assert False
|
| 340 |
+
|
| 341 |
+
def __getattr__(self, key: str) -> Any:
    """
    Attribute-style access: ``cfg.key``.

    :param key: attribute name used as the config key
    :return: the resolved value
    """
    if key == "__name__":
        raise AttributeError()

    try:
        return self._get_impl(
            key=key, default_value=_DEFAULT_MARKER_, validate_key=False
        )
    except ConfigKeyError as e:
        # Key errors become attribute errors for attribute-style access.
        self._format_and_raise(
            key=key, value=None, cause=e, type_override=ConfigAttributeError
        )
    except Exception as e:
        self._format_and_raise(key=key, value=None, cause=e)
|
| 360 |
+
|
| 361 |
+
def __getitem__(self, key: DictKeyType) -> Any:
    """
    Map-style access: ``cfg[key]``.

    :param key: the config key
    :return: the resolved value
    """
    try:
        return self._get_impl(key=key, default_value=_DEFAULT_MARKER_)
    except AttributeError as e:
        # Attribute errors become key errors for dict-style access.
        self._format_and_raise(
            key=key, value=None, cause=e, type_override=ConfigKeyError
        )
    except Exception as e:
        self._format_and_raise(key=key, value=None, cause=e)
|
| 376 |
+
|
| 377 |
+
def __delattr__(self, key: str) -> None:
    """
    Attribute-style deletion: ``del cfg.key``.

    :param key: attribute name to delete
    """
    if self._get_flag("readonly"):
        self._format_and_raise(
            key=key,
            value=None,
            cause=ReadonlyConfigError(
                "DictConfig in read-only mode does not support deletion"
            ),
        )
    try:
        del self.__dict__["_content"][key]
    except KeyError:
        self._format_and_raise(
            key=key,
            value=None,
            cause=ConfigAttributeError("Attribute not found: '$KEY'"),
        )
|
| 396 |
+
|
| 397 |
+
def __delitem__(self, key: DictKeyType) -> None:
    """Map-style deletion: ``del cfg[key]``. Rejected on readonly/struct/typed configs."""
    key = self._validate_and_normalize_key(key)
    if self._get_flag("readonly"):
        self._format_and_raise(
            key=key,
            value=None,
            cause=ReadonlyConfigError(
                "DictConfig in read-only mode does not support deletion"
            ),
        )
    if self._get_flag("struct"):
        self._format_and_raise(
            key=key,
            value=None,
            cause=ConfigTypeError(
                "DictConfig in struct mode does not support deletion"
            ),
        )
    if self._is_typed() and self._get_node_flag("struct") is not False:
        self._format_and_raise(
            key=key,
            value=None,
            cause=ConfigTypeError(
                f"{type_str(self._metadata.object_type)} (DictConfig) does not support deletion"
            ),
        )

    try:
        del self.__dict__["_content"][key]
    except KeyError:
        self._format_and_raise(
            key=key, value=None, cause=ConfigKeyError("Key not found: '$KEY'")
        )
|
| 429 |
+
|
| 430 |
+
def get(self, key: DictKeyType, default_value: Any = None) -> Any:
    """Return the value for `key` if `key` is in the dictionary, else
    `default_value` (defaulting to `None`)."""
    try:
        return self._get_impl(key=key, default_value=default_value)
    except KeyValidationError as e:
        self._format_and_raise(key=key, value=None, cause=e)
|
| 437 |
+
|
| 438 |
+
def _get_impl(
    self, key: DictKeyType, default_value: Any, validate_key: bool = True
) -> Any:
    """Look up `key` and resolve it; fall back to `default_value` when provided."""
    try:
        node = self._get_child(
            key=key, throw_on_missing_key=True, validate_key=validate_key
        )
    except (ConfigAttributeError, ConfigKeyError):
        if default_value is _DEFAULT_MARKER_:
            raise
        return default_value
    assert isinstance(node, Node)
    return self._resolve_with_default(
        key=key, value=node, default_value=default_value
    )
|
| 454 |
+
|
| 455 |
+
def _get_node(
    self,
    key: DictKeyType,
    validate_access: bool = True,
    validate_key: bool = True,
    throw_on_missing_value: bool = False,
    throw_on_missing_key: bool = False,
) -> Optional[Node]:
    """Fetch the raw child node for `key` without resolving interpolations."""
    try:
        key = self._validate_and_normalize_key(key)
    except KeyValidationError:
        if validate_access and validate_key:
            raise
        if throw_on_missing_key:
            raise ConfigAttributeError
        return None

    if validate_access:
        self._validate_get(key)

    child: Optional[Node] = self.__dict__["_content"].get(key)
    if child is None:
        if throw_on_missing_key:
            raise ConfigKeyError(f"Missing key {key!s}")
    elif throw_on_missing_value and child._is_missing():
        raise MissingMandatoryValue("Missing mandatory value: $KEY")
    return child
|
| 484 |
+
|
| 485 |
+
def pop(self, key: DictKeyType, default: Any = _DEFAULT_MARKER_) -> Any:
    """Remove `key` and return its resolved value, or `default` when absent."""
    try:
        if self._get_flag("readonly"):
            raise ReadonlyConfigError("Cannot pop from read-only node")
        if self._get_flag("struct"):
            raise ConfigTypeError("DictConfig in struct mode does not support pop")
        if self._is_typed() and self._get_node_flag("struct") is not False:
            raise ConfigTypeError(
                f"{type_str(self._metadata.object_type)} (DictConfig) does not support pop"
            )

        key = self._validate_and_normalize_key(key)
        child = self._get_child(key=key, validate_access=False)
        if child is None:
            if default is not _DEFAULT_MARKER_:
                return default
            full = self._get_full_key(key=key)
            if full != key:
                raise ConfigKeyError(
                    f"Key not found: '{key!s}' (path: '{full}')"
                )
            raise ConfigKeyError(f"Key not found: '{key!s}'")

        assert isinstance(child, Node)
        popped = self._resolve_with_default(
            key=key, value=child, default_value=default
        )
        del self[key]
        return popped
    except Exception as e:
        self._format_and_raise(key=key, value=None, cause=e)
|
| 518 |
+
|
| 519 |
+
def keys(self) -> KeysView[DictKeyType]:
    """Keys view over this config; empty for missing/interpolation/None content."""
    if self._is_missing() or self._is_interpolation() or self._is_none():
        return {}.keys()
    content_keys = self.__dict__["_content"].keys()
    assert isinstance(content_keys, KeysView)
    return content_keys
|
| 525 |
+
|
| 526 |
+
def __contains__(self, key: object) -> bool:
    """
    A key is contained in a DictConfig if there is an associated value and
    it is not a mandatory missing value ('???').

    :param key: candidate key
    :return: True if the key maps to a usable value
    """
    try:
        key = self._validate_and_normalize_key(key)
    except KeyValidationError:
        return False

    try:
        child = self._get_child(key)
        assert child is None or isinstance(child, Node)
    except (KeyError, AttributeError):
        child = None

    if child is None:
        return False

    try:
        self._resolve_with_default(key=key, value=child)
    except InterpolationResolutionError:
        # Interpolations that fail count as existing.
        return True
    except MissingMandatoryValue:
        # Missing values count as *not* existing.
        return False
    return True
|
| 557 |
+
|
| 558 |
+
def __iter__(self) -> Iterator[DictKeyType]:
    """Iterate over keys, dict-style."""
    return iter(self.keys())
|
| 560 |
+
|
| 561 |
+
def items(self) -> ItemsView[DictKeyType, Any]:
    """Resolved (key, value) items view, dict-style."""
    resolved = dict(self.items_ex(resolve=True, keys=None))
    return resolved.items()
|
| 563 |
+
|
| 564 |
+
def setdefault(self, key: DictKeyType, default: Any = None) -> Any:
    """Return ``self[key]`` if present; otherwise insert `default` and return it."""
    if key not in self:
        self.__setitem__(key, default)
        return default
    return self.__getitem__(key)
|
| 571 |
+
|
| 572 |
+
def items_ex(
    self, resolve: bool = True, keys: Optional[Sequence[DictKeyType]] = None
) -> List[Tuple[DictKeyType, Any]]:
    """Like items(), optionally skipping resolution and filtering by `keys`."""
    if self._is_none():
        self._format_and_raise(
            key=None,
            value=None,
            cause=TypeError("Cannot iterate a DictConfig object representing None"),
        )
    if self._is_missing():
        raise MissingMandatoryValue("Cannot iterate a missing DictConfig")

    pairs: List[Tuple[DictKeyType, Any]] = []
    for k in self.keys():
        if resolve:
            v = self[k]
        else:
            # Raw access: unwrap value nodes but do not resolve interpolations.
            v = self.__dict__["_content"][k]
            if isinstance(v, ValueNode):
                v = v._value()
        if keys is None or k in keys:
            pairs.append((k, v))
    return pairs
|
| 597 |
+
|
| 598 |
+
def __eq__(self, other: Any) -> bool:
    """Structural equality against dicts, structured configs and DictConfigs."""
    if other is None:
        return self.__dict__["_content"] is None
    if is_primitive_dict(other) or is_structured_config(other):
        wrapped = DictConfig(other, flags={"allow_objects": True})
        return DictConfig._dict_conf_eq(self, wrapped)
    if isinstance(other, DictConfig):
        return DictConfig._dict_conf_eq(self, other)
    if self._is_missing():
        return _is_missing_literal(other)
    return NotImplemented
|
| 609 |
+
|
| 610 |
+
def __ne__(self, other: Any) -> bool:
    """Negation of __eq__, propagating NotImplemented."""
    eq = self.__eq__(other)
    return NotImplemented if eq is NotImplemented else not eq
|
| 615 |
+
|
| 616 |
+
def __hash__(self) -> int:
    """Hash based on the string representation of the config."""
    return hash(str(self))
|
| 618 |
+
|
| 619 |
+
def _promote(self, type_or_prototype: Optional[Type[Any]]) -> None:
    """
    Retypes a node.
    This should only be used in rare circumstances, where you want to dynamically change
    the runtime structured-type of a DictConfig.
    It will change the type and add the additional fields based on the input class or object
    """
    if type_or_prototype is None:
        return
    if not is_structured_config(type_or_prototype):
        raise ValueError(f"Expected structured config class: {type_or_prototype}")

    from omegaconf import OmegaConf

    prototype: DictConfig = OmegaConf.structured(type_or_prototype)
    promoted_type = prototype._metadata.object_type
    # Clear the type so assignment validation does not reject the promotion.
    prototype._metadata.object_type = None
    self.merge_with(prototype)
    # Restore the promoted type on self.
    self._metadata.object_type = promoted_type
|
| 640 |
+
|
| 641 |
+
def _set_value(self, value: Any, flags: Optional[Dict[str, bool]] = None) -> None:
    """Replace this config's content, rolling back on failure."""
    try:
        backup = self.__dict__["_content"]
        self._set_value_impl(value, flags)
    except Exception as e:
        # Restore the previous content so a failed set leaves the node intact.
        self.__dict__["_content"] = backup
        raise e
|
| 648 |
+
|
| 649 |
+
def _set_value_impl(
    self, value: Any, flags: Optional[Dict[str, bool]] = None
) -> None:
    """Set content from a None/interpolation/MISSING/structured/dict/DictConfig source."""
    from omegaconf import MISSING, flag_override

    if flags is None:
        flags = {}

    assert not isinstance(value, ValueNode)
    self._validate_set(key=None, value=value)

    # Special scalar-like contents first; each clears the object type.
    if _is_none(value, resolve=True):
        self.__dict__["_content"] = None
        self._metadata.object_type = None
        return
    if _is_interpolation(value, strict_interpolation_validation=True):
        self.__dict__["_content"] = value
        self._metadata.object_type = None
        return
    if _is_missing_value(value):
        self.__dict__["_content"] = MISSING
        self._metadata.object_type = None
        return

    # Dict-like contents: copy items one by one with struct/readonly relaxed.
    self.__dict__["_content"] = {}
    if is_structured_config(value):
        self._metadata.object_type = None
        allow = self._get_flag("allow_objects")
        data = get_structured_config_data(value, allow_objects=allow)
        with flag_override(self, ["struct", "readonly"], False):
            for k, v in data.items():
                self.__setitem__(k, v)
        self._metadata.object_type = get_type_of(value)
    elif isinstance(value, DictConfig):
        self._metadata.flags = copy.deepcopy(flags)
        with flag_override(self, ["struct", "readonly"], False):
            for k, v in value.__dict__["_content"].items():
                self.__setitem__(k, v)
        self._metadata.object_type = value._metadata.object_type
    elif isinstance(value, dict):
        with flag_override(self, ["struct", "readonly"], False):
            for k, v in value.items():
                self.__setitem__(k, v)
        self._metadata.object_type = dict
    else:  # pragma: no cover
        raise ValidationError(f"Unsupported value type: {value}")
|
| 696 |
+
|
| 697 |
+
@staticmethod
def _dict_conf_eq(d1: "DictConfig", d2: "DictConfig") -> bool:
    """Deep structural equality between two DictConfig instances."""
    d1_is_none = d1.__dict__["_content"] is None
    d2_is_none = d2.__dict__["_content"] is None
    if d1_is_none or d2_is_none:
        # Equal only when both represent None.
        return d1_is_none and d2_is_none

    assert isinstance(d1, DictConfig)
    assert isinstance(d2, DictConfig)
    if len(d1) != len(d2):
        return False
    if d1._is_missing() or d2._is_missing():
        return d1._is_missing() is d2._is_missing()

    d2_content = d2.__dict__["_content"]
    for k, _ in d1.items_ex(resolve=False):
        if k not in d2_content:
            return False
        if not BaseContainer._item_eq(d1, k, d2, k):
            return False
    return True
|
| 721 |
+
|
| 722 |
+
def _to_object(self) -> Any:
    """
    Instantiate an instance of `self._metadata.object_type`.
    This requires `self` to be a structured config.
    Nested subconfigs are converted by calling `OmegaConf.to_object`.
    """
    from omegaconf import OmegaConf

    object_type = self._metadata.object_type
    assert is_structured_config(object_type)
    init_field_names = set(get_structured_config_init_field_names(object_type))

    # Split values into constructor kwargs vs post-construction attributes.
    init_field_items: Dict[str, Any] = {}
    non_init_field_items: Dict[str, Any] = {}
    for k in self.keys():
        assert isinstance(k, str)
        node = self._get_child(k)
        assert isinstance(node, Node)
        try:
            node = node._dereference_node()
        except InterpolationResolutionError as e:
            self._format_and_raise(key=k, value=None, cause=e)
        if node._is_missing():
            if k not in init_field_names:
                continue  # MISSING is ignored for init=False fields
            self._format_and_raise(
                key=k,
                value=None,
                cause=MissingMandatoryValue(
                    "Structured config of type `$OBJECT_TYPE` has missing mandatory value: $KEY"
                ),
            )
        if isinstance(node, Container):
            v = OmegaConf.to_object(node)
        else:
            v = node._value()

        if k in init_field_names:
            init_field_items[k] = v
        else:
            non_init_field_items[k] = v

    try:
        result = object_type(**init_field_items)
    except TypeError as exc:
        self._format_and_raise(
            key=None,
            value=None,
            cause=exc,
            msg="Could not create instance of `$OBJECT_TYPE`: " + str(exc),
        )

    # init=False fields are set directly on the constructed instance.
    for k, v in non_init_field_items.items():
        setattr(result, k, v)
    return result
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (177 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarLexer.py
ADDED
|
@@ -0,0 +1,337 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated from /tmp/build-via-sdist-fm63w174/omegaconf-2.3.0/omegaconf/grammar/OmegaConfGrammarLexer.g4 by ANTLR 4.9.3
|
| 2 |
+
from antlr4 import *
|
| 3 |
+
from io import StringIO
|
| 4 |
+
import sys
|
| 5 |
+
if sys.version_info[1] > 5:
|
| 6 |
+
from typing import TextIO
|
| 7 |
+
else:
|
| 8 |
+
from typing.io import TextIO
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def serializedATN():
|
| 13 |
+
with StringIO() as buf:
|
| 14 |
+
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\36")
|
| 15 |
+
buf.write("\u01e7\b\1\b\1\b\1\b\1\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5")
|
| 16 |
+
buf.write("\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13")
|
| 17 |
+
buf.write("\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t")
|
| 18 |
+
buf.write("\21\4\22\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26")
|
| 19 |
+
buf.write("\4\27\t\27\4\30\t\30\4\31\t\31\4\32\t\32\4\33\t\33\4\34")
|
| 20 |
+
buf.write("\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!\t!\4\"\t")
|
| 21 |
+
buf.write("\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4")
|
| 22 |
+
buf.write("+\t+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62")
|
| 23 |
+
buf.write("\t\62\4\63\t\63\4\64\t\64\4\65\t\65\4\66\t\66\3\2\3\2")
|
| 24 |
+
buf.write("\3\3\3\3\3\4\3\4\3\4\5\4y\n\4\3\4\7\4|\n\4\f\4\16\4\177")
|
| 25 |
+
buf.write("\13\4\5\4\u0081\n\4\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3")
|
| 26 |
+
buf.write("\7\7\7\u008c\n\7\f\7\16\7\u008f\13\7\3\7\3\7\3\b\7\b\u0094")
|
| 27 |
+
buf.write("\n\b\f\b\16\b\u0097\13\b\3\b\3\b\3\b\3\b\3\t\6\t\u009e")
|
| 28 |
+
buf.write("\n\t\r\t\16\t\u009f\3\n\6\n\u00a3\n\n\r\n\16\n\u00a4\3")
|
| 29 |
+
buf.write("\n\3\n\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\f\5\f\u00b1\n")
|
| 30 |
+
buf.write("\f\3\f\3\f\3\r\3\r\5\r\u00b7\n\r\3\r\3\r\3\16\5\16\u00bc")
|
| 31 |
+
buf.write("\n\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\20\3\20")
|
| 32 |
+
buf.write("\3\20\3\20\3\21\5\21\u00cb\n\21\3\21\3\21\5\21\u00cf\n")
|
| 33 |
+
buf.write("\21\3\22\3\22\5\22\u00d3\n\22\3\23\5\23\u00d6\n\23\3\23")
|
| 34 |
+
buf.write("\3\23\3\24\5\24\u00db\n\24\3\24\3\24\5\24\u00df\n\24\3")
|
| 35 |
+
buf.write("\25\3\25\3\25\3\25\5\25\u00e5\n\25\3\25\3\25\3\25\5\25")
|
| 36 |
+
buf.write("\u00ea\n\25\3\25\7\25\u00ed\n\25\f\25\16\25\u00f0\13\25")
|
| 37 |
+
buf.write("\5\25\u00f2\n\25\3\26\3\26\5\26\u00f6\n\26\3\26\3\26\5")
|
| 38 |
+
buf.write("\26\u00fa\n\26\3\26\3\26\5\26\u00fe\n\26\3\26\7\26\u0101")
|
| 39 |
+
buf.write("\n\26\f\26\16\26\u0104\13\26\3\27\5\27\u0107\n\27\3\27")
|
| 40 |
+
buf.write("\3\27\3\27\3\27\3\27\3\27\3\27\3\27\5\27\u0111\n\27\3")
|
| 41 |
+
buf.write("\30\5\30\u0114\n\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31")
|
| 42 |
+
buf.write("\3\31\3\31\3\31\3\31\5\31\u0121\n\31\3\32\3\32\3\32\3")
|
| 43 |
+
buf.write("\32\3\32\3\33\3\33\3\34\3\34\5\34\u012c\n\34\3\34\3\34")
|
| 44 |
+
buf.write("\3\34\7\34\u0131\n\34\f\34\16\34\u0134\13\34\3\35\3\35")
|
| 45 |
+
buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35")
|
| 46 |
+
buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\6\35")
|
| 47 |
+
buf.write("\u014d\n\35\r\35\16\35\u014e\3\36\6\36\u0152\n\36\r\36")
|
| 48 |
+
buf.write("\16\36\u0153\3\37\3\37\5\37\u0158\n\37\3\37\3\37\3\37")
|
| 49 |
+
buf.write("\3 \5 \u015e\n \3 \3 \5 \u0162\n \3 \3 \3 \3!\5!\u0168")
|
| 50 |
+
buf.write("\n!\3!\3!\3!\3!\3\"\3\"\3#\3#\3#\3#\3$\3$\3$\3$\3%\3%")
|
| 51 |
+
buf.write("\3%\3%\3&\6&\u017d\n&\r&\16&\u017e\3\'\3\'\3\'\3\'\3\'")
|
| 52 |
+
buf.write("\3(\3(\3(\3(\3)\7)\u018b\n)\f)\16)\u018e\13)\3)\3)\3)")
|
| 53 |
+
buf.write("\3)\3*\3*\3*\3*\3+\7+\u0199\n+\f+\16+\u019c\13+\3+\3+")
|
| 54 |
+
buf.write("\3+\3+\3+\3,\6,\u01a4\n,\r,\16,\u01a5\3-\6-\u01a9\n-\r")
|
| 55 |
+
buf.write("-\16-\u01aa\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60")
|
| 56 |
+
buf.write("\3\60\3\60\3\60\3\61\7\61\u01be\n\61\f\61\16\61\u01c1")
|
| 57 |
+
buf.write("\13\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\63\7")
|
| 58 |
+
buf.write("\63\u01cc\n\63\f\63\16\63\u01cf\13\63\3\63\3\63\3\63\3")
|
| 59 |
+
buf.write("\63\3\63\3\64\6\64\u01d7\n\64\r\64\16\64\u01d8\3\64\3")
|
| 60 |
+
buf.write("\64\3\65\6\65\u01de\n\65\r\65\16\65\u01df\3\65\3\65\3")
|
| 61 |
+
buf.write("\66\3\66\3\66\3\66\2\2\67\7\2\t\2\13\2\r\2\17\2\21\3\23")
|
| 62 |
+
buf.write("\4\25\5\27\2\31\34\33\6\35\7\37\b!\t#\n%\13\'\f)\r+\16")
|
| 63 |
+
buf.write("-\2/\2\61\17\63\20\65\21\67\229\23;\24=\25?\26A\2C\2E")
|
| 64 |
+
buf.write("\27G\30I\35K\36M\2O\31Q\2S\32U\2W\2Y\2[\33]\2_\2a\2c\2")
|
| 65 |
+
buf.write("e\2g\2i\2k\2m\2o\2\7\2\3\4\5\6\32\4\2C\\c|\3\2\62;\3\2")
|
| 66 |
+
buf.write("\63;\3\2&&\4\2&&^^\4\2GGgg\4\2--//\4\2KKkk\4\2PPpp\4\2")
|
| 67 |
+
buf.write("HHhh\4\2CCcc\4\2VVvv\4\2TTtt\4\2WWww\4\2NNnn\4\2UUuu\b")
|
| 68 |
+
buf.write("\2&\',-/\61AB^^~~\4\2//aa\4\2\13\13\"\"\13\2\13\13\"\"")
|
| 69 |
+
buf.write("$$)+\60\60<<]_}}\177\177\4\2&&))\5\2&&))^^\4\2$$&&\5\2")
|
| 70 |
+
buf.write("$$&&^^\2\u0218\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2")
|
| 71 |
+
buf.write("\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\3\33\3\2\2\2\3")
|
| 72 |
+
buf.write("\35\3\2\2\2\3\37\3\2\2\2\3!\3\2\2\2\3#\3\2\2\2\3%\3\2")
|
| 73 |
+
buf.write("\2\2\3\'\3\2\2\2\3)\3\2\2\2\3+\3\2\2\2\3\61\3\2\2\2\3")
|
| 74 |
+
buf.write("\63\3\2\2\2\3\65\3\2\2\2\3\67\3\2\2\2\39\3\2\2\2\3;\3")
|
| 75 |
+
buf.write("\2\2\2\3=\3\2\2\2\3?\3\2\2\2\4A\3\2\2\2\4C\3\2\2\2\4E")
|
| 76 |
+
buf.write("\3\2\2\2\4G\3\2\2\2\4I\3\2\2\2\4K\3\2\2\2\4M\3\2\2\2\4")
|
| 77 |
+
buf.write("O\3\2\2\2\5Q\3\2\2\2\5S\3\2\2\2\5U\3\2\2\2\5W\3\2\2\2")
|
| 78 |
+
buf.write("\5Y\3\2\2\2\5[\3\2\2\2\5]\3\2\2\2\5_\3\2\2\2\6a\3\2\2")
|
| 79 |
+
buf.write("\2\6c\3\2\2\2\6e\3\2\2\2\6g\3\2\2\2\6i\3\2\2\2\6k\3\2")
|
| 80 |
+
buf.write("\2\2\6m\3\2\2\2\6o\3\2\2\2\7q\3\2\2\2\ts\3\2\2\2\13\u0080")
|
| 81 |
+
buf.write("\3\2\2\2\r\u0082\3\2\2\2\17\u0085\3\2\2\2\21\u008d\3\2")
|
| 82 |
+
buf.write("\2\2\23\u0095\3\2\2\2\25\u009d\3\2\2\2\27\u00a2\3\2\2")
|
| 83 |
+
buf.write("\2\31\u00a8\3\2\2\2\33\u00ac\3\2\2\2\35\u00b4\3\2\2\2")
|
| 84 |
+
buf.write("\37\u00bb\3\2\2\2!\u00c1\3\2\2\2#\u00c5\3\2\2\2%\u00ca")
|
| 85 |
+
buf.write("\3\2\2\2\'\u00d0\3\2\2\2)\u00d5\3\2\2\2+\u00da\3\2\2\2")
|
| 86 |
+
buf.write("-\u00f1\3\2\2\2/\u00f5\3\2\2\2\61\u0106\3\2\2\2\63\u0113")
|
| 87 |
+
buf.write("\3\2\2\2\65\u0120\3\2\2\2\67\u0122\3\2\2\29\u0127\3\2")
|
| 88 |
+
buf.write("\2\2;\u012b\3\2\2\2=\u014c\3\2\2\2?\u0151\3\2\2\2A\u0155")
|
| 89 |
+
buf.write("\3\2\2\2C\u015d\3\2\2\2E\u0167\3\2\2\2G\u016d\3\2\2\2")
|
| 90 |
+
buf.write("I\u016f\3\2\2\2K\u0173\3\2\2\2M\u0177\3\2\2\2O\u017c\3")
|
| 91 |
+
buf.write("\2\2\2Q\u0180\3\2\2\2S\u0185\3\2\2\2U\u018c\3\2\2\2W\u0193")
|
| 92 |
+
buf.write("\3\2\2\2Y\u019a\3\2\2\2[\u01a3\3\2\2\2]\u01a8\3\2\2\2")
|
| 93 |
+
buf.write("_\u01ae\3\2\2\2a\u01b2\3\2\2\2c\u01b7\3\2\2\2e\u01bf\3")
|
| 94 |
+
buf.write("\2\2\2g\u01c6\3\2\2\2i\u01cd\3\2\2\2k\u01d6\3\2\2\2m\u01dd")
|
| 95 |
+
buf.write("\3\2\2\2o\u01e3\3\2\2\2qr\t\2\2\2r\b\3\2\2\2st\t\3\2\2")
|
| 96 |
+
buf.write("t\n\3\2\2\2u\u0081\7\62\2\2v}\t\4\2\2wy\7a\2\2xw\3\2\2")
|
| 97 |
+
buf.write("\2xy\3\2\2\2yz\3\2\2\2z|\5\t\3\2{x\3\2\2\2|\177\3\2\2")
|
| 98 |
+
buf.write("\2}{\3\2\2\2}~\3\2\2\2~\u0081\3\2\2\2\177}\3\2\2\2\u0080")
|
| 99 |
+
buf.write("u\3\2\2\2\u0080v\3\2\2\2\u0081\f\3\2\2\2\u0082\u0083\7")
|
| 100 |
+
buf.write("^\2\2\u0083\u0084\7^\2\2\u0084\16\3\2\2\2\u0085\u0086")
|
| 101 |
+
buf.write("\5\33\f\2\u0086\u0087\3\2\2\2\u0087\u0088\b\6\2\2\u0088")
|
| 102 |
+
buf.write("\u0089\b\6\3\2\u0089\20\3\2\2\2\u008a\u008c\n\5\2\2\u008b")
|
| 103 |
+
buf.write("\u008a\3\2\2\2\u008c\u008f\3\2\2\2\u008d\u008b\3\2\2\2")
|
| 104 |
+
buf.write("\u008d\u008e\3\2\2\2\u008e\u0090\3\2\2\2\u008f\u008d\3")
|
| 105 |
+
buf.write("\2\2\2\u0090\u0091\n\6\2\2\u0091\22\3\2\2\2\u0092\u0094")
|
| 106 |
+
buf.write("\5\r\5\2\u0093\u0092\3\2\2\2\u0094\u0097\3\2\2\2\u0095")
|
| 107 |
+
buf.write("\u0093\3\2\2\2\u0095\u0096\3\2\2\2\u0096\u0098\3\2\2\2")
|
| 108 |
+
buf.write("\u0097\u0095\3\2\2\2\u0098\u0099\7^\2\2\u0099\u009a\7")
|
| 109 |
+
buf.write("&\2\2\u009a\u009b\7}\2\2\u009b\24\3\2\2\2\u009c\u009e")
|
| 110 |
+
buf.write("\5\r\5\2\u009d\u009c\3\2\2\2\u009e\u009f\3\2\2\2\u009f")
|
| 111 |
+
buf.write("\u009d\3\2\2\2\u009f\u00a0\3\2\2\2\u00a0\26\3\2\2\2\u00a1")
|
| 112 |
+
buf.write("\u00a3\7^\2\2\u00a2\u00a1\3\2\2\2\u00a3\u00a4\3\2\2\2")
|
| 113 |
+
buf.write("\u00a4\u00a2\3\2\2\2\u00a4\u00a5\3\2\2\2\u00a5\u00a6\3")
|
| 114 |
+
buf.write("\2\2\2\u00a6\u00a7\b\n\4\2\u00a7\30\3\2\2\2\u00a8\u00a9")
|
| 115 |
+
buf.write("\7&\2\2\u00a9\u00aa\3\2\2\2\u00aa\u00ab\b\13\4\2\u00ab")
|
| 116 |
+
buf.write("\32\3\2\2\2\u00ac\u00ad\7&\2\2\u00ad\u00ae\7}\2\2\u00ae")
|
| 117 |
+
buf.write("\u00b0\3\2\2\2\u00af\u00b1\5?\36\2\u00b0\u00af\3\2\2\2")
|
| 118 |
+
buf.write("\u00b0\u00b1\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\u00b3\b")
|
| 119 |
+
buf.write("\f\3\2\u00b3\34\3\2\2\2\u00b4\u00b6\7}\2\2\u00b5\u00b7")
|
| 120 |
+
buf.write("\5?\36\2\u00b6\u00b5\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7")
|
| 121 |
+
buf.write("\u00b8\3\2\2\2\u00b8\u00b9\b\r\5\2\u00b9\36\3\2\2\2\u00ba")
|
| 122 |
+
buf.write("\u00bc\5?\36\2\u00bb\u00ba\3\2\2\2\u00bb\u00bc\3\2\2\2")
|
| 123 |
+
buf.write("\u00bc\u00bd\3\2\2\2\u00bd\u00be\7\177\2\2\u00be\u00bf")
|
| 124 |
+
buf.write("\3\2\2\2\u00bf\u00c0\b\16\6\2\u00c0 \3\2\2\2\u00c1\u00c2")
|
| 125 |
+
buf.write("\7)\2\2\u00c2\u00c3\3\2\2\2\u00c3\u00c4\b\17\7\2\u00c4")
|
| 126 |
+
buf.write("\"\3\2\2\2\u00c5\u00c6\7$\2\2\u00c6\u00c7\3\2\2\2\u00c7")
|
| 127 |
+
buf.write("\u00c8\b\20\b\2\u00c8$\3\2\2\2\u00c9\u00cb\5?\36\2\u00ca")
|
| 128 |
+
buf.write("\u00c9\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00cc\3\2\2\2")
|
| 129 |
+
buf.write("\u00cc\u00ce\7.\2\2\u00cd\u00cf\5?\36\2\u00ce\u00cd\3")
|
| 130 |
+
buf.write("\2\2\2\u00ce\u00cf\3\2\2\2\u00cf&\3\2\2\2\u00d0\u00d2")
|
| 131 |
+
buf.write("\7]\2\2\u00d1\u00d3\5?\36\2\u00d2\u00d1\3\2\2\2\u00d2")
|
| 132 |
+
buf.write("\u00d3\3\2\2\2\u00d3(\3\2\2\2\u00d4\u00d6\5?\36\2\u00d5")
|
| 133 |
+
buf.write("\u00d4\3\2\2\2\u00d5\u00d6\3\2\2\2\u00d6\u00d7\3\2\2\2")
|
| 134 |
+
buf.write("\u00d7\u00d8\7_\2\2\u00d8*\3\2\2\2\u00d9\u00db\5?\36\2")
|
| 135 |
+
buf.write("\u00da\u00d9\3\2\2\2\u00da\u00db\3\2\2\2\u00db\u00dc\3")
|
| 136 |
+
buf.write("\2\2\2\u00dc\u00de\7<\2\2\u00dd\u00df\5?\36\2\u00de\u00dd")
|
| 137 |
+
buf.write("\3\2\2\2\u00de\u00df\3\2\2\2\u00df,\3\2\2\2\u00e0\u00e1")
|
| 138 |
+
buf.write("\5\13\4\2\u00e1\u00e2\7\60\2\2\u00e2\u00f2\3\2\2\2\u00e3")
|
| 139 |
+
buf.write("\u00e5\5\13\4\2\u00e4\u00e3\3\2\2\2\u00e4\u00e5\3\2\2")
|
| 140 |
+
buf.write("\2\u00e5\u00e6\3\2\2\2\u00e6\u00e7\7\60\2\2\u00e7\u00ee")
|
| 141 |
+
buf.write("\5\t\3\2\u00e8\u00ea\7a\2\2\u00e9\u00e8\3\2\2\2\u00e9")
|
| 142 |
+
buf.write("\u00ea\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb\u00ed\5\t\3\2")
|
| 143 |
+
buf.write("\u00ec\u00e9\3\2\2\2\u00ed\u00f0\3\2\2\2\u00ee\u00ec\3")
|
| 144 |
+
buf.write("\2\2\2\u00ee\u00ef\3\2\2\2\u00ef\u00f2\3\2\2\2\u00f0\u00ee")
|
| 145 |
+
buf.write("\3\2\2\2\u00f1\u00e0\3\2\2\2\u00f1\u00e4\3\2\2\2\u00f2")
|
| 146 |
+
buf.write(".\3\2\2\2\u00f3\u00f6\5\13\4\2\u00f4\u00f6\5-\25\2\u00f5")
|
| 147 |
+
buf.write("\u00f3\3\2\2\2\u00f5\u00f4\3\2\2\2\u00f6\u00f7\3\2\2\2")
|
| 148 |
+
buf.write("\u00f7\u00f9\t\7\2\2\u00f8\u00fa\t\b\2\2\u00f9\u00f8\3")
|
| 149 |
+
buf.write("\2\2\2\u00f9\u00fa\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fb\u0102")
|
| 150 |
+
buf.write("\5\t\3\2\u00fc\u00fe\7a\2\2\u00fd\u00fc\3\2\2\2\u00fd")
|
| 151 |
+
buf.write("\u00fe\3\2\2\2\u00fe\u00ff\3\2\2\2\u00ff\u0101\5\t\3\2")
|
| 152 |
+
buf.write("\u0100\u00fd\3\2\2\2\u0101\u0104\3\2\2\2\u0102\u0100\3")
|
| 153 |
+
buf.write("\2\2\2\u0102\u0103\3\2\2\2\u0103\60\3\2\2\2\u0104\u0102")
|
| 154 |
+
buf.write("\3\2\2\2\u0105\u0107\t\b\2\2\u0106\u0105\3\2\2\2\u0106")
|
| 155 |
+
buf.write("\u0107\3\2\2\2\u0107\u0110\3\2\2\2\u0108\u0111\5-\25\2")
|
| 156 |
+
buf.write("\u0109\u0111\5/\26\2\u010a\u010b\t\t\2\2\u010b\u010c\t")
|
| 157 |
+
buf.write("\n\2\2\u010c\u0111\t\13\2\2\u010d\u010e\t\n\2\2\u010e")
|
| 158 |
+
buf.write("\u010f\t\f\2\2\u010f\u0111\t\n\2\2\u0110\u0108\3\2\2\2")
|
| 159 |
+
buf.write("\u0110\u0109\3\2\2\2\u0110\u010a\3\2\2\2\u0110\u010d\3")
|
| 160 |
+
buf.write("\2\2\2\u0111\62\3\2\2\2\u0112\u0114\t\b\2\2\u0113\u0112")
|
| 161 |
+
buf.write("\3\2\2\2\u0113\u0114\3\2\2\2\u0114\u0115\3\2\2\2\u0115")
|
| 162 |
+
buf.write("\u0116\5\13\4\2\u0116\64\3\2\2\2\u0117\u0118\t\r\2\2\u0118")
|
| 163 |
+
buf.write("\u0119\t\16\2\2\u0119\u011a\t\17\2\2\u011a\u0121\t\7\2")
|
| 164 |
+
buf.write("\2\u011b\u011c\t\13\2\2\u011c\u011d\t\f\2\2\u011d\u011e")
|
| 165 |
+
buf.write("\t\20\2\2\u011e\u011f\t\21\2\2\u011f\u0121\t\7\2\2\u0120")
|
| 166 |
+
buf.write("\u0117\3\2\2\2\u0120\u011b\3\2\2\2\u0121\66\3\2\2\2\u0122")
|
| 167 |
+
buf.write("\u0123\t\n\2\2\u0123\u0124\t\17\2\2\u0124\u0125\t\20\2")
|
| 168 |
+
buf.write("\2\u0125\u0126\t\20\2\2\u01268\3\2\2\2\u0127\u0128\t\22")
|
| 169 |
+
buf.write("\2\2\u0128:\3\2\2\2\u0129\u012c\5\7\2\2\u012a\u012c\7")
|
| 170 |
+
buf.write("a\2\2\u012b\u0129\3\2\2\2\u012b\u012a\3\2\2\2\u012c\u0132")
|
| 171 |
+
buf.write("\3\2\2\2\u012d\u0131\5\7\2\2\u012e\u0131\5\t\3\2\u012f")
|
| 172 |
+
buf.write("\u0131\t\23\2\2\u0130\u012d\3\2\2\2\u0130\u012e\3\2\2")
|
| 173 |
+
buf.write("\2\u0130\u012f\3\2\2\2\u0131\u0134\3\2\2\2\u0132\u0130")
|
| 174 |
+
buf.write("\3\2\2\2\u0132\u0133\3\2\2\2\u0133<\3\2\2\2\u0134\u0132")
|
| 175 |
+
buf.write("\3\2\2\2\u0135\u014d\5\r\5\2\u0136\u0137\7^\2\2\u0137")
|
| 176 |
+
buf.write("\u014d\7*\2\2\u0138\u0139\7^\2\2\u0139\u014d\7+\2\2\u013a")
|
| 177 |
+
buf.write("\u013b\7^\2\2\u013b\u014d\7]\2\2\u013c\u013d\7^\2\2\u013d")
|
| 178 |
+
buf.write("\u014d\7_\2\2\u013e\u013f\7^\2\2\u013f\u014d\7}\2\2\u0140")
|
| 179 |
+
buf.write("\u0141\7^\2\2\u0141\u014d\7\177\2\2\u0142\u0143\7^\2\2")
|
| 180 |
+
buf.write("\u0143\u014d\7<\2\2\u0144\u0145\7^\2\2\u0145\u014d\7?")
|
| 181 |
+
buf.write("\2\2\u0146\u0147\7^\2\2\u0147\u014d\7.\2\2\u0148\u0149")
|
| 182 |
+
buf.write("\7^\2\2\u0149\u014d\7\"\2\2\u014a\u014b\7^\2\2\u014b\u014d")
|
| 183 |
+
buf.write("\7\13\2\2\u014c\u0135\3\2\2\2\u014c\u0136\3\2\2\2\u014c")
|
| 184 |
+
buf.write("\u0138\3\2\2\2\u014c\u013a\3\2\2\2\u014c\u013c\3\2\2\2")
|
| 185 |
+
buf.write("\u014c\u013e\3\2\2\2\u014c\u0140\3\2\2\2\u014c\u0142\3")
|
| 186 |
+
buf.write("\2\2\2\u014c\u0144\3\2\2\2\u014c\u0146\3\2\2\2\u014c\u0148")
|
| 187 |
+
buf.write("\3\2\2\2\u014c\u014a\3\2\2\2\u014d\u014e\3\2\2\2\u014e")
|
| 188 |
+
buf.write("\u014c\3\2\2\2\u014e\u014f\3\2\2\2\u014f>\3\2\2\2\u0150")
|
| 189 |
+
buf.write("\u0152\t\24\2\2\u0151\u0150\3\2\2\2\u0152\u0153\3\2\2")
|
| 190 |
+
buf.write("\2\u0153\u0151\3\2\2\2\u0153\u0154\3\2\2\2\u0154@\3\2")
|
| 191 |
+
buf.write("\2\2\u0155\u0157\5\33\f\2\u0156\u0158\5?\36\2\u0157\u0156")
|
| 192 |
+
buf.write("\3\2\2\2\u0157\u0158\3\2\2\2\u0158\u0159\3\2\2\2\u0159")
|
| 193 |
+
buf.write("\u015a\b\37\2\2\u015a\u015b\b\37\3\2\u015bB\3\2\2\2\u015c")
|
| 194 |
+
buf.write("\u015e\5?\36\2\u015d\u015c\3\2\2\2\u015d\u015e\3\2\2\2")
|
| 195 |
+
buf.write("\u015e\u015f\3\2\2\2\u015f\u0161\7<\2\2\u0160\u0162\5")
|
| 196 |
+
buf.write("?\36\2\u0161\u0160\3\2\2\2\u0161\u0162\3\2\2\2\u0162\u0163")
|
| 197 |
+
buf.write("\3\2\2\2\u0163\u0164\b \t\2\u0164\u0165\b \n\2\u0165D")
|
| 198 |
+
buf.write("\3\2\2\2\u0166\u0168\5?\36\2\u0167\u0166\3\2\2\2\u0167")
|
| 199 |
+
buf.write("\u0168\3\2\2\2\u0168\u0169\3\2\2\2\u0169\u016a\7\177\2")
|
| 200 |
+
buf.write("\2\u016a\u016b\3\2\2\2\u016b\u016c\b!\6\2\u016cF\3\2\2")
|
| 201 |
+
buf.write("\2\u016d\u016e\7\60\2\2\u016eH\3\2\2\2\u016f\u0170\7]")
|
| 202 |
+
buf.write("\2\2\u0170\u0171\3\2\2\2\u0171\u0172\b#\13\2\u0172J\3")
|
| 203 |
+
buf.write("\2\2\2\u0173\u0174\7_\2\2\u0174\u0175\3\2\2\2\u0175\u0176")
|
| 204 |
+
buf.write("\b$\f\2\u0176L\3\2\2\2\u0177\u0178\5;\34\2\u0178\u0179")
|
| 205 |
+
buf.write("\3\2\2\2\u0179\u017a\b%\r\2\u017aN\3\2\2\2\u017b\u017d")
|
| 206 |
+
buf.write("\n\25\2\2\u017c\u017b\3\2\2\2\u017d\u017e\3\2\2\2\u017e")
|
| 207 |
+
buf.write("\u017c\3\2\2\2\u017e\u017f\3\2\2\2\u017fP\3\2\2\2\u0180")
|
| 208 |
+
buf.write("\u0181\5\33\f\2\u0181\u0182\3\2\2\2\u0182\u0183\b\'\2")
|
| 209 |
+
buf.write("\2\u0183\u0184\b\'\3\2\u0184R\3\2\2\2\u0185\u0186\7)\2")
|
| 210 |
+
buf.write("\2\u0186\u0187\3\2\2\2\u0187\u0188\b(\6\2\u0188T\3\2\2")
|
| 211 |
+
buf.write("\2\u0189\u018b\n\26\2\2\u018a\u0189\3\2\2\2\u018b\u018e")
|
| 212 |
+
buf.write("\3\2\2\2\u018c\u018a\3\2\2\2\u018c\u018d\3\2\2\2\u018d")
|
| 213 |
+
buf.write("\u018f\3\2\2\2\u018e\u018c\3\2\2\2\u018f\u0190\n\27\2")
|
| 214 |
+
buf.write("\2\u0190\u0191\3\2\2\2\u0191\u0192\b)\4\2\u0192V\3\2\2")
|
| 215 |
+
buf.write("\2\u0193\u0194\5\23\b\2\u0194\u0195\3\2\2\2\u0195\u0196")
|
| 216 |
+
buf.write("\b*\16\2\u0196X\3\2\2\2\u0197\u0199\5\r\5\2\u0198\u0197")
|
| 217 |
+
buf.write("\3\2\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2\2\2\u019a")
|
| 218 |
+
buf.write("\u019b\3\2\2\2\u019b\u019d\3\2\2\2\u019c\u019a\3\2\2\2")
|
| 219 |
+
buf.write("\u019d\u019e\7^\2\2\u019e\u019f\7)\2\2\u019f\u01a0\3\2")
|
| 220 |
+
buf.write("\2\2\u01a0\u01a1\b+\17\2\u01a1Z\3\2\2\2\u01a2\u01a4\5")
|
| 221 |
+
buf.write("\r\5\2\u01a3\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5\u01a3")
|
| 222 |
+
buf.write("\3\2\2\2\u01a5\u01a6\3\2\2\2\u01a6\\\3\2\2\2\u01a7\u01a9")
|
| 223 |
+
buf.write("\7^\2\2\u01a8\u01a7\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa")
|
| 224 |
+
buf.write("\u01a8\3\2\2\2\u01aa\u01ab\3\2\2\2\u01ab\u01ac\3\2\2\2")
|
| 225 |
+
buf.write("\u01ac\u01ad\b-\4\2\u01ad^\3\2\2\2\u01ae\u01af\7&\2\2")
|
| 226 |
+
buf.write("\u01af\u01b0\3\2\2\2\u01b0\u01b1\b.\4\2\u01b1`\3\2\2\2")
|
| 227 |
+
buf.write("\u01b2\u01b3\5\33\f\2\u01b3\u01b4\3\2\2\2\u01b4\u01b5")
|
| 228 |
+
buf.write("\b/\2\2\u01b5\u01b6\b/\3\2\u01b6b\3\2\2\2\u01b7\u01b8")
|
| 229 |
+
buf.write("\7$\2\2\u01b8\u01b9\3\2\2\2\u01b9\u01ba\b\60\20\2\u01ba")
|
| 230 |
+
buf.write("\u01bb\b\60\6\2\u01bbd\3\2\2\2\u01bc\u01be\n\30\2\2\u01bd")
|
| 231 |
+
buf.write("\u01bc\3\2\2\2\u01be\u01c1\3\2\2\2\u01bf\u01bd\3\2\2\2")
|
| 232 |
+
buf.write("\u01bf\u01c0\3\2\2\2\u01c0\u01c2\3\2\2\2\u01c1\u01bf\3")
|
| 233 |
+
buf.write("\2\2\2\u01c2\u01c3\n\31\2\2\u01c3\u01c4\3\2\2\2\u01c4")
|
| 234 |
+
buf.write("\u01c5\b\61\4\2\u01c5f\3\2\2\2\u01c6\u01c7\5\23\b\2\u01c7")
|
| 235 |
+
buf.write("\u01c8\3\2\2\2\u01c8\u01c9\b\62\16\2\u01c9h\3\2\2\2\u01ca")
|
| 236 |
+
buf.write("\u01cc\5\r\5\2\u01cb\u01ca\3\2\2\2\u01cc\u01cf\3\2\2\2")
|
| 237 |
+
buf.write("\u01cd\u01cb\3\2\2\2\u01cd\u01ce\3\2\2\2\u01ce\u01d0\3")
|
| 238 |
+
buf.write("\2\2\2\u01cf\u01cd\3\2\2\2\u01d0\u01d1\7^\2\2\u01d1\u01d2")
|
| 239 |
+
buf.write("\7$\2\2\u01d2\u01d3\3\2\2\2\u01d3\u01d4\b\63\17\2\u01d4")
|
| 240 |
+
buf.write("j\3\2\2\2\u01d5\u01d7\5\r\5\2\u01d6\u01d5\3\2\2\2\u01d7")
|
| 241 |
+
buf.write("\u01d8\3\2\2\2\u01d8\u01d6\3\2\2\2\u01d8\u01d9\3\2\2\2")
|
| 242 |
+
buf.write("\u01d9\u01da\3\2\2\2\u01da\u01db\b\64\21\2\u01dbl\3\2")
|
| 243 |
+
buf.write("\2\2\u01dc\u01de\7^\2\2\u01dd\u01dc\3\2\2\2\u01de\u01df")
|
| 244 |
+
buf.write("\3\2\2\2\u01df\u01dd\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0")
|
| 245 |
+
buf.write("\u01e1\3\2\2\2\u01e1\u01e2\b\65\4\2\u01e2n\3\2\2\2\u01e3")
|
| 246 |
+
buf.write("\u01e4\7&\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e6\b\66\4\2")
|
| 247 |
+
buf.write("\u01e6p\3\2\2\2\66\2\3\4\5\6x}\u0080\u008d\u0095\u009f")
|
| 248 |
+
buf.write("\u00a4\u00b0\u00b6\u00bb\u00ca\u00ce\u00d2\u00d5\u00da")
|
| 249 |
+
buf.write("\u00de\u00e4\u00e9\u00ee\u00f1\u00f5\u00f9\u00fd\u0102")
|
| 250 |
+
buf.write("\u0106\u0110\u0113\u0120\u012b\u0130\u0132\u014c\u014e")
|
| 251 |
+
buf.write("\u0153\u0157\u015d\u0161\u0167\u017e\u018c\u019a\u01a5")
|
| 252 |
+
buf.write("\u01aa\u01bf\u01cd\u01d8\u01df\22\t\6\2\7\4\2\t\3\2\7")
|
| 253 |
+
buf.write("\3\2\6\2\2\7\5\2\7\6\2\t\16\2\4\3\2\t\f\2\t\r\2\t\24\2")
|
| 254 |
+
buf.write("\t\4\2\t\25\2\t\32\2\t\33\2")
|
| 255 |
+
return buf.getvalue()
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
class OmegaConfGrammarLexer(Lexer):
|
| 259 |
+
|
| 260 |
+
atn = ATNDeserializer().deserialize(serializedATN())
|
| 261 |
+
|
| 262 |
+
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
|
| 263 |
+
|
| 264 |
+
VALUE_MODE = 1
|
| 265 |
+
INTERPOLATION_MODE = 2
|
| 266 |
+
QUOTED_SINGLE_MODE = 3
|
| 267 |
+
QUOTED_DOUBLE_MODE = 4
|
| 268 |
+
|
| 269 |
+
ANY_STR = 1
|
| 270 |
+
ESC_INTER = 2
|
| 271 |
+
TOP_ESC = 3
|
| 272 |
+
INTER_OPEN = 4
|
| 273 |
+
BRACE_OPEN = 5
|
| 274 |
+
BRACE_CLOSE = 6
|
| 275 |
+
QUOTE_OPEN_SINGLE = 7
|
| 276 |
+
QUOTE_OPEN_DOUBLE = 8
|
| 277 |
+
COMMA = 9
|
| 278 |
+
BRACKET_OPEN = 10
|
| 279 |
+
BRACKET_CLOSE = 11
|
| 280 |
+
COLON = 12
|
| 281 |
+
FLOAT = 13
|
| 282 |
+
INT = 14
|
| 283 |
+
BOOL = 15
|
| 284 |
+
NULL = 16
|
| 285 |
+
UNQUOTED_CHAR = 17
|
| 286 |
+
ID = 18
|
| 287 |
+
ESC = 19
|
| 288 |
+
WS = 20
|
| 289 |
+
INTER_CLOSE = 21
|
| 290 |
+
DOT = 22
|
| 291 |
+
INTER_KEY = 23
|
| 292 |
+
MATCHING_QUOTE_CLOSE = 24
|
| 293 |
+
QUOTED_ESC = 25
|
| 294 |
+
DOLLAR = 26
|
| 295 |
+
INTER_BRACKET_OPEN = 27
|
| 296 |
+
INTER_BRACKET_CLOSE = 28
|
| 297 |
+
|
| 298 |
+
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
|
| 299 |
+
|
| 300 |
+
modeNames = [ "DEFAULT_MODE", "VALUE_MODE", "INTERPOLATION_MODE", "QUOTED_SINGLE_MODE",
|
| 301 |
+
"QUOTED_DOUBLE_MODE" ]
|
| 302 |
+
|
| 303 |
+
literalNames = [ "<INVALID>",
|
| 304 |
+
"'.'", "'['", "']'" ]
|
| 305 |
+
|
| 306 |
+
symbolicNames = [ "<INVALID>",
|
| 307 |
+
"ANY_STR", "ESC_INTER", "TOP_ESC", "INTER_OPEN", "BRACE_OPEN",
|
| 308 |
+
"BRACE_CLOSE", "QUOTE_OPEN_SINGLE", "QUOTE_OPEN_DOUBLE", "COMMA",
|
| 309 |
+
"BRACKET_OPEN", "BRACKET_CLOSE", "COLON", "FLOAT", "INT", "BOOL",
|
| 310 |
+
"NULL", "UNQUOTED_CHAR", "ID", "ESC", "WS", "INTER_CLOSE", "DOT",
|
| 311 |
+
"INTER_KEY", "MATCHING_QUOTE_CLOSE", "QUOTED_ESC", "DOLLAR",
|
| 312 |
+
"INTER_BRACKET_OPEN", "INTER_BRACKET_CLOSE" ]
|
| 313 |
+
|
| 314 |
+
ruleNames = [ "CHAR", "DIGIT", "INT_UNSIGNED", "ESC_BACKSLASH", "TOP_INTER_OPEN",
|
| 315 |
+
"ANY_STR", "ESC_INTER", "TOP_ESC", "BACKSLASHES", "DOLLAR",
|
| 316 |
+
"INTER_OPEN", "BRACE_OPEN", "BRACE_CLOSE", "QUOTE_OPEN_SINGLE",
|
| 317 |
+
"QUOTE_OPEN_DOUBLE", "COMMA", "BRACKET_OPEN", "BRACKET_CLOSE",
|
| 318 |
+
"COLON", "POINT_FLOAT", "EXPONENT_FLOAT", "FLOAT", "INT",
|
| 319 |
+
"BOOL", "NULL", "UNQUOTED_CHAR", "ID", "ESC", "WS", "NESTED_INTER_OPEN",
|
| 320 |
+
"INTER_COLON", "INTER_CLOSE", "DOT", "INTER_BRACKET_OPEN",
|
| 321 |
+
"INTER_BRACKET_CLOSE", "INTER_ID", "INTER_KEY", "QSINGLE_INTER_OPEN",
|
| 322 |
+
"MATCHING_QUOTE_CLOSE", "QSINGLE_STR", "QSINGLE_ESC_INTER",
|
| 323 |
+
"QSINGLE_ESC_QUOTE", "QUOTED_ESC", "QSINGLE_BACKSLASHES",
|
| 324 |
+
"QSINGLE_DOLLAR", "QDOUBLE_INTER_OPEN", "QDOUBLE_CLOSE",
|
| 325 |
+
"QDOUBLE_STR", "QDOUBLE_ESC_INTER", "QDOUBLE_ESC_QUOTE",
|
| 326 |
+
"QDOUBLE_ESC", "QDOUBLE_BACKSLASHES", "QDOUBLE_DOLLAR" ]
|
| 327 |
+
|
| 328 |
+
grammarFileName = "OmegaConfGrammarLexer.g4"
|
| 329 |
+
|
| 330 |
+
def __init__(self, input=None, output:TextIO = sys.stdout):
|
| 331 |
+
super().__init__(input, output)
|
| 332 |
+
self.checkVersion("4.9.3")
|
| 333 |
+
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
|
| 334 |
+
self._actions = None
|
| 335 |
+
self._predicates = None
|
| 336 |
+
|
| 337 |
+
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarParser.py
ADDED
|
@@ -0,0 +1,1595 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated from /tmp/build-via-sdist-fm63w174/omegaconf-2.3.0/omegaconf/grammar/OmegaConfGrammarParser.g4 by ANTLR 4.9.3
|
| 2 |
+
# encoding: utf-8
|
| 3 |
+
from antlr4 import *
|
| 4 |
+
from io import StringIO
|
| 5 |
+
import sys
|
| 6 |
+
if sys.version_info[1] > 5:
|
| 7 |
+
from typing import TextIO
|
| 8 |
+
else:
|
| 9 |
+
from typing.io import TextIO
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def serializedATN():
|
| 13 |
+
with StringIO() as buf:
|
| 14 |
+
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\36")
|
| 15 |
+
buf.write("\u00b7\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
|
| 16 |
+
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
|
| 17 |
+
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\3\2\3\2\3\2\3\3\3")
|
| 18 |
+
buf.write("\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\6\4/\n\4\r\4\16\4\60\3")
|
| 19 |
+
buf.write("\5\3\5\3\5\3\5\5\5\67\n\5\3\6\3\6\5\6;\n\6\3\6\3\6\3\7")
|
| 20 |
+
buf.write("\3\7\3\7\3\7\7\7C\n\7\f\7\16\7F\13\7\5\7H\n\7\3\7\3\7")
|
| 21 |
+
buf.write("\3\b\3\b\3\b\3\b\3\t\3\t\3\t\5\tS\n\t\7\tU\n\t\f\t\16")
|
| 22 |
+
buf.write("\tX\13\t\3\t\3\t\5\t\\\n\t\6\t^\n\t\r\t\16\t_\5\tb\n\t")
|
| 23 |
+
buf.write("\3\n\3\n\5\nf\n\n\3\13\3\13\7\13j\n\13\f\13\16\13m\13")
|
| 24 |
+
buf.write("\13\3\13\3\13\3\13\3\13\3\13\5\13t\n\13\3\13\3\13\3\13")
|
| 25 |
+
buf.write("\3\13\3\13\3\13\7\13|\n\13\f\13\16\13\177\13\13\3\13\3")
|
| 26 |
+
buf.write("\13\3\f\3\f\3\f\3\f\5\f\u0087\n\f\3\f\3\f\3\r\3\r\3\r")
|
| 27 |
+
buf.write("\5\r\u008e\n\r\3\16\3\16\5\16\u0092\n\16\3\16\3\16\3\16")
|
| 28 |
+
buf.write("\5\16\u0097\n\16\7\16\u0099\n\16\f\16\16\16\u009c\13\16")
|
| 29 |
+
buf.write("\3\17\3\17\5\17\u00a0\n\17\3\17\3\17\3\20\3\20\3\20\3")
|
| 30 |
+
buf.write("\20\3\20\3\20\3\20\3\20\3\20\3\20\6\20\u00ae\n\20\r\20")
|
| 31 |
+
buf.write("\16\20\u00af\3\21\6\21\u00b3\n\21\r\21\16\21\u00b4\3\21")
|
| 32 |
+
buf.write("\2\2\22\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \2\4\3")
|
| 33 |
+
buf.write("\2\t\n\3\2\17\26\2\u00ce\2\"\3\2\2\2\4%\3\2\2\2\6.\3\2")
|
| 34 |
+
buf.write("\2\2\b\66\3\2\2\2\n8\3\2\2\2\f>\3\2\2\2\16K\3\2\2\2\20")
|
| 35 |
+
buf.write("a\3\2\2\2\22e\3\2\2\2\24g\3\2\2\2\26\u0082\3\2\2\2\30")
|
| 36 |
+
buf.write("\u008d\3\2\2\2\32\u0091\3\2\2\2\34\u009d\3\2\2\2\36\u00ad")
|
| 37 |
+
buf.write("\3\2\2\2 \u00b2\3\2\2\2\"#\5\6\4\2#$\7\2\2\3$\3\3\2\2")
|
| 38 |
+
buf.write("\2%&\5\b\5\2&\'\7\2\2\3\'\5\3\2\2\2(/\5\22\n\2)/\7\3\2")
|
| 39 |
+
buf.write("\2*/\7\25\2\2+/\7\4\2\2,/\7\5\2\2-/\7\33\2\2.(\3\2\2\2")
|
| 40 |
+
buf.write(".)\3\2\2\2.*\3\2\2\2.+\3\2\2\2.,\3\2\2\2.-\3\2\2\2/\60")
|
| 41 |
+
buf.write("\3\2\2\2\60.\3\2\2\2\60\61\3\2\2\2\61\7\3\2\2\2\62\67")
|
| 42 |
+
buf.write("\5\36\20\2\63\67\5\34\17\2\64\67\5\n\6\2\65\67\5\f\7\2")
|
| 43 |
+
buf.write("\66\62\3\2\2\2\66\63\3\2\2\2\66\64\3\2\2\2\66\65\3\2\2")
|
| 44 |
+
buf.write("\2\67\t\3\2\2\28:\7\f\2\29;\5\20\t\2:9\3\2\2\2:;\3\2\2")
|
| 45 |
+
buf.write("\2;<\3\2\2\2<=\7\r\2\2=\13\3\2\2\2>G\7\7\2\2?D\5\16\b")
|
| 46 |
+
buf.write("\2@A\7\13\2\2AC\5\16\b\2B@\3\2\2\2CF\3\2\2\2DB\3\2\2\2")
|
| 47 |
+
buf.write("DE\3\2\2\2EH\3\2\2\2FD\3\2\2\2G?\3\2\2\2GH\3\2\2\2HI\3")
|
| 48 |
+
buf.write("\2\2\2IJ\7\b\2\2J\r\3\2\2\2KL\5 \21\2LM\7\16\2\2MN\5\b")
|
| 49 |
+
buf.write("\5\2N\17\3\2\2\2OV\5\b\5\2PR\7\13\2\2QS\5\b\5\2RQ\3\2")
|
| 50 |
+
buf.write("\2\2RS\3\2\2\2SU\3\2\2\2TP\3\2\2\2UX\3\2\2\2VT\3\2\2\2")
|
| 51 |
+
buf.write("VW\3\2\2\2Wb\3\2\2\2XV\3\2\2\2Y[\7\13\2\2Z\\\5\b\5\2[")
|
| 52 |
+
buf.write("Z\3\2\2\2[\\\3\2\2\2\\^\3\2\2\2]Y\3\2\2\2^_\3\2\2\2_]")
|
| 53 |
+
buf.write("\3\2\2\2_`\3\2\2\2`b\3\2\2\2aO\3\2\2\2a]\3\2\2\2b\21\3")
|
| 54 |
+
buf.write("\2\2\2cf\5\24\13\2df\5\26\f\2ec\3\2\2\2ed\3\2\2\2f\23")
|
| 55 |
+
buf.write("\3\2\2\2gk\7\6\2\2hj\7\30\2\2ih\3\2\2\2jm\3\2\2\2ki\3")
|
| 56 |
+
buf.write("\2\2\2kl\3\2\2\2ls\3\2\2\2mk\3\2\2\2nt\5\30\r\2op\7\f")
|
| 57 |
+
buf.write("\2\2pq\5\30\r\2qr\7\r\2\2rt\3\2\2\2sn\3\2\2\2so\3\2\2")
|
| 58 |
+
buf.write("\2t}\3\2\2\2uv\7\30\2\2v|\5\30\r\2wx\7\f\2\2xy\5\30\r")
|
| 59 |
+
buf.write("\2yz\7\r\2\2z|\3\2\2\2{u\3\2\2\2{w\3\2\2\2|\177\3\2\2")
|
| 60 |
+
buf.write("\2}{\3\2\2\2}~\3\2\2\2~\u0080\3\2\2\2\177}\3\2\2\2\u0080")
|
| 61 |
+
buf.write("\u0081\7\27\2\2\u0081\25\3\2\2\2\u0082\u0083\7\6\2\2\u0083")
|
| 62 |
+
buf.write("\u0084\5\32\16\2\u0084\u0086\7\16\2\2\u0085\u0087\5\20")
|
| 63 |
+
buf.write("\t\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088")
|
| 64 |
+
buf.write("\3\2\2\2\u0088\u0089\7\b\2\2\u0089\27\3\2\2\2\u008a\u008e")
|
| 65 |
+
buf.write("\5\22\n\2\u008b\u008e\7\24\2\2\u008c\u008e\7\31\2\2\u008d")
|
| 66 |
+
buf.write("\u008a\3\2\2\2\u008d\u008b\3\2\2\2\u008d\u008c\3\2\2\2")
|
| 67 |
+
buf.write("\u008e\31\3\2\2\2\u008f\u0092\5\22\n\2\u0090\u0092\7\24")
|
| 68 |
+
buf.write("\2\2\u0091\u008f\3\2\2\2\u0091\u0090\3\2\2\2\u0092\u009a")
|
| 69 |
+
buf.write("\3\2\2\2\u0093\u0096\7\30\2\2\u0094\u0097\5\22\n\2\u0095")
|
| 70 |
+
buf.write("\u0097\7\24\2\2\u0096\u0094\3\2\2\2\u0096\u0095\3\2\2")
|
| 71 |
+
buf.write("\2\u0097\u0099\3\2\2\2\u0098\u0093\3\2\2\2\u0099\u009c")
|
| 72 |
+
buf.write("\3\2\2\2\u009a\u0098\3\2\2\2\u009a\u009b\3\2\2\2\u009b")
|
| 73 |
+
buf.write("\33\3\2\2\2\u009c\u009a\3\2\2\2\u009d\u009f\t\2\2\2\u009e")
|
| 74 |
+
buf.write("\u00a0\5\6\4\2\u009f\u009e\3\2\2\2\u009f\u00a0\3\2\2\2")
|
| 75 |
+
buf.write("\u00a0\u00a1\3\2\2\2\u00a1\u00a2\7\32\2\2\u00a2\35\3\2")
|
| 76 |
+
buf.write("\2\2\u00a3\u00ae\7\24\2\2\u00a4\u00ae\7\22\2\2\u00a5\u00ae")
|
| 77 |
+
buf.write("\7\20\2\2\u00a6\u00ae\7\17\2\2\u00a7\u00ae\7\21\2\2\u00a8")
|
| 78 |
+
buf.write("\u00ae\7\23\2\2\u00a9\u00ae\7\16\2\2\u00aa\u00ae\7\25")
|
| 79 |
+
buf.write("\2\2\u00ab\u00ae\7\26\2\2\u00ac\u00ae\5\22\n\2\u00ad\u00a3")
|
| 80 |
+
buf.write("\3\2\2\2\u00ad\u00a4\3\2\2\2\u00ad\u00a5\3\2\2\2\u00ad")
|
| 81 |
+
buf.write("\u00a6\3\2\2\2\u00ad\u00a7\3\2\2\2\u00ad\u00a8\3\2\2\2")
|
| 82 |
+
buf.write("\u00ad\u00a9\3\2\2\2\u00ad\u00aa\3\2\2\2\u00ad\u00ab\3")
|
| 83 |
+
buf.write("\2\2\2\u00ad\u00ac\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00ad")
|
| 84 |
+
buf.write("\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\37\3\2\2\2\u00b1\u00b3")
|
| 85 |
+
buf.write("\t\3\2\2\u00b2\u00b1\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4")
|
| 86 |
+
buf.write("\u00b2\3\2\2\2\u00b4\u00b5\3\2\2\2\u00b5!\3\2\2\2\33.")
|
| 87 |
+
buf.write("\60\66:DGRV[_aeks{}\u0086\u008d\u0091\u0096\u009a\u009f")
|
| 88 |
+
buf.write("\u00ad\u00af\u00b4")
|
| 89 |
+
return buf.getvalue()
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class OmegaConfGrammarParser ( Parser ):
|
| 93 |
+
|
| 94 |
+
grammarFileName = "OmegaConfGrammarParser.g4"
|
| 95 |
+
|
| 96 |
+
atn = ATNDeserializer().deserialize(serializedATN())
|
| 97 |
+
|
| 98 |
+
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
|
| 99 |
+
|
| 100 |
+
sharedContextCache = PredictionContextCache()
|
| 101 |
+
|
| 102 |
+
literalNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
|
| 103 |
+
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
|
| 104 |
+
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
|
| 105 |
+
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
|
| 106 |
+
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
|
| 107 |
+
"<INVALID>", "<INVALID>", "'.'", "<INVALID>", "<INVALID>",
|
| 108 |
+
"<INVALID>", "<INVALID>", "'['", "']'" ]
|
| 109 |
+
|
| 110 |
+
symbolicNames = [ "<INVALID>", "ANY_STR", "ESC_INTER", "TOP_ESC", "INTER_OPEN",
|
| 111 |
+
"BRACE_OPEN", "BRACE_CLOSE", "QUOTE_OPEN_SINGLE",
|
| 112 |
+
"QUOTE_OPEN_DOUBLE", "COMMA", "BRACKET_OPEN", "BRACKET_CLOSE",
|
| 113 |
+
"COLON", "FLOAT", "INT", "BOOL", "NULL", "UNQUOTED_CHAR",
|
| 114 |
+
"ID", "ESC", "WS", "INTER_CLOSE", "DOT", "INTER_KEY",
|
| 115 |
+
"MATCHING_QUOTE_CLOSE", "QUOTED_ESC", "DOLLAR", "INTER_BRACKET_OPEN",
|
| 116 |
+
"INTER_BRACKET_CLOSE" ]
|
| 117 |
+
|
| 118 |
+
RULE_configValue = 0
|
| 119 |
+
RULE_singleElement = 1
|
| 120 |
+
RULE_text = 2
|
| 121 |
+
RULE_element = 3
|
| 122 |
+
RULE_listContainer = 4
|
| 123 |
+
RULE_dictContainer = 5
|
| 124 |
+
RULE_dictKeyValuePair = 6
|
| 125 |
+
RULE_sequence = 7
|
| 126 |
+
RULE_interpolation = 8
|
| 127 |
+
RULE_interpolationNode = 9
|
| 128 |
+
RULE_interpolationResolver = 10
|
| 129 |
+
RULE_configKey = 11
|
| 130 |
+
RULE_resolverName = 12
|
| 131 |
+
RULE_quotedValue = 13
|
| 132 |
+
RULE_primitive = 14
|
| 133 |
+
RULE_dictKey = 15
|
| 134 |
+
|
| 135 |
+
ruleNames = [ "configValue", "singleElement", "text", "element", "listContainer",
|
| 136 |
+
"dictContainer", "dictKeyValuePair", "sequence", "interpolation",
|
| 137 |
+
"interpolationNode", "interpolationResolver", "configKey",
|
| 138 |
+
"resolverName", "quotedValue", "primitive", "dictKey" ]
|
| 139 |
+
|
| 140 |
+
EOF = Token.EOF
|
| 141 |
+
ANY_STR=1
|
| 142 |
+
ESC_INTER=2
|
| 143 |
+
TOP_ESC=3
|
| 144 |
+
INTER_OPEN=4
|
| 145 |
+
BRACE_OPEN=5
|
| 146 |
+
BRACE_CLOSE=6
|
| 147 |
+
QUOTE_OPEN_SINGLE=7
|
| 148 |
+
QUOTE_OPEN_DOUBLE=8
|
| 149 |
+
COMMA=9
|
| 150 |
+
BRACKET_OPEN=10
|
| 151 |
+
BRACKET_CLOSE=11
|
| 152 |
+
COLON=12
|
| 153 |
+
FLOAT=13
|
| 154 |
+
INT=14
|
| 155 |
+
BOOL=15
|
| 156 |
+
NULL=16
|
| 157 |
+
UNQUOTED_CHAR=17
|
| 158 |
+
ID=18
|
| 159 |
+
ESC=19
|
| 160 |
+
WS=20
|
| 161 |
+
INTER_CLOSE=21
|
| 162 |
+
DOT=22
|
| 163 |
+
INTER_KEY=23
|
| 164 |
+
MATCHING_QUOTE_CLOSE=24
|
| 165 |
+
QUOTED_ESC=25
|
| 166 |
+
DOLLAR=26
|
| 167 |
+
INTER_BRACKET_OPEN=27
|
| 168 |
+
INTER_BRACKET_CLOSE=28
|
| 169 |
+
|
| 170 |
+
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
|
| 171 |
+
super().__init__(input, output)
|
| 172 |
+
self.checkVersion("4.9.3")
|
| 173 |
+
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
|
| 174 |
+
self._predicates = None
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
class ConfigValueContext(ParserRuleContext):
|
| 180 |
+
__slots__ = 'parser'
|
| 181 |
+
|
| 182 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 183 |
+
super().__init__(parent, invokingState)
|
| 184 |
+
self.parser = parser
|
| 185 |
+
|
| 186 |
+
def text(self):
|
| 187 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.TextContext,0)
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def EOF(self):
|
| 191 |
+
return self.getToken(OmegaConfGrammarParser.EOF, 0)
|
| 192 |
+
|
| 193 |
+
def getRuleIndex(self):
|
| 194 |
+
return OmegaConfGrammarParser.RULE_configValue
|
| 195 |
+
|
| 196 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 197 |
+
if hasattr( listener, "enterConfigValue" ):
|
| 198 |
+
listener.enterConfigValue(self)
|
| 199 |
+
|
| 200 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 201 |
+
if hasattr( listener, "exitConfigValue" ):
|
| 202 |
+
listener.exitConfigValue(self)
|
| 203 |
+
|
| 204 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 205 |
+
if hasattr( visitor, "visitConfigValue" ):
|
| 206 |
+
return visitor.visitConfigValue(self)
|
| 207 |
+
else:
|
| 208 |
+
return visitor.visitChildren(self)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def configValue(self):
|
| 214 |
+
|
| 215 |
+
localctx = OmegaConfGrammarParser.ConfigValueContext(self, self._ctx, self.state)
|
| 216 |
+
self.enterRule(localctx, 0, self.RULE_configValue)
|
| 217 |
+
try:
|
| 218 |
+
self.enterOuterAlt(localctx, 1)
|
| 219 |
+
self.state = 32
|
| 220 |
+
self.text()
|
| 221 |
+
self.state = 33
|
| 222 |
+
self.match(OmegaConfGrammarParser.EOF)
|
| 223 |
+
except RecognitionException as re:
|
| 224 |
+
localctx.exception = re
|
| 225 |
+
self._errHandler.reportError(self, re)
|
| 226 |
+
self._errHandler.recover(self, re)
|
| 227 |
+
finally:
|
| 228 |
+
self.exitRule()
|
| 229 |
+
return localctx
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
class SingleElementContext(ParserRuleContext):
|
| 233 |
+
__slots__ = 'parser'
|
| 234 |
+
|
| 235 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 236 |
+
super().__init__(parent, invokingState)
|
| 237 |
+
self.parser = parser
|
| 238 |
+
|
| 239 |
+
def element(self):
|
| 240 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.ElementContext,0)
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def EOF(self):
|
| 244 |
+
return self.getToken(OmegaConfGrammarParser.EOF, 0)
|
| 245 |
+
|
| 246 |
+
def getRuleIndex(self):
|
| 247 |
+
return OmegaConfGrammarParser.RULE_singleElement
|
| 248 |
+
|
| 249 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 250 |
+
if hasattr( listener, "enterSingleElement" ):
|
| 251 |
+
listener.enterSingleElement(self)
|
| 252 |
+
|
| 253 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 254 |
+
if hasattr( listener, "exitSingleElement" ):
|
| 255 |
+
listener.exitSingleElement(self)
|
| 256 |
+
|
| 257 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 258 |
+
if hasattr( visitor, "visitSingleElement" ):
|
| 259 |
+
return visitor.visitSingleElement(self)
|
| 260 |
+
else:
|
| 261 |
+
return visitor.visitChildren(self)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
def singleElement(self):
|
| 267 |
+
|
| 268 |
+
localctx = OmegaConfGrammarParser.SingleElementContext(self, self._ctx, self.state)
|
| 269 |
+
self.enterRule(localctx, 2, self.RULE_singleElement)
|
| 270 |
+
try:
|
| 271 |
+
self.enterOuterAlt(localctx, 1)
|
| 272 |
+
self.state = 35
|
| 273 |
+
self.element()
|
| 274 |
+
self.state = 36
|
| 275 |
+
self.match(OmegaConfGrammarParser.EOF)
|
| 276 |
+
except RecognitionException as re:
|
| 277 |
+
localctx.exception = re
|
| 278 |
+
self._errHandler.reportError(self, re)
|
| 279 |
+
self._errHandler.recover(self, re)
|
| 280 |
+
finally:
|
| 281 |
+
self.exitRule()
|
| 282 |
+
return localctx
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class TextContext(ParserRuleContext):
|
| 286 |
+
__slots__ = 'parser'
|
| 287 |
+
|
| 288 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 289 |
+
super().__init__(parent, invokingState)
|
| 290 |
+
self.parser = parser
|
| 291 |
+
|
| 292 |
+
def interpolation(self, i:int=None):
|
| 293 |
+
if i is None:
|
| 294 |
+
return self.getTypedRuleContexts(OmegaConfGrammarParser.InterpolationContext)
|
| 295 |
+
else:
|
| 296 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.InterpolationContext,i)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def ANY_STR(self, i:int=None):
|
| 300 |
+
if i is None:
|
| 301 |
+
return self.getTokens(OmegaConfGrammarParser.ANY_STR)
|
| 302 |
+
else:
|
| 303 |
+
return self.getToken(OmegaConfGrammarParser.ANY_STR, i)
|
| 304 |
+
|
| 305 |
+
def ESC(self, i:int=None):
|
| 306 |
+
if i is None:
|
| 307 |
+
return self.getTokens(OmegaConfGrammarParser.ESC)
|
| 308 |
+
else:
|
| 309 |
+
return self.getToken(OmegaConfGrammarParser.ESC, i)
|
| 310 |
+
|
| 311 |
+
def ESC_INTER(self, i:int=None):
|
| 312 |
+
if i is None:
|
| 313 |
+
return self.getTokens(OmegaConfGrammarParser.ESC_INTER)
|
| 314 |
+
else:
|
| 315 |
+
return self.getToken(OmegaConfGrammarParser.ESC_INTER, i)
|
| 316 |
+
|
| 317 |
+
def TOP_ESC(self, i:int=None):
|
| 318 |
+
if i is None:
|
| 319 |
+
return self.getTokens(OmegaConfGrammarParser.TOP_ESC)
|
| 320 |
+
else:
|
| 321 |
+
return self.getToken(OmegaConfGrammarParser.TOP_ESC, i)
|
| 322 |
+
|
| 323 |
+
def QUOTED_ESC(self, i:int=None):
|
| 324 |
+
if i is None:
|
| 325 |
+
return self.getTokens(OmegaConfGrammarParser.QUOTED_ESC)
|
| 326 |
+
else:
|
| 327 |
+
return self.getToken(OmegaConfGrammarParser.QUOTED_ESC, i)
|
| 328 |
+
|
| 329 |
+
def getRuleIndex(self):
|
| 330 |
+
return OmegaConfGrammarParser.RULE_text
|
| 331 |
+
|
| 332 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 333 |
+
if hasattr( listener, "enterText" ):
|
| 334 |
+
listener.enterText(self)
|
| 335 |
+
|
| 336 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 337 |
+
if hasattr( listener, "exitText" ):
|
| 338 |
+
listener.exitText(self)
|
| 339 |
+
|
| 340 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 341 |
+
if hasattr( visitor, "visitText" ):
|
| 342 |
+
return visitor.visitText(self)
|
| 343 |
+
else:
|
| 344 |
+
return visitor.visitChildren(self)
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
def text(self):
|
| 350 |
+
|
| 351 |
+
localctx = OmegaConfGrammarParser.TextContext(self, self._ctx, self.state)
|
| 352 |
+
self.enterRule(localctx, 4, self.RULE_text)
|
| 353 |
+
self._la = 0 # Token type
|
| 354 |
+
try:
|
| 355 |
+
self.enterOuterAlt(localctx, 1)
|
| 356 |
+
self.state = 44
|
| 357 |
+
self._errHandler.sync(self)
|
| 358 |
+
_la = self._input.LA(1)
|
| 359 |
+
while True:
|
| 360 |
+
self.state = 44
|
| 361 |
+
self._errHandler.sync(self)
|
| 362 |
+
token = self._input.LA(1)
|
| 363 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN]:
|
| 364 |
+
self.state = 38
|
| 365 |
+
self.interpolation()
|
| 366 |
+
pass
|
| 367 |
+
elif token in [OmegaConfGrammarParser.ANY_STR]:
|
| 368 |
+
self.state = 39
|
| 369 |
+
self.match(OmegaConfGrammarParser.ANY_STR)
|
| 370 |
+
pass
|
| 371 |
+
elif token in [OmegaConfGrammarParser.ESC]:
|
| 372 |
+
self.state = 40
|
| 373 |
+
self.match(OmegaConfGrammarParser.ESC)
|
| 374 |
+
pass
|
| 375 |
+
elif token in [OmegaConfGrammarParser.ESC_INTER]:
|
| 376 |
+
self.state = 41
|
| 377 |
+
self.match(OmegaConfGrammarParser.ESC_INTER)
|
| 378 |
+
pass
|
| 379 |
+
elif token in [OmegaConfGrammarParser.TOP_ESC]:
|
| 380 |
+
self.state = 42
|
| 381 |
+
self.match(OmegaConfGrammarParser.TOP_ESC)
|
| 382 |
+
pass
|
| 383 |
+
elif token in [OmegaConfGrammarParser.QUOTED_ESC]:
|
| 384 |
+
self.state = 43
|
| 385 |
+
self.match(OmegaConfGrammarParser.QUOTED_ESC)
|
| 386 |
+
pass
|
| 387 |
+
else:
|
| 388 |
+
raise NoViableAltException(self)
|
| 389 |
+
|
| 390 |
+
self.state = 46
|
| 391 |
+
self._errHandler.sync(self)
|
| 392 |
+
_la = self._input.LA(1)
|
| 393 |
+
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.ANY_STR) | (1 << OmegaConfGrammarParser.ESC_INTER) | (1 << OmegaConfGrammarParser.TOP_ESC) | (1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.QUOTED_ESC))) != 0)):
|
| 394 |
+
break
|
| 395 |
+
|
| 396 |
+
except RecognitionException as re:
|
| 397 |
+
localctx.exception = re
|
| 398 |
+
self._errHandler.reportError(self, re)
|
| 399 |
+
self._errHandler.recover(self, re)
|
| 400 |
+
finally:
|
| 401 |
+
self.exitRule()
|
| 402 |
+
return localctx
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
class ElementContext(ParserRuleContext):
|
| 406 |
+
__slots__ = 'parser'
|
| 407 |
+
|
| 408 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 409 |
+
super().__init__(parent, invokingState)
|
| 410 |
+
self.parser = parser
|
| 411 |
+
|
| 412 |
+
def primitive(self):
|
| 413 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.PrimitiveContext,0)
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
def quotedValue(self):
|
| 417 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.QuotedValueContext,0)
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def listContainer(self):
|
| 421 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.ListContainerContext,0)
|
| 422 |
+
|
| 423 |
+
|
| 424 |
+
def dictContainer(self):
|
| 425 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.DictContainerContext,0)
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
def getRuleIndex(self):
|
| 429 |
+
return OmegaConfGrammarParser.RULE_element
|
| 430 |
+
|
| 431 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 432 |
+
if hasattr( listener, "enterElement" ):
|
| 433 |
+
listener.enterElement(self)
|
| 434 |
+
|
| 435 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 436 |
+
if hasattr( listener, "exitElement" ):
|
| 437 |
+
listener.exitElement(self)
|
| 438 |
+
|
| 439 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 440 |
+
if hasattr( visitor, "visitElement" ):
|
| 441 |
+
return visitor.visitElement(self)
|
| 442 |
+
else:
|
| 443 |
+
return visitor.visitChildren(self)
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
def element(self):
|
| 449 |
+
|
| 450 |
+
localctx = OmegaConfGrammarParser.ElementContext(self, self._ctx, self.state)
|
| 451 |
+
self.enterRule(localctx, 6, self.RULE_element)
|
| 452 |
+
try:
|
| 453 |
+
self.state = 52
|
| 454 |
+
self._errHandler.sync(self)
|
| 455 |
+
token = self._input.LA(1)
|
| 456 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN, OmegaConfGrammarParser.COLON, OmegaConfGrammarParser.FLOAT, OmegaConfGrammarParser.INT, OmegaConfGrammarParser.BOOL, OmegaConfGrammarParser.NULL, OmegaConfGrammarParser.UNQUOTED_CHAR, OmegaConfGrammarParser.ID, OmegaConfGrammarParser.ESC, OmegaConfGrammarParser.WS]:
|
| 457 |
+
self.enterOuterAlt(localctx, 1)
|
| 458 |
+
self.state = 48
|
| 459 |
+
self.primitive()
|
| 460 |
+
pass
|
| 461 |
+
elif token in [OmegaConfGrammarParser.QUOTE_OPEN_SINGLE, OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE]:
|
| 462 |
+
self.enterOuterAlt(localctx, 2)
|
| 463 |
+
self.state = 49
|
| 464 |
+
self.quotedValue()
|
| 465 |
+
pass
|
| 466 |
+
elif token in [OmegaConfGrammarParser.BRACKET_OPEN]:
|
| 467 |
+
self.enterOuterAlt(localctx, 3)
|
| 468 |
+
self.state = 50
|
| 469 |
+
self.listContainer()
|
| 470 |
+
pass
|
| 471 |
+
elif token in [OmegaConfGrammarParser.BRACE_OPEN]:
|
| 472 |
+
self.enterOuterAlt(localctx, 4)
|
| 473 |
+
self.state = 51
|
| 474 |
+
self.dictContainer()
|
| 475 |
+
pass
|
| 476 |
+
else:
|
| 477 |
+
raise NoViableAltException(self)
|
| 478 |
+
|
| 479 |
+
except RecognitionException as re:
|
| 480 |
+
localctx.exception = re
|
| 481 |
+
self._errHandler.reportError(self, re)
|
| 482 |
+
self._errHandler.recover(self, re)
|
| 483 |
+
finally:
|
| 484 |
+
self.exitRule()
|
| 485 |
+
return localctx
|
| 486 |
+
|
| 487 |
+
|
| 488 |
+
class ListContainerContext(ParserRuleContext):
|
| 489 |
+
__slots__ = 'parser'
|
| 490 |
+
|
| 491 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 492 |
+
super().__init__(parent, invokingState)
|
| 493 |
+
self.parser = parser
|
| 494 |
+
|
| 495 |
+
def BRACKET_OPEN(self):
|
| 496 |
+
return self.getToken(OmegaConfGrammarParser.BRACKET_OPEN, 0)
|
| 497 |
+
|
| 498 |
+
def BRACKET_CLOSE(self):
|
| 499 |
+
return self.getToken(OmegaConfGrammarParser.BRACKET_CLOSE, 0)
|
| 500 |
+
|
| 501 |
+
def sequence(self):
|
| 502 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.SequenceContext,0)
|
| 503 |
+
|
| 504 |
+
|
| 505 |
+
def getRuleIndex(self):
|
| 506 |
+
return OmegaConfGrammarParser.RULE_listContainer
|
| 507 |
+
|
| 508 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 509 |
+
if hasattr( listener, "enterListContainer" ):
|
| 510 |
+
listener.enterListContainer(self)
|
| 511 |
+
|
| 512 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 513 |
+
if hasattr( listener, "exitListContainer" ):
|
| 514 |
+
listener.exitListContainer(self)
|
| 515 |
+
|
| 516 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 517 |
+
if hasattr( visitor, "visitListContainer" ):
|
| 518 |
+
return visitor.visitListContainer(self)
|
| 519 |
+
else:
|
| 520 |
+
return visitor.visitChildren(self)
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def listContainer(self):
|
| 526 |
+
|
| 527 |
+
localctx = OmegaConfGrammarParser.ListContainerContext(self, self._ctx, self.state)
|
| 528 |
+
self.enterRule(localctx, 8, self.RULE_listContainer)
|
| 529 |
+
self._la = 0 # Token type
|
| 530 |
+
try:
|
| 531 |
+
self.enterOuterAlt(localctx, 1)
|
| 532 |
+
self.state = 54
|
| 533 |
+
self.match(OmegaConfGrammarParser.BRACKET_OPEN)
|
| 534 |
+
self.state = 56
|
| 535 |
+
self._errHandler.sync(self)
|
| 536 |
+
_la = self._input.LA(1)
|
| 537 |
+
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.BRACE_OPEN) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_SINGLE) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE) | (1 << OmegaConfGrammarParser.COMMA) | (1 << OmegaConfGrammarParser.BRACKET_OPEN) | (1 << OmegaConfGrammarParser.COLON) | (1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0):
|
| 538 |
+
self.state = 55
|
| 539 |
+
self.sequence()
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
self.state = 58
|
| 543 |
+
self.match(OmegaConfGrammarParser.BRACKET_CLOSE)
|
| 544 |
+
except RecognitionException as re:
|
| 545 |
+
localctx.exception = re
|
| 546 |
+
self._errHandler.reportError(self, re)
|
| 547 |
+
self._errHandler.recover(self, re)
|
| 548 |
+
finally:
|
| 549 |
+
self.exitRule()
|
| 550 |
+
return localctx
|
| 551 |
+
|
| 552 |
+
|
| 553 |
+
class DictContainerContext(ParserRuleContext):
|
| 554 |
+
__slots__ = 'parser'
|
| 555 |
+
|
| 556 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 557 |
+
super().__init__(parent, invokingState)
|
| 558 |
+
self.parser = parser
|
| 559 |
+
|
| 560 |
+
def BRACE_OPEN(self):
|
| 561 |
+
return self.getToken(OmegaConfGrammarParser.BRACE_OPEN, 0)
|
| 562 |
+
|
| 563 |
+
def BRACE_CLOSE(self):
|
| 564 |
+
return self.getToken(OmegaConfGrammarParser.BRACE_CLOSE, 0)
|
| 565 |
+
|
| 566 |
+
def dictKeyValuePair(self, i:int=None):
|
| 567 |
+
if i is None:
|
| 568 |
+
return self.getTypedRuleContexts(OmegaConfGrammarParser.DictKeyValuePairContext)
|
| 569 |
+
else:
|
| 570 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.DictKeyValuePairContext,i)
|
| 571 |
+
|
| 572 |
+
|
| 573 |
+
def COMMA(self, i:int=None):
|
| 574 |
+
if i is None:
|
| 575 |
+
return self.getTokens(OmegaConfGrammarParser.COMMA)
|
| 576 |
+
else:
|
| 577 |
+
return self.getToken(OmegaConfGrammarParser.COMMA, i)
|
| 578 |
+
|
| 579 |
+
def getRuleIndex(self):
|
| 580 |
+
return OmegaConfGrammarParser.RULE_dictContainer
|
| 581 |
+
|
| 582 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 583 |
+
if hasattr( listener, "enterDictContainer" ):
|
| 584 |
+
listener.enterDictContainer(self)
|
| 585 |
+
|
| 586 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 587 |
+
if hasattr( listener, "exitDictContainer" ):
|
| 588 |
+
listener.exitDictContainer(self)
|
| 589 |
+
|
| 590 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 591 |
+
if hasattr( visitor, "visitDictContainer" ):
|
| 592 |
+
return visitor.visitDictContainer(self)
|
| 593 |
+
else:
|
| 594 |
+
return visitor.visitChildren(self)
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
|
| 599 |
+
def dictContainer(self):
|
| 600 |
+
|
| 601 |
+
localctx = OmegaConfGrammarParser.DictContainerContext(self, self._ctx, self.state)
|
| 602 |
+
self.enterRule(localctx, 10, self.RULE_dictContainer)
|
| 603 |
+
self._la = 0 # Token type
|
| 604 |
+
try:
|
| 605 |
+
self.enterOuterAlt(localctx, 1)
|
| 606 |
+
self.state = 60
|
| 607 |
+
self.match(OmegaConfGrammarParser.BRACE_OPEN)
|
| 608 |
+
self.state = 69
|
| 609 |
+
self._errHandler.sync(self)
|
| 610 |
+
_la = self._input.LA(1)
|
| 611 |
+
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0):
|
| 612 |
+
self.state = 61
|
| 613 |
+
self.dictKeyValuePair()
|
| 614 |
+
self.state = 66
|
| 615 |
+
self._errHandler.sync(self)
|
| 616 |
+
_la = self._input.LA(1)
|
| 617 |
+
while _la==OmegaConfGrammarParser.COMMA:
|
| 618 |
+
self.state = 62
|
| 619 |
+
self.match(OmegaConfGrammarParser.COMMA)
|
| 620 |
+
self.state = 63
|
| 621 |
+
self.dictKeyValuePair()
|
| 622 |
+
self.state = 68
|
| 623 |
+
self._errHandler.sync(self)
|
| 624 |
+
_la = self._input.LA(1)
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
|
| 628 |
+
self.state = 71
|
| 629 |
+
self.match(OmegaConfGrammarParser.BRACE_CLOSE)
|
| 630 |
+
except RecognitionException as re:
|
| 631 |
+
localctx.exception = re
|
| 632 |
+
self._errHandler.reportError(self, re)
|
| 633 |
+
self._errHandler.recover(self, re)
|
| 634 |
+
finally:
|
| 635 |
+
self.exitRule()
|
| 636 |
+
return localctx
|
| 637 |
+
|
| 638 |
+
|
| 639 |
+
class DictKeyValuePairContext(ParserRuleContext):
|
| 640 |
+
__slots__ = 'parser'
|
| 641 |
+
|
| 642 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 643 |
+
super().__init__(parent, invokingState)
|
| 644 |
+
self.parser = parser
|
| 645 |
+
|
| 646 |
+
def dictKey(self):
|
| 647 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.DictKeyContext,0)
|
| 648 |
+
|
| 649 |
+
|
| 650 |
+
def COLON(self):
|
| 651 |
+
return self.getToken(OmegaConfGrammarParser.COLON, 0)
|
| 652 |
+
|
| 653 |
+
def element(self):
|
| 654 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.ElementContext,0)
|
| 655 |
+
|
| 656 |
+
|
| 657 |
+
def getRuleIndex(self):
|
| 658 |
+
return OmegaConfGrammarParser.RULE_dictKeyValuePair
|
| 659 |
+
|
| 660 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 661 |
+
if hasattr( listener, "enterDictKeyValuePair" ):
|
| 662 |
+
listener.enterDictKeyValuePair(self)
|
| 663 |
+
|
| 664 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 665 |
+
if hasattr( listener, "exitDictKeyValuePair" ):
|
| 666 |
+
listener.exitDictKeyValuePair(self)
|
| 667 |
+
|
| 668 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 669 |
+
if hasattr( visitor, "visitDictKeyValuePair" ):
|
| 670 |
+
return visitor.visitDictKeyValuePair(self)
|
| 671 |
+
else:
|
| 672 |
+
return visitor.visitChildren(self)
|
| 673 |
+
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
def dictKeyValuePair(self):
|
| 678 |
+
|
| 679 |
+
localctx = OmegaConfGrammarParser.DictKeyValuePairContext(self, self._ctx, self.state)
|
| 680 |
+
self.enterRule(localctx, 12, self.RULE_dictKeyValuePair)
|
| 681 |
+
try:
|
| 682 |
+
self.enterOuterAlt(localctx, 1)
|
| 683 |
+
self.state = 73
|
| 684 |
+
self.dictKey()
|
| 685 |
+
self.state = 74
|
| 686 |
+
self.match(OmegaConfGrammarParser.COLON)
|
| 687 |
+
self.state = 75
|
| 688 |
+
self.element()
|
| 689 |
+
except RecognitionException as re:
|
| 690 |
+
localctx.exception = re
|
| 691 |
+
self._errHandler.reportError(self, re)
|
| 692 |
+
self._errHandler.recover(self, re)
|
| 693 |
+
finally:
|
| 694 |
+
self.exitRule()
|
| 695 |
+
return localctx
|
| 696 |
+
|
| 697 |
+
|
| 698 |
+
class SequenceContext(ParserRuleContext):
|
| 699 |
+
__slots__ = 'parser'
|
| 700 |
+
|
| 701 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 702 |
+
super().__init__(parent, invokingState)
|
| 703 |
+
self.parser = parser
|
| 704 |
+
|
| 705 |
+
def element(self, i:int=None):
|
| 706 |
+
if i is None:
|
| 707 |
+
return self.getTypedRuleContexts(OmegaConfGrammarParser.ElementContext)
|
| 708 |
+
else:
|
| 709 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.ElementContext,i)
|
| 710 |
+
|
| 711 |
+
|
| 712 |
+
def COMMA(self, i:int=None):
|
| 713 |
+
if i is None:
|
| 714 |
+
return self.getTokens(OmegaConfGrammarParser.COMMA)
|
| 715 |
+
else:
|
| 716 |
+
return self.getToken(OmegaConfGrammarParser.COMMA, i)
|
| 717 |
+
|
| 718 |
+
def getRuleIndex(self):
|
| 719 |
+
return OmegaConfGrammarParser.RULE_sequence
|
| 720 |
+
|
| 721 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 722 |
+
if hasattr( listener, "enterSequence" ):
|
| 723 |
+
listener.enterSequence(self)
|
| 724 |
+
|
| 725 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 726 |
+
if hasattr( listener, "exitSequence" ):
|
| 727 |
+
listener.exitSequence(self)
|
| 728 |
+
|
| 729 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 730 |
+
if hasattr( visitor, "visitSequence" ):
|
| 731 |
+
return visitor.visitSequence(self)
|
| 732 |
+
else:
|
| 733 |
+
return visitor.visitChildren(self)
|
| 734 |
+
|
| 735 |
+
|
| 736 |
+
|
| 737 |
+
|
| 738 |
+
def sequence(self):
|
| 739 |
+
|
| 740 |
+
localctx = OmegaConfGrammarParser.SequenceContext(self, self._ctx, self.state)
|
| 741 |
+
self.enterRule(localctx, 14, self.RULE_sequence)
|
| 742 |
+
self._la = 0 # Token type
|
| 743 |
+
try:
|
| 744 |
+
self.state = 95
|
| 745 |
+
self._errHandler.sync(self)
|
| 746 |
+
token = self._input.LA(1)
|
| 747 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN, OmegaConfGrammarParser.BRACE_OPEN, OmegaConfGrammarParser.QUOTE_OPEN_SINGLE, OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE, OmegaConfGrammarParser.BRACKET_OPEN, OmegaConfGrammarParser.COLON, OmegaConfGrammarParser.FLOAT, OmegaConfGrammarParser.INT, OmegaConfGrammarParser.BOOL, OmegaConfGrammarParser.NULL, OmegaConfGrammarParser.UNQUOTED_CHAR, OmegaConfGrammarParser.ID, OmegaConfGrammarParser.ESC, OmegaConfGrammarParser.WS]:
|
| 748 |
+
self.enterOuterAlt(localctx, 1)
|
| 749 |
+
self.state = 77
|
| 750 |
+
self.element()
|
| 751 |
+
self.state = 84
|
| 752 |
+
self._errHandler.sync(self)
|
| 753 |
+
_la = self._input.LA(1)
|
| 754 |
+
while _la==OmegaConfGrammarParser.COMMA:
|
| 755 |
+
self.state = 78
|
| 756 |
+
self.match(OmegaConfGrammarParser.COMMA)
|
| 757 |
+
self.state = 80
|
| 758 |
+
self._errHandler.sync(self)
|
| 759 |
+
_la = self._input.LA(1)
|
| 760 |
+
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.BRACE_OPEN) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_SINGLE) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE) | (1 << OmegaConfGrammarParser.BRACKET_OPEN) | (1 << OmegaConfGrammarParser.COLON) | (1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0):
|
| 761 |
+
self.state = 79
|
| 762 |
+
self.element()
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
self.state = 86
|
| 766 |
+
self._errHandler.sync(self)
|
| 767 |
+
_la = self._input.LA(1)
|
| 768 |
+
|
| 769 |
+
pass
|
| 770 |
+
elif token in [OmegaConfGrammarParser.COMMA]:
|
| 771 |
+
self.enterOuterAlt(localctx, 2)
|
| 772 |
+
self.state = 91
|
| 773 |
+
self._errHandler.sync(self)
|
| 774 |
+
_la = self._input.LA(1)
|
| 775 |
+
while True:
|
| 776 |
+
self.state = 87
|
| 777 |
+
self.match(OmegaConfGrammarParser.COMMA)
|
| 778 |
+
self.state = 89
|
| 779 |
+
self._errHandler.sync(self)
|
| 780 |
+
_la = self._input.LA(1)
|
| 781 |
+
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.BRACE_OPEN) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_SINGLE) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE) | (1 << OmegaConfGrammarParser.BRACKET_OPEN) | (1 << OmegaConfGrammarParser.COLON) | (1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0):
|
| 782 |
+
self.state = 88
|
| 783 |
+
self.element()
|
| 784 |
+
|
| 785 |
+
|
| 786 |
+
self.state = 93
|
| 787 |
+
self._errHandler.sync(self)
|
| 788 |
+
_la = self._input.LA(1)
|
| 789 |
+
if not (_la==OmegaConfGrammarParser.COMMA):
|
| 790 |
+
break
|
| 791 |
+
|
| 792 |
+
pass
|
| 793 |
+
else:
|
| 794 |
+
raise NoViableAltException(self)
|
| 795 |
+
|
| 796 |
+
except RecognitionException as re:
|
| 797 |
+
localctx.exception = re
|
| 798 |
+
self._errHandler.reportError(self, re)
|
| 799 |
+
self._errHandler.recover(self, re)
|
| 800 |
+
finally:
|
| 801 |
+
self.exitRule()
|
| 802 |
+
return localctx
|
| 803 |
+
|
| 804 |
+
|
| 805 |
+
class InterpolationContext(ParserRuleContext):
|
| 806 |
+
__slots__ = 'parser'
|
| 807 |
+
|
| 808 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 809 |
+
super().__init__(parent, invokingState)
|
| 810 |
+
self.parser = parser
|
| 811 |
+
|
| 812 |
+
def interpolationNode(self):
|
| 813 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.InterpolationNodeContext,0)
|
| 814 |
+
|
| 815 |
+
|
| 816 |
+
def interpolationResolver(self):
|
| 817 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.InterpolationResolverContext,0)
|
| 818 |
+
|
| 819 |
+
|
| 820 |
+
def getRuleIndex(self):
|
| 821 |
+
return OmegaConfGrammarParser.RULE_interpolation
|
| 822 |
+
|
| 823 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 824 |
+
if hasattr( listener, "enterInterpolation" ):
|
| 825 |
+
listener.enterInterpolation(self)
|
| 826 |
+
|
| 827 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 828 |
+
if hasattr( listener, "exitInterpolation" ):
|
| 829 |
+
listener.exitInterpolation(self)
|
| 830 |
+
|
| 831 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 832 |
+
if hasattr( visitor, "visitInterpolation" ):
|
| 833 |
+
return visitor.visitInterpolation(self)
|
| 834 |
+
else:
|
| 835 |
+
return visitor.visitChildren(self)
|
| 836 |
+
|
| 837 |
+
|
| 838 |
+
|
| 839 |
+
|
| 840 |
+
def interpolation(self):
|
| 841 |
+
|
| 842 |
+
localctx = OmegaConfGrammarParser.InterpolationContext(self, self._ctx, self.state)
|
| 843 |
+
self.enterRule(localctx, 16, self.RULE_interpolation)
|
| 844 |
+
try:
|
| 845 |
+
self.state = 99
|
| 846 |
+
self._errHandler.sync(self)
|
| 847 |
+
la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
|
| 848 |
+
if la_ == 1:
|
| 849 |
+
self.enterOuterAlt(localctx, 1)
|
| 850 |
+
self.state = 97
|
| 851 |
+
self.interpolationNode()
|
| 852 |
+
pass
|
| 853 |
+
|
| 854 |
+
elif la_ == 2:
|
| 855 |
+
self.enterOuterAlt(localctx, 2)
|
| 856 |
+
self.state = 98
|
| 857 |
+
self.interpolationResolver()
|
| 858 |
+
pass
|
| 859 |
+
|
| 860 |
+
|
| 861 |
+
except RecognitionException as re:
|
| 862 |
+
localctx.exception = re
|
| 863 |
+
self._errHandler.reportError(self, re)
|
| 864 |
+
self._errHandler.recover(self, re)
|
| 865 |
+
finally:
|
| 866 |
+
self.exitRule()
|
| 867 |
+
return localctx
|
| 868 |
+
|
| 869 |
+
|
| 870 |
+
class InterpolationNodeContext(ParserRuleContext):
|
| 871 |
+
__slots__ = 'parser'
|
| 872 |
+
|
| 873 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 874 |
+
super().__init__(parent, invokingState)
|
| 875 |
+
self.parser = parser
|
| 876 |
+
|
| 877 |
+
def INTER_OPEN(self):
|
| 878 |
+
return self.getToken(OmegaConfGrammarParser.INTER_OPEN, 0)
|
| 879 |
+
|
| 880 |
+
def INTER_CLOSE(self):
|
| 881 |
+
return self.getToken(OmegaConfGrammarParser.INTER_CLOSE, 0)
|
| 882 |
+
|
| 883 |
+
def configKey(self, i:int=None):
|
| 884 |
+
if i is None:
|
| 885 |
+
return self.getTypedRuleContexts(OmegaConfGrammarParser.ConfigKeyContext)
|
| 886 |
+
else:
|
| 887 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.ConfigKeyContext,i)
|
| 888 |
+
|
| 889 |
+
|
| 890 |
+
def BRACKET_OPEN(self, i:int=None):
|
| 891 |
+
if i is None:
|
| 892 |
+
return self.getTokens(OmegaConfGrammarParser.BRACKET_OPEN)
|
| 893 |
+
else:
|
| 894 |
+
return self.getToken(OmegaConfGrammarParser.BRACKET_OPEN, i)
|
| 895 |
+
|
| 896 |
+
def BRACKET_CLOSE(self, i:int=None):
|
| 897 |
+
if i is None:
|
| 898 |
+
return self.getTokens(OmegaConfGrammarParser.BRACKET_CLOSE)
|
| 899 |
+
else:
|
| 900 |
+
return self.getToken(OmegaConfGrammarParser.BRACKET_CLOSE, i)
|
| 901 |
+
|
| 902 |
+
def DOT(self, i:int=None):
|
| 903 |
+
if i is None:
|
| 904 |
+
return self.getTokens(OmegaConfGrammarParser.DOT)
|
| 905 |
+
else:
|
| 906 |
+
return self.getToken(OmegaConfGrammarParser.DOT, i)
|
| 907 |
+
|
| 908 |
+
def getRuleIndex(self):
|
| 909 |
+
return OmegaConfGrammarParser.RULE_interpolationNode
|
| 910 |
+
|
| 911 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 912 |
+
if hasattr( listener, "enterInterpolationNode" ):
|
| 913 |
+
listener.enterInterpolationNode(self)
|
| 914 |
+
|
| 915 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 916 |
+
if hasattr( listener, "exitInterpolationNode" ):
|
| 917 |
+
listener.exitInterpolationNode(self)
|
| 918 |
+
|
| 919 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 920 |
+
if hasattr( visitor, "visitInterpolationNode" ):
|
| 921 |
+
return visitor.visitInterpolationNode(self)
|
| 922 |
+
else:
|
| 923 |
+
return visitor.visitChildren(self)
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
|
| 927 |
+
|
| 928 |
+
def interpolationNode(self):
|
| 929 |
+
|
| 930 |
+
localctx = OmegaConfGrammarParser.InterpolationNodeContext(self, self._ctx, self.state)
|
| 931 |
+
self.enterRule(localctx, 18, self.RULE_interpolationNode)
|
| 932 |
+
self._la = 0 # Token type
|
| 933 |
+
try:
|
| 934 |
+
self.enterOuterAlt(localctx, 1)
|
| 935 |
+
self.state = 101
|
| 936 |
+
self.match(OmegaConfGrammarParser.INTER_OPEN)
|
| 937 |
+
self.state = 105
|
| 938 |
+
self._errHandler.sync(self)
|
| 939 |
+
_la = self._input.LA(1)
|
| 940 |
+
while _la==OmegaConfGrammarParser.DOT:
|
| 941 |
+
self.state = 102
|
| 942 |
+
self.match(OmegaConfGrammarParser.DOT)
|
| 943 |
+
self.state = 107
|
| 944 |
+
self._errHandler.sync(self)
|
| 945 |
+
_la = self._input.LA(1)
|
| 946 |
+
|
| 947 |
+
self.state = 113
|
| 948 |
+
self._errHandler.sync(self)
|
| 949 |
+
token = self._input.LA(1)
|
| 950 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN, OmegaConfGrammarParser.ID, OmegaConfGrammarParser.INTER_KEY]:
|
| 951 |
+
self.state = 108
|
| 952 |
+
self.configKey()
|
| 953 |
+
pass
|
| 954 |
+
elif token in [OmegaConfGrammarParser.BRACKET_OPEN]:
|
| 955 |
+
self.state = 109
|
| 956 |
+
self.match(OmegaConfGrammarParser.BRACKET_OPEN)
|
| 957 |
+
self.state = 110
|
| 958 |
+
self.configKey()
|
| 959 |
+
self.state = 111
|
| 960 |
+
self.match(OmegaConfGrammarParser.BRACKET_CLOSE)
|
| 961 |
+
pass
|
| 962 |
+
else:
|
| 963 |
+
raise NoViableAltException(self)
|
| 964 |
+
|
| 965 |
+
self.state = 123
|
| 966 |
+
self._errHandler.sync(self)
|
| 967 |
+
_la = self._input.LA(1)
|
| 968 |
+
while _la==OmegaConfGrammarParser.BRACKET_OPEN or _la==OmegaConfGrammarParser.DOT:
|
| 969 |
+
self.state = 121
|
| 970 |
+
self._errHandler.sync(self)
|
| 971 |
+
token = self._input.LA(1)
|
| 972 |
+
if token in [OmegaConfGrammarParser.DOT]:
|
| 973 |
+
self.state = 115
|
| 974 |
+
self.match(OmegaConfGrammarParser.DOT)
|
| 975 |
+
self.state = 116
|
| 976 |
+
self.configKey()
|
| 977 |
+
pass
|
| 978 |
+
elif token in [OmegaConfGrammarParser.BRACKET_OPEN]:
|
| 979 |
+
self.state = 117
|
| 980 |
+
self.match(OmegaConfGrammarParser.BRACKET_OPEN)
|
| 981 |
+
self.state = 118
|
| 982 |
+
self.configKey()
|
| 983 |
+
self.state = 119
|
| 984 |
+
self.match(OmegaConfGrammarParser.BRACKET_CLOSE)
|
| 985 |
+
pass
|
| 986 |
+
else:
|
| 987 |
+
raise NoViableAltException(self)
|
| 988 |
+
|
| 989 |
+
self.state = 125
|
| 990 |
+
self._errHandler.sync(self)
|
| 991 |
+
_la = self._input.LA(1)
|
| 992 |
+
|
| 993 |
+
self.state = 126
|
| 994 |
+
self.match(OmegaConfGrammarParser.INTER_CLOSE)
|
| 995 |
+
except RecognitionException as re:
|
| 996 |
+
localctx.exception = re
|
| 997 |
+
self._errHandler.reportError(self, re)
|
| 998 |
+
self._errHandler.recover(self, re)
|
| 999 |
+
finally:
|
| 1000 |
+
self.exitRule()
|
| 1001 |
+
return localctx
|
| 1002 |
+
|
| 1003 |
+
|
| 1004 |
+
class InterpolationResolverContext(ParserRuleContext):
|
| 1005 |
+
__slots__ = 'parser'
|
| 1006 |
+
|
| 1007 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 1008 |
+
super().__init__(parent, invokingState)
|
| 1009 |
+
self.parser = parser
|
| 1010 |
+
|
| 1011 |
+
def INTER_OPEN(self):
|
| 1012 |
+
return self.getToken(OmegaConfGrammarParser.INTER_OPEN, 0)
|
| 1013 |
+
|
| 1014 |
+
def resolverName(self):
|
| 1015 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.ResolverNameContext,0)
|
| 1016 |
+
|
| 1017 |
+
|
| 1018 |
+
def COLON(self):
|
| 1019 |
+
return self.getToken(OmegaConfGrammarParser.COLON, 0)
|
| 1020 |
+
|
| 1021 |
+
def BRACE_CLOSE(self):
|
| 1022 |
+
return self.getToken(OmegaConfGrammarParser.BRACE_CLOSE, 0)
|
| 1023 |
+
|
| 1024 |
+
def sequence(self):
|
| 1025 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.SequenceContext,0)
|
| 1026 |
+
|
| 1027 |
+
|
| 1028 |
+
def getRuleIndex(self):
|
| 1029 |
+
return OmegaConfGrammarParser.RULE_interpolationResolver
|
| 1030 |
+
|
| 1031 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 1032 |
+
if hasattr( listener, "enterInterpolationResolver" ):
|
| 1033 |
+
listener.enterInterpolationResolver(self)
|
| 1034 |
+
|
| 1035 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 1036 |
+
if hasattr( listener, "exitInterpolationResolver" ):
|
| 1037 |
+
listener.exitInterpolationResolver(self)
|
| 1038 |
+
|
| 1039 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 1040 |
+
if hasattr( visitor, "visitInterpolationResolver" ):
|
| 1041 |
+
return visitor.visitInterpolationResolver(self)
|
| 1042 |
+
else:
|
| 1043 |
+
return visitor.visitChildren(self)
|
| 1044 |
+
|
| 1045 |
+
|
| 1046 |
+
|
| 1047 |
+
|
| 1048 |
+
def interpolationResolver(self):
|
| 1049 |
+
|
| 1050 |
+
localctx = OmegaConfGrammarParser.InterpolationResolverContext(self, self._ctx, self.state)
|
| 1051 |
+
self.enterRule(localctx, 20, self.RULE_interpolationResolver)
|
| 1052 |
+
self._la = 0 # Token type
|
| 1053 |
+
try:
|
| 1054 |
+
self.enterOuterAlt(localctx, 1)
|
| 1055 |
+
self.state = 128
|
| 1056 |
+
self.match(OmegaConfGrammarParser.INTER_OPEN)
|
| 1057 |
+
self.state = 129
|
| 1058 |
+
self.resolverName()
|
| 1059 |
+
self.state = 130
|
| 1060 |
+
self.match(OmegaConfGrammarParser.COLON)
|
| 1061 |
+
self.state = 132
|
| 1062 |
+
self._errHandler.sync(self)
|
| 1063 |
+
_la = self._input.LA(1)
|
| 1064 |
+
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.BRACE_OPEN) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_SINGLE) | (1 << OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE) | (1 << OmegaConfGrammarParser.COMMA) | (1 << OmegaConfGrammarParser.BRACKET_OPEN) | (1 << OmegaConfGrammarParser.COLON) | (1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0):
|
| 1065 |
+
self.state = 131
|
| 1066 |
+
self.sequence()
|
| 1067 |
+
|
| 1068 |
+
|
| 1069 |
+
self.state = 134
|
| 1070 |
+
self.match(OmegaConfGrammarParser.BRACE_CLOSE)
|
| 1071 |
+
except RecognitionException as re:
|
| 1072 |
+
localctx.exception = re
|
| 1073 |
+
self._errHandler.reportError(self, re)
|
| 1074 |
+
self._errHandler.recover(self, re)
|
| 1075 |
+
finally:
|
| 1076 |
+
self.exitRule()
|
| 1077 |
+
return localctx
|
| 1078 |
+
|
| 1079 |
+
|
| 1080 |
+
class ConfigKeyContext(ParserRuleContext):
|
| 1081 |
+
__slots__ = 'parser'
|
| 1082 |
+
|
| 1083 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 1084 |
+
super().__init__(parent, invokingState)
|
| 1085 |
+
self.parser = parser
|
| 1086 |
+
|
| 1087 |
+
def interpolation(self):
|
| 1088 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.InterpolationContext,0)
|
| 1089 |
+
|
| 1090 |
+
|
| 1091 |
+
def ID(self):
|
| 1092 |
+
return self.getToken(OmegaConfGrammarParser.ID, 0)
|
| 1093 |
+
|
| 1094 |
+
def INTER_KEY(self):
|
| 1095 |
+
return self.getToken(OmegaConfGrammarParser.INTER_KEY, 0)
|
| 1096 |
+
|
| 1097 |
+
def getRuleIndex(self):
|
| 1098 |
+
return OmegaConfGrammarParser.RULE_configKey
|
| 1099 |
+
|
| 1100 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 1101 |
+
if hasattr( listener, "enterConfigKey" ):
|
| 1102 |
+
listener.enterConfigKey(self)
|
| 1103 |
+
|
| 1104 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 1105 |
+
if hasattr( listener, "exitConfigKey" ):
|
| 1106 |
+
listener.exitConfigKey(self)
|
| 1107 |
+
|
| 1108 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 1109 |
+
if hasattr( visitor, "visitConfigKey" ):
|
| 1110 |
+
return visitor.visitConfigKey(self)
|
| 1111 |
+
else:
|
| 1112 |
+
return visitor.visitChildren(self)
|
| 1113 |
+
|
| 1114 |
+
|
| 1115 |
+
|
| 1116 |
+
|
| 1117 |
+
def configKey(self):
|
| 1118 |
+
|
| 1119 |
+
localctx = OmegaConfGrammarParser.ConfigKeyContext(self, self._ctx, self.state)
|
| 1120 |
+
self.enterRule(localctx, 22, self.RULE_configKey)
|
| 1121 |
+
try:
|
| 1122 |
+
self.state = 139
|
| 1123 |
+
self._errHandler.sync(self)
|
| 1124 |
+
token = self._input.LA(1)
|
| 1125 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN]:
|
| 1126 |
+
self.enterOuterAlt(localctx, 1)
|
| 1127 |
+
self.state = 136
|
| 1128 |
+
self.interpolation()
|
| 1129 |
+
pass
|
| 1130 |
+
elif token in [OmegaConfGrammarParser.ID]:
|
| 1131 |
+
self.enterOuterAlt(localctx, 2)
|
| 1132 |
+
self.state = 137
|
| 1133 |
+
self.match(OmegaConfGrammarParser.ID)
|
| 1134 |
+
pass
|
| 1135 |
+
elif token in [OmegaConfGrammarParser.INTER_KEY]:
|
| 1136 |
+
self.enterOuterAlt(localctx, 3)
|
| 1137 |
+
self.state = 138
|
| 1138 |
+
self.match(OmegaConfGrammarParser.INTER_KEY)
|
| 1139 |
+
pass
|
| 1140 |
+
else:
|
| 1141 |
+
raise NoViableAltException(self)
|
| 1142 |
+
|
| 1143 |
+
except RecognitionException as re:
|
| 1144 |
+
localctx.exception = re
|
| 1145 |
+
self._errHandler.reportError(self, re)
|
| 1146 |
+
self._errHandler.recover(self, re)
|
| 1147 |
+
finally:
|
| 1148 |
+
self.exitRule()
|
| 1149 |
+
return localctx
|
| 1150 |
+
|
| 1151 |
+
|
| 1152 |
+
class ResolverNameContext(ParserRuleContext):
|
| 1153 |
+
__slots__ = 'parser'
|
| 1154 |
+
|
| 1155 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 1156 |
+
super().__init__(parent, invokingState)
|
| 1157 |
+
self.parser = parser
|
| 1158 |
+
|
| 1159 |
+
def interpolation(self, i:int=None):
|
| 1160 |
+
if i is None:
|
| 1161 |
+
return self.getTypedRuleContexts(OmegaConfGrammarParser.InterpolationContext)
|
| 1162 |
+
else:
|
| 1163 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.InterpolationContext,i)
|
| 1164 |
+
|
| 1165 |
+
|
| 1166 |
+
def ID(self, i:int=None):
|
| 1167 |
+
if i is None:
|
| 1168 |
+
return self.getTokens(OmegaConfGrammarParser.ID)
|
| 1169 |
+
else:
|
| 1170 |
+
return self.getToken(OmegaConfGrammarParser.ID, i)
|
| 1171 |
+
|
| 1172 |
+
def DOT(self, i:int=None):
|
| 1173 |
+
if i is None:
|
| 1174 |
+
return self.getTokens(OmegaConfGrammarParser.DOT)
|
| 1175 |
+
else:
|
| 1176 |
+
return self.getToken(OmegaConfGrammarParser.DOT, i)
|
| 1177 |
+
|
| 1178 |
+
def getRuleIndex(self):
|
| 1179 |
+
return OmegaConfGrammarParser.RULE_resolverName
|
| 1180 |
+
|
| 1181 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 1182 |
+
if hasattr( listener, "enterResolverName" ):
|
| 1183 |
+
listener.enterResolverName(self)
|
| 1184 |
+
|
| 1185 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 1186 |
+
if hasattr( listener, "exitResolverName" ):
|
| 1187 |
+
listener.exitResolverName(self)
|
| 1188 |
+
|
| 1189 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 1190 |
+
if hasattr( visitor, "visitResolverName" ):
|
| 1191 |
+
return visitor.visitResolverName(self)
|
| 1192 |
+
else:
|
| 1193 |
+
return visitor.visitChildren(self)
|
| 1194 |
+
|
| 1195 |
+
|
| 1196 |
+
|
| 1197 |
+
|
| 1198 |
+
def resolverName(self):
|
| 1199 |
+
|
| 1200 |
+
localctx = OmegaConfGrammarParser.ResolverNameContext(self, self._ctx, self.state)
|
| 1201 |
+
self.enterRule(localctx, 24, self.RULE_resolverName)
|
| 1202 |
+
self._la = 0 # Token type
|
| 1203 |
+
try:
|
| 1204 |
+
self.enterOuterAlt(localctx, 1)
|
| 1205 |
+
self.state = 143
|
| 1206 |
+
self._errHandler.sync(self)
|
| 1207 |
+
token = self._input.LA(1)
|
| 1208 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN]:
|
| 1209 |
+
self.state = 141
|
| 1210 |
+
self.interpolation()
|
| 1211 |
+
pass
|
| 1212 |
+
elif token in [OmegaConfGrammarParser.ID]:
|
| 1213 |
+
self.state = 142
|
| 1214 |
+
self.match(OmegaConfGrammarParser.ID)
|
| 1215 |
+
pass
|
| 1216 |
+
else:
|
| 1217 |
+
raise NoViableAltException(self)
|
| 1218 |
+
|
| 1219 |
+
self.state = 152
|
| 1220 |
+
self._errHandler.sync(self)
|
| 1221 |
+
_la = self._input.LA(1)
|
| 1222 |
+
while _la==OmegaConfGrammarParser.DOT:
|
| 1223 |
+
self.state = 145
|
| 1224 |
+
self.match(OmegaConfGrammarParser.DOT)
|
| 1225 |
+
self.state = 148
|
| 1226 |
+
self._errHandler.sync(self)
|
| 1227 |
+
token = self._input.LA(1)
|
| 1228 |
+
if token in [OmegaConfGrammarParser.INTER_OPEN]:
|
| 1229 |
+
self.state = 146
|
| 1230 |
+
self.interpolation()
|
| 1231 |
+
pass
|
| 1232 |
+
elif token in [OmegaConfGrammarParser.ID]:
|
| 1233 |
+
self.state = 147
|
| 1234 |
+
self.match(OmegaConfGrammarParser.ID)
|
| 1235 |
+
pass
|
| 1236 |
+
else:
|
| 1237 |
+
raise NoViableAltException(self)
|
| 1238 |
+
|
| 1239 |
+
self.state = 154
|
| 1240 |
+
self._errHandler.sync(self)
|
| 1241 |
+
_la = self._input.LA(1)
|
| 1242 |
+
|
| 1243 |
+
except RecognitionException as re:
|
| 1244 |
+
localctx.exception = re
|
| 1245 |
+
self._errHandler.reportError(self, re)
|
| 1246 |
+
self._errHandler.recover(self, re)
|
| 1247 |
+
finally:
|
| 1248 |
+
self.exitRule()
|
| 1249 |
+
return localctx
|
| 1250 |
+
|
| 1251 |
+
|
| 1252 |
+
class QuotedValueContext(ParserRuleContext):
|
| 1253 |
+
__slots__ = 'parser'
|
| 1254 |
+
|
| 1255 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 1256 |
+
super().__init__(parent, invokingState)
|
| 1257 |
+
self.parser = parser
|
| 1258 |
+
|
| 1259 |
+
def MATCHING_QUOTE_CLOSE(self):
|
| 1260 |
+
return self.getToken(OmegaConfGrammarParser.MATCHING_QUOTE_CLOSE, 0)
|
| 1261 |
+
|
| 1262 |
+
def QUOTE_OPEN_SINGLE(self):
|
| 1263 |
+
return self.getToken(OmegaConfGrammarParser.QUOTE_OPEN_SINGLE, 0)
|
| 1264 |
+
|
| 1265 |
+
def QUOTE_OPEN_DOUBLE(self):
|
| 1266 |
+
return self.getToken(OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE, 0)
|
| 1267 |
+
|
| 1268 |
+
def text(self):
|
| 1269 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.TextContext,0)
|
| 1270 |
+
|
| 1271 |
+
|
| 1272 |
+
def getRuleIndex(self):
|
| 1273 |
+
return OmegaConfGrammarParser.RULE_quotedValue
|
| 1274 |
+
|
| 1275 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 1276 |
+
if hasattr( listener, "enterQuotedValue" ):
|
| 1277 |
+
listener.enterQuotedValue(self)
|
| 1278 |
+
|
| 1279 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 1280 |
+
if hasattr( listener, "exitQuotedValue" ):
|
| 1281 |
+
listener.exitQuotedValue(self)
|
| 1282 |
+
|
| 1283 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 1284 |
+
if hasattr( visitor, "visitQuotedValue" ):
|
| 1285 |
+
return visitor.visitQuotedValue(self)
|
| 1286 |
+
else:
|
| 1287 |
+
return visitor.visitChildren(self)
|
| 1288 |
+
|
| 1289 |
+
|
| 1290 |
+
|
| 1291 |
+
|
| 1292 |
+
def quotedValue(self):
|
| 1293 |
+
|
| 1294 |
+
localctx = OmegaConfGrammarParser.QuotedValueContext(self, self._ctx, self.state)
|
| 1295 |
+
self.enterRule(localctx, 26, self.RULE_quotedValue)
|
| 1296 |
+
self._la = 0 # Token type
|
| 1297 |
+
try:
|
| 1298 |
+
self.enterOuterAlt(localctx, 1)
|
| 1299 |
+
self.state = 155
|
| 1300 |
+
_la = self._input.LA(1)
|
| 1301 |
+
if not(_la==OmegaConfGrammarParser.QUOTE_OPEN_SINGLE or _la==OmegaConfGrammarParser.QUOTE_OPEN_DOUBLE):
|
| 1302 |
+
self._errHandler.recoverInline(self)
|
| 1303 |
+
else:
|
| 1304 |
+
self._errHandler.reportMatch(self)
|
| 1305 |
+
self.consume()
|
| 1306 |
+
self.state = 157
|
| 1307 |
+
self._errHandler.sync(self)
|
| 1308 |
+
_la = self._input.LA(1)
|
| 1309 |
+
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.ANY_STR) | (1 << OmegaConfGrammarParser.ESC_INTER) | (1 << OmegaConfGrammarParser.TOP_ESC) | (1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.QUOTED_ESC))) != 0):
|
| 1310 |
+
self.state = 156
|
| 1311 |
+
self.text()
|
| 1312 |
+
|
| 1313 |
+
|
| 1314 |
+
self.state = 159
|
| 1315 |
+
self.match(OmegaConfGrammarParser.MATCHING_QUOTE_CLOSE)
|
| 1316 |
+
except RecognitionException as re:
|
| 1317 |
+
localctx.exception = re
|
| 1318 |
+
self._errHandler.reportError(self, re)
|
| 1319 |
+
self._errHandler.recover(self, re)
|
| 1320 |
+
finally:
|
| 1321 |
+
self.exitRule()
|
| 1322 |
+
return localctx
|
| 1323 |
+
|
| 1324 |
+
|
| 1325 |
+
class PrimitiveContext(ParserRuleContext):
|
| 1326 |
+
__slots__ = 'parser'
|
| 1327 |
+
|
| 1328 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 1329 |
+
super().__init__(parent, invokingState)
|
| 1330 |
+
self.parser = parser
|
| 1331 |
+
|
| 1332 |
+
def ID(self, i:int=None):
|
| 1333 |
+
if i is None:
|
| 1334 |
+
return self.getTokens(OmegaConfGrammarParser.ID)
|
| 1335 |
+
else:
|
| 1336 |
+
return self.getToken(OmegaConfGrammarParser.ID, i)
|
| 1337 |
+
|
| 1338 |
+
def NULL(self, i:int=None):
|
| 1339 |
+
if i is None:
|
| 1340 |
+
return self.getTokens(OmegaConfGrammarParser.NULL)
|
| 1341 |
+
else:
|
| 1342 |
+
return self.getToken(OmegaConfGrammarParser.NULL, i)
|
| 1343 |
+
|
| 1344 |
+
def INT(self, i:int=None):
|
| 1345 |
+
if i is None:
|
| 1346 |
+
return self.getTokens(OmegaConfGrammarParser.INT)
|
| 1347 |
+
else:
|
| 1348 |
+
return self.getToken(OmegaConfGrammarParser.INT, i)
|
| 1349 |
+
|
| 1350 |
+
def FLOAT(self, i:int=None):
|
| 1351 |
+
if i is None:
|
| 1352 |
+
return self.getTokens(OmegaConfGrammarParser.FLOAT)
|
| 1353 |
+
else:
|
| 1354 |
+
return self.getToken(OmegaConfGrammarParser.FLOAT, i)
|
| 1355 |
+
|
| 1356 |
+
def BOOL(self, i:int=None):
|
| 1357 |
+
if i is None:
|
| 1358 |
+
return self.getTokens(OmegaConfGrammarParser.BOOL)
|
| 1359 |
+
else:
|
| 1360 |
+
return self.getToken(OmegaConfGrammarParser.BOOL, i)
|
| 1361 |
+
|
| 1362 |
+
def UNQUOTED_CHAR(self, i:int=None):
|
| 1363 |
+
if i is None:
|
| 1364 |
+
return self.getTokens(OmegaConfGrammarParser.UNQUOTED_CHAR)
|
| 1365 |
+
else:
|
| 1366 |
+
return self.getToken(OmegaConfGrammarParser.UNQUOTED_CHAR, i)
|
| 1367 |
+
|
| 1368 |
+
def COLON(self, i:int=None):
|
| 1369 |
+
if i is None:
|
| 1370 |
+
return self.getTokens(OmegaConfGrammarParser.COLON)
|
| 1371 |
+
else:
|
| 1372 |
+
return self.getToken(OmegaConfGrammarParser.COLON, i)
|
| 1373 |
+
|
| 1374 |
+
def ESC(self, i:int=None):
|
| 1375 |
+
if i is None:
|
| 1376 |
+
return self.getTokens(OmegaConfGrammarParser.ESC)
|
| 1377 |
+
else:
|
| 1378 |
+
return self.getToken(OmegaConfGrammarParser.ESC, i)
|
| 1379 |
+
|
| 1380 |
+
def WS(self, i:int=None):
|
| 1381 |
+
if i is None:
|
| 1382 |
+
return self.getTokens(OmegaConfGrammarParser.WS)
|
| 1383 |
+
else:
|
| 1384 |
+
return self.getToken(OmegaConfGrammarParser.WS, i)
|
| 1385 |
+
|
| 1386 |
+
def interpolation(self, i:int=None):
|
| 1387 |
+
if i is None:
|
| 1388 |
+
return self.getTypedRuleContexts(OmegaConfGrammarParser.InterpolationContext)
|
| 1389 |
+
else:
|
| 1390 |
+
return self.getTypedRuleContext(OmegaConfGrammarParser.InterpolationContext,i)
|
| 1391 |
+
|
| 1392 |
+
|
| 1393 |
+
def getRuleIndex(self):
|
| 1394 |
+
return OmegaConfGrammarParser.RULE_primitive
|
| 1395 |
+
|
| 1396 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 1397 |
+
if hasattr( listener, "enterPrimitive" ):
|
| 1398 |
+
listener.enterPrimitive(self)
|
| 1399 |
+
|
| 1400 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 1401 |
+
if hasattr( listener, "exitPrimitive" ):
|
| 1402 |
+
listener.exitPrimitive(self)
|
| 1403 |
+
|
| 1404 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 1405 |
+
if hasattr( visitor, "visitPrimitive" ):
|
| 1406 |
+
return visitor.visitPrimitive(self)
|
| 1407 |
+
else:
|
| 1408 |
+
return visitor.visitChildren(self)
|
| 1409 |
+
|
| 1410 |
+
|
| 1411 |
+
|
| 1412 |
+
|
| 1413 |
+
def primitive(self):
|
| 1414 |
+
|
| 1415 |
+
localctx = OmegaConfGrammarParser.PrimitiveContext(self, self._ctx, self.state)
|
| 1416 |
+
self.enterRule(localctx, 28, self.RULE_primitive)
|
| 1417 |
+
self._la = 0 # Token type
|
| 1418 |
+
try:
|
| 1419 |
+
self.enterOuterAlt(localctx, 1)
|
| 1420 |
+
self.state = 171
|
| 1421 |
+
self._errHandler.sync(self)
|
| 1422 |
+
_la = self._input.LA(1)
|
| 1423 |
+
while True:
|
| 1424 |
+
self.state = 171
|
| 1425 |
+
self._errHandler.sync(self)
|
| 1426 |
+
token = self._input.LA(1)
|
| 1427 |
+
if token in [OmegaConfGrammarParser.ID]:
|
| 1428 |
+
self.state = 161
|
| 1429 |
+
self.match(OmegaConfGrammarParser.ID)
|
| 1430 |
+
pass
|
| 1431 |
+
elif token in [OmegaConfGrammarParser.NULL]:
|
| 1432 |
+
self.state = 162
|
| 1433 |
+
self.match(OmegaConfGrammarParser.NULL)
|
| 1434 |
+
pass
|
| 1435 |
+
elif token in [OmegaConfGrammarParser.INT]:
|
| 1436 |
+
self.state = 163
|
| 1437 |
+
self.match(OmegaConfGrammarParser.INT)
|
| 1438 |
+
pass
|
| 1439 |
+
elif token in [OmegaConfGrammarParser.FLOAT]:
|
| 1440 |
+
self.state = 164
|
| 1441 |
+
self.match(OmegaConfGrammarParser.FLOAT)
|
| 1442 |
+
pass
|
| 1443 |
+
elif token in [OmegaConfGrammarParser.BOOL]:
|
| 1444 |
+
self.state = 165
|
| 1445 |
+
self.match(OmegaConfGrammarParser.BOOL)
|
| 1446 |
+
pass
|
| 1447 |
+
elif token in [OmegaConfGrammarParser.UNQUOTED_CHAR]:
|
| 1448 |
+
self.state = 166
|
| 1449 |
+
self.match(OmegaConfGrammarParser.UNQUOTED_CHAR)
|
| 1450 |
+
pass
|
| 1451 |
+
elif token in [OmegaConfGrammarParser.COLON]:
|
| 1452 |
+
self.state = 167
|
| 1453 |
+
self.match(OmegaConfGrammarParser.COLON)
|
| 1454 |
+
pass
|
| 1455 |
+
elif token in [OmegaConfGrammarParser.ESC]:
|
| 1456 |
+
self.state = 168
|
| 1457 |
+
self.match(OmegaConfGrammarParser.ESC)
|
| 1458 |
+
pass
|
| 1459 |
+
elif token in [OmegaConfGrammarParser.WS]:
|
| 1460 |
+
self.state = 169
|
| 1461 |
+
self.match(OmegaConfGrammarParser.WS)
|
| 1462 |
+
pass
|
| 1463 |
+
elif token in [OmegaConfGrammarParser.INTER_OPEN]:
|
| 1464 |
+
self.state = 170
|
| 1465 |
+
self.interpolation()
|
| 1466 |
+
pass
|
| 1467 |
+
else:
|
| 1468 |
+
raise NoViableAltException(self)
|
| 1469 |
+
|
| 1470 |
+
self.state = 173
|
| 1471 |
+
self._errHandler.sync(self)
|
| 1472 |
+
_la = self._input.LA(1)
|
| 1473 |
+
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.INTER_OPEN) | (1 << OmegaConfGrammarParser.COLON) | (1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0)):
|
| 1474 |
+
break
|
| 1475 |
+
|
| 1476 |
+
except RecognitionException as re:
|
| 1477 |
+
localctx.exception = re
|
| 1478 |
+
self._errHandler.reportError(self, re)
|
| 1479 |
+
self._errHandler.recover(self, re)
|
| 1480 |
+
finally:
|
| 1481 |
+
self.exitRule()
|
| 1482 |
+
return localctx
|
| 1483 |
+
|
| 1484 |
+
|
| 1485 |
+
class DictKeyContext(ParserRuleContext):
|
| 1486 |
+
__slots__ = 'parser'
|
| 1487 |
+
|
| 1488 |
+
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
|
| 1489 |
+
super().__init__(parent, invokingState)
|
| 1490 |
+
self.parser = parser
|
| 1491 |
+
|
| 1492 |
+
def ID(self, i:int=None):
|
| 1493 |
+
if i is None:
|
| 1494 |
+
return self.getTokens(OmegaConfGrammarParser.ID)
|
| 1495 |
+
else:
|
| 1496 |
+
return self.getToken(OmegaConfGrammarParser.ID, i)
|
| 1497 |
+
|
| 1498 |
+
def NULL(self, i:int=None):
|
| 1499 |
+
if i is None:
|
| 1500 |
+
return self.getTokens(OmegaConfGrammarParser.NULL)
|
| 1501 |
+
else:
|
| 1502 |
+
return self.getToken(OmegaConfGrammarParser.NULL, i)
|
| 1503 |
+
|
| 1504 |
+
def INT(self, i:int=None):
|
| 1505 |
+
if i is None:
|
| 1506 |
+
return self.getTokens(OmegaConfGrammarParser.INT)
|
| 1507 |
+
else:
|
| 1508 |
+
return self.getToken(OmegaConfGrammarParser.INT, i)
|
| 1509 |
+
|
| 1510 |
+
def FLOAT(self, i:int=None):
|
| 1511 |
+
if i is None:
|
| 1512 |
+
return self.getTokens(OmegaConfGrammarParser.FLOAT)
|
| 1513 |
+
else:
|
| 1514 |
+
return self.getToken(OmegaConfGrammarParser.FLOAT, i)
|
| 1515 |
+
|
| 1516 |
+
def BOOL(self, i:int=None):
|
| 1517 |
+
if i is None:
|
| 1518 |
+
return self.getTokens(OmegaConfGrammarParser.BOOL)
|
| 1519 |
+
else:
|
| 1520 |
+
return self.getToken(OmegaConfGrammarParser.BOOL, i)
|
| 1521 |
+
|
| 1522 |
+
def UNQUOTED_CHAR(self, i:int=None):
|
| 1523 |
+
if i is None:
|
| 1524 |
+
return self.getTokens(OmegaConfGrammarParser.UNQUOTED_CHAR)
|
| 1525 |
+
else:
|
| 1526 |
+
return self.getToken(OmegaConfGrammarParser.UNQUOTED_CHAR, i)
|
| 1527 |
+
|
| 1528 |
+
def ESC(self, i:int=None):
|
| 1529 |
+
if i is None:
|
| 1530 |
+
return self.getTokens(OmegaConfGrammarParser.ESC)
|
| 1531 |
+
else:
|
| 1532 |
+
return self.getToken(OmegaConfGrammarParser.ESC, i)
|
| 1533 |
+
|
| 1534 |
+
def WS(self, i:int=None):
|
| 1535 |
+
if i is None:
|
| 1536 |
+
return self.getTokens(OmegaConfGrammarParser.WS)
|
| 1537 |
+
else:
|
| 1538 |
+
return self.getToken(OmegaConfGrammarParser.WS, i)
|
| 1539 |
+
|
| 1540 |
+
def getRuleIndex(self):
|
| 1541 |
+
return OmegaConfGrammarParser.RULE_dictKey
|
| 1542 |
+
|
| 1543 |
+
def enterRule(self, listener:ParseTreeListener):
|
| 1544 |
+
if hasattr( listener, "enterDictKey" ):
|
| 1545 |
+
listener.enterDictKey(self)
|
| 1546 |
+
|
| 1547 |
+
def exitRule(self, listener:ParseTreeListener):
|
| 1548 |
+
if hasattr( listener, "exitDictKey" ):
|
| 1549 |
+
listener.exitDictKey(self)
|
| 1550 |
+
|
| 1551 |
+
def accept(self, visitor:ParseTreeVisitor):
|
| 1552 |
+
if hasattr( visitor, "visitDictKey" ):
|
| 1553 |
+
return visitor.visitDictKey(self)
|
| 1554 |
+
else:
|
| 1555 |
+
return visitor.visitChildren(self)
|
| 1556 |
+
|
| 1557 |
+
|
| 1558 |
+
|
| 1559 |
+
|
| 1560 |
+
def dictKey(self):
|
| 1561 |
+
|
| 1562 |
+
localctx = OmegaConfGrammarParser.DictKeyContext(self, self._ctx, self.state)
|
| 1563 |
+
self.enterRule(localctx, 30, self.RULE_dictKey)
|
| 1564 |
+
self._la = 0 # Token type
|
| 1565 |
+
try:
|
| 1566 |
+
self.enterOuterAlt(localctx, 1)
|
| 1567 |
+
self.state = 176
|
| 1568 |
+
self._errHandler.sync(self)
|
| 1569 |
+
_la = self._input.LA(1)
|
| 1570 |
+
while True:
|
| 1571 |
+
self.state = 175
|
| 1572 |
+
_la = self._input.LA(1)
|
| 1573 |
+
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0)):
|
| 1574 |
+
self._errHandler.recoverInline(self)
|
| 1575 |
+
else:
|
| 1576 |
+
self._errHandler.reportMatch(self)
|
| 1577 |
+
self.consume()
|
| 1578 |
+
self.state = 178
|
| 1579 |
+
self._errHandler.sync(self)
|
| 1580 |
+
_la = self._input.LA(1)
|
| 1581 |
+
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << OmegaConfGrammarParser.FLOAT) | (1 << OmegaConfGrammarParser.INT) | (1 << OmegaConfGrammarParser.BOOL) | (1 << OmegaConfGrammarParser.NULL) | (1 << OmegaConfGrammarParser.UNQUOTED_CHAR) | (1 << OmegaConfGrammarParser.ID) | (1 << OmegaConfGrammarParser.ESC) | (1 << OmegaConfGrammarParser.WS))) != 0)):
|
| 1582 |
+
break
|
| 1583 |
+
|
| 1584 |
+
except RecognitionException as re:
|
| 1585 |
+
localctx.exception = re
|
| 1586 |
+
self._errHandler.reportError(self, re)
|
| 1587 |
+
self._errHandler.recover(self, re)
|
| 1588 |
+
finally:
|
| 1589 |
+
self.exitRule()
|
| 1590 |
+
return localctx
|
| 1591 |
+
|
| 1592 |
+
|
| 1593 |
+
|
| 1594 |
+
|
| 1595 |
+
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarParserListener.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated from /tmp/build-via-sdist-fm63w174/omegaconf-2.3.0/omegaconf/grammar/OmegaConfGrammarParser.g4 by ANTLR 4.9.3
|
| 2 |
+
from antlr4 import *
|
| 3 |
+
if __name__ is not None and "." in __name__:
|
| 4 |
+
from .OmegaConfGrammarParser import OmegaConfGrammarParser
|
| 5 |
+
else:
|
| 6 |
+
from OmegaConfGrammarParser import OmegaConfGrammarParser
|
| 7 |
+
|
| 8 |
+
# This class defines a complete listener for a parse tree produced by OmegaConfGrammarParser.
|
| 9 |
+
class OmegaConfGrammarParserListener(ParseTreeListener):
|
| 10 |
+
|
| 11 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#configValue.
|
| 12 |
+
def enterConfigValue(self, ctx:OmegaConfGrammarParser.ConfigValueContext):
|
| 13 |
+
pass
|
| 14 |
+
|
| 15 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#configValue.
|
| 16 |
+
def exitConfigValue(self, ctx:OmegaConfGrammarParser.ConfigValueContext):
|
| 17 |
+
pass
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#singleElement.
|
| 21 |
+
def enterSingleElement(self, ctx:OmegaConfGrammarParser.SingleElementContext):
|
| 22 |
+
pass
|
| 23 |
+
|
| 24 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#singleElement.
|
| 25 |
+
def exitSingleElement(self, ctx:OmegaConfGrammarParser.SingleElementContext):
|
| 26 |
+
pass
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#text.
|
| 30 |
+
def enterText(self, ctx:OmegaConfGrammarParser.TextContext):
|
| 31 |
+
pass
|
| 32 |
+
|
| 33 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#text.
|
| 34 |
+
def exitText(self, ctx:OmegaConfGrammarParser.TextContext):
|
| 35 |
+
pass
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#element.
|
| 39 |
+
def enterElement(self, ctx:OmegaConfGrammarParser.ElementContext):
|
| 40 |
+
pass
|
| 41 |
+
|
| 42 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#element.
|
| 43 |
+
def exitElement(self, ctx:OmegaConfGrammarParser.ElementContext):
|
| 44 |
+
pass
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#listContainer.
|
| 48 |
+
def enterListContainer(self, ctx:OmegaConfGrammarParser.ListContainerContext):
|
| 49 |
+
pass
|
| 50 |
+
|
| 51 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#listContainer.
|
| 52 |
+
def exitListContainer(self, ctx:OmegaConfGrammarParser.ListContainerContext):
|
| 53 |
+
pass
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#dictContainer.
|
| 57 |
+
def enterDictContainer(self, ctx:OmegaConfGrammarParser.DictContainerContext):
|
| 58 |
+
pass
|
| 59 |
+
|
| 60 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#dictContainer.
|
| 61 |
+
def exitDictContainer(self, ctx:OmegaConfGrammarParser.DictContainerContext):
|
| 62 |
+
pass
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#dictKeyValuePair.
|
| 66 |
+
def enterDictKeyValuePair(self, ctx:OmegaConfGrammarParser.DictKeyValuePairContext):
|
| 67 |
+
pass
|
| 68 |
+
|
| 69 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#dictKeyValuePair.
|
| 70 |
+
def exitDictKeyValuePair(self, ctx:OmegaConfGrammarParser.DictKeyValuePairContext):
|
| 71 |
+
pass
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#sequence.
|
| 75 |
+
def enterSequence(self, ctx:OmegaConfGrammarParser.SequenceContext):
|
| 76 |
+
pass
|
| 77 |
+
|
| 78 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#sequence.
|
| 79 |
+
def exitSequence(self, ctx:OmegaConfGrammarParser.SequenceContext):
|
| 80 |
+
pass
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#interpolation.
|
| 84 |
+
def enterInterpolation(self, ctx:OmegaConfGrammarParser.InterpolationContext):
|
| 85 |
+
pass
|
| 86 |
+
|
| 87 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#interpolation.
|
| 88 |
+
def exitInterpolation(self, ctx:OmegaConfGrammarParser.InterpolationContext):
|
| 89 |
+
pass
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#interpolationNode.
|
| 93 |
+
def enterInterpolationNode(self, ctx:OmegaConfGrammarParser.InterpolationNodeContext):
|
| 94 |
+
pass
|
| 95 |
+
|
| 96 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#interpolationNode.
|
| 97 |
+
def exitInterpolationNode(self, ctx:OmegaConfGrammarParser.InterpolationNodeContext):
|
| 98 |
+
pass
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#interpolationResolver.
|
| 102 |
+
def enterInterpolationResolver(self, ctx:OmegaConfGrammarParser.InterpolationResolverContext):
|
| 103 |
+
pass
|
| 104 |
+
|
| 105 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#interpolationResolver.
|
| 106 |
+
def exitInterpolationResolver(self, ctx:OmegaConfGrammarParser.InterpolationResolverContext):
|
| 107 |
+
pass
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#configKey.
|
| 111 |
+
def enterConfigKey(self, ctx:OmegaConfGrammarParser.ConfigKeyContext):
|
| 112 |
+
pass
|
| 113 |
+
|
| 114 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#configKey.
|
| 115 |
+
def exitConfigKey(self, ctx:OmegaConfGrammarParser.ConfigKeyContext):
|
| 116 |
+
pass
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#resolverName.
|
| 120 |
+
def enterResolverName(self, ctx:OmegaConfGrammarParser.ResolverNameContext):
|
| 121 |
+
pass
|
| 122 |
+
|
| 123 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#resolverName.
|
| 124 |
+
def exitResolverName(self, ctx:OmegaConfGrammarParser.ResolverNameContext):
|
| 125 |
+
pass
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#quotedValue.
|
| 129 |
+
def enterQuotedValue(self, ctx:OmegaConfGrammarParser.QuotedValueContext):
|
| 130 |
+
pass
|
| 131 |
+
|
| 132 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#quotedValue.
|
| 133 |
+
def exitQuotedValue(self, ctx:OmegaConfGrammarParser.QuotedValueContext):
|
| 134 |
+
pass
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#primitive.
|
| 138 |
+
def enterPrimitive(self, ctx:OmegaConfGrammarParser.PrimitiveContext):
|
| 139 |
+
pass
|
| 140 |
+
|
| 141 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#primitive.
|
| 142 |
+
def exitPrimitive(self, ctx:OmegaConfGrammarParser.PrimitiveContext):
|
| 143 |
+
pass
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
# Enter a parse tree produced by OmegaConfGrammarParser#dictKey.
|
| 147 |
+
def enterDictKey(self, ctx:OmegaConfGrammarParser.DictKeyContext):
|
| 148 |
+
pass
|
| 149 |
+
|
| 150 |
+
# Exit a parse tree produced by OmegaConfGrammarParser#dictKey.
|
| 151 |
+
def exitDictKey(self, ctx:OmegaConfGrammarParser.DictKeyContext):
|
| 152 |
+
pass
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
del OmegaConfGrammarParser
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/OmegaConfGrammarParserVisitor.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated from /tmp/build-via-sdist-fm63w174/omegaconf-2.3.0/omegaconf/grammar/OmegaConfGrammarParser.g4 by ANTLR 4.9.3
|
| 2 |
+
from antlr4 import *
|
| 3 |
+
if __name__ is not None and "." in __name__:
|
| 4 |
+
from .OmegaConfGrammarParser import OmegaConfGrammarParser
|
| 5 |
+
else:
|
| 6 |
+
from OmegaConfGrammarParser import OmegaConfGrammarParser
|
| 7 |
+
|
| 8 |
+
# This class defines a complete generic visitor for a parse tree produced by OmegaConfGrammarParser.
|
| 9 |
+
|
| 10 |
+
class OmegaConfGrammarParserVisitor(ParseTreeVisitor):
|
| 11 |
+
|
| 12 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#configValue.
|
| 13 |
+
def visitConfigValue(self, ctx:OmegaConfGrammarParser.ConfigValueContext):
|
| 14 |
+
return self.visitChildren(ctx)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#singleElement.
|
| 18 |
+
def visitSingleElement(self, ctx:OmegaConfGrammarParser.SingleElementContext):
|
| 19 |
+
return self.visitChildren(ctx)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#text.
|
| 23 |
+
def visitText(self, ctx:OmegaConfGrammarParser.TextContext):
|
| 24 |
+
return self.visitChildren(ctx)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#element.
|
| 28 |
+
def visitElement(self, ctx:OmegaConfGrammarParser.ElementContext):
|
| 29 |
+
return self.visitChildren(ctx)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#listContainer.
|
| 33 |
+
def visitListContainer(self, ctx:OmegaConfGrammarParser.ListContainerContext):
|
| 34 |
+
return self.visitChildren(ctx)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#dictContainer.
|
| 38 |
+
def visitDictContainer(self, ctx:OmegaConfGrammarParser.DictContainerContext):
|
| 39 |
+
return self.visitChildren(ctx)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#dictKeyValuePair.
|
| 43 |
+
def visitDictKeyValuePair(self, ctx:OmegaConfGrammarParser.DictKeyValuePairContext):
|
| 44 |
+
return self.visitChildren(ctx)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#sequence.
|
| 48 |
+
def visitSequence(self, ctx:OmegaConfGrammarParser.SequenceContext):
|
| 49 |
+
return self.visitChildren(ctx)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#interpolation.
|
| 53 |
+
def visitInterpolation(self, ctx:OmegaConfGrammarParser.InterpolationContext):
|
| 54 |
+
return self.visitChildren(ctx)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#interpolationNode.
|
| 58 |
+
def visitInterpolationNode(self, ctx:OmegaConfGrammarParser.InterpolationNodeContext):
|
| 59 |
+
return self.visitChildren(ctx)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#interpolationResolver.
|
| 63 |
+
def visitInterpolationResolver(self, ctx:OmegaConfGrammarParser.InterpolationResolverContext):
|
| 64 |
+
return self.visitChildren(ctx)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#configKey.
|
| 68 |
+
def visitConfigKey(self, ctx:OmegaConfGrammarParser.ConfigKeyContext):
|
| 69 |
+
return self.visitChildren(ctx)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#resolverName.
|
| 73 |
+
def visitResolverName(self, ctx:OmegaConfGrammarParser.ResolverNameContext):
|
| 74 |
+
return self.visitChildren(ctx)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#quotedValue.
|
| 78 |
+
def visitQuotedValue(self, ctx:OmegaConfGrammarParser.QuotedValueContext):
|
| 79 |
+
return self.visitChildren(ctx)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#primitive.
|
| 83 |
+
def visitPrimitive(self, ctx:OmegaConfGrammarParser.PrimitiveContext):
|
| 84 |
+
return self.visitChildren(ctx)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
# Visit a parse tree produced by OmegaConfGrammarParser#dictKey.
|
| 88 |
+
def visitDictKey(self, ctx:OmegaConfGrammarParser.DictKeyContext):
|
| 89 |
+
return self.visitChildren(ctx)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
del OmegaConfGrammarParser
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/__pycache__/OmegaConfGrammarLexer.cpython-310.pyc
ADDED
|
Binary file (12.6 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/omegaconf/grammar/gen/__pycache__/OmegaConfGrammarParser.cpython-310.pyc
ADDED
|
Binary file (47.1 kB). View file
|
|
|