Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- parrot/lib/libncurses.so.6.4 +3 -0
- parrot/lib/python3.10/idlelib/Icons/idle_32.gif +3 -0
- parrot/lib/python3.10/idlelib/Icons/python.gif +3 -0
- parrot/lib/python3.10/importlib/metadata/__pycache__/_collections.cpython-310.pyc +0 -0
- parrot/lib/python3.10/importlib/metadata/__pycache__/_meta.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/PatternGrammar3.10.16.final.0.pickle +3 -0
- parrot/lib/python3.10/site-packages/COPYING +29 -0
- parrot/lib/python3.10/site-packages/cv2/LICENSE-3RD-PARTY.txt +0 -0
- parrot/lib/python3.10/site-packages/cv2/LICENSE.txt +21 -0
- parrot/lib/python3.10/site-packages/cv2/__init__.py +181 -0
- parrot/lib/python3.10/site-packages/cv2/__init__.pyi +0 -0
- parrot/lib/python3.10/site-packages/cv2/aruco/__init__.pyi +303 -0
- parrot/lib/python3.10/site-packages/cv2/config-3.py +24 -0
- parrot/lib/python3.10/site-packages/cv2/config.py +5 -0
- parrot/lib/python3.10/site-packages/cv2/gapi/__init__.py +323 -0
- parrot/lib/python3.10/site-packages/cv2/gapi/__init__.pyi +349 -0
- parrot/lib/python3.10/site-packages/cv2/gapi/wip/__init__.pyi +41 -0
- parrot/lib/python3.10/site-packages/cv2/gapi/wip/gst/__init__.pyi +17 -0
- parrot/lib/python3.10/site-packages/cv2/ipp/__init__.pyi +14 -0
- parrot/lib/python3.10/site-packages/cv2/load_config_py2.py +6 -0
- parrot/lib/python3.10/site-packages/cv2/load_config_py3.py +9 -0
- parrot/lib/python3.10/site-packages/cv2/py.typed +0 -0
- parrot/lib/python3.10/site-packages/cv2/version.py +5 -0
- parrot/lib/python3.10/site-packages/cycler/__init__.py +573 -0
- parrot/lib/python3.10/site-packages/cycler/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/cycler/py.typed +0 -0
- parrot/lib/python3.10/site-packages/example.py +169 -0
- parrot/lib/python3.10/site-packages/ffmpy.py +217 -0
- parrot/lib/python3.10/site-packages/hjson/__pycache__/ordered_dict.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/hjson/encoder.py +521 -0
- parrot/lib/python3.10/site-packages/hjson/tests/__init__.py +61 -0
- parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_fail.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_float.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass1.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_unicode.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_bigint_as_string.py +67 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_bitsize_int_as_string.py +73 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_check_circular.py +30 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_decimal.py +71 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_default.py +9 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_dump.py +130 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_encode_basestring_ascii.py +42 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_errors.py +51 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_fail.py +143 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_float.py +25 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_for_json.py +97 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_hjson.py +65 -0
- parrot/lib/python3.10/site-packages/hjson/tests/test_indent.py +86 -0
.gitattributes
CHANGED
|
@@ -68,3 +68,4 @@ parrot/lib/libz.so.1 filter=lfs diff=lfs merge=lfs -text
|
|
| 68 |
parrot/lib/libatomic.so filter=lfs diff=lfs merge=lfs -text
|
| 69 |
parrot/bin/lzma filter=lfs diff=lfs merge=lfs -text
|
| 70 |
parrot/lib/libncursesw.so.6 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 68 |
parrot/lib/libatomic.so filter=lfs diff=lfs merge=lfs -text
|
| 69 |
parrot/bin/lzma filter=lfs diff=lfs merge=lfs -text
|
| 70 |
parrot/lib/libncursesw.so.6 filter=lfs diff=lfs merge=lfs -text
|
| 71 |
+
parrot/lib/libncurses.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
parrot/lib/libncurses.so.6.4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7fa4e5e93804d78660b0eef727cdb4211209e1742e4ad3669348022668d90962
|
| 3 |
+
size 271304
|
parrot/lib/python3.10/idlelib/Icons/idle_32.gif
ADDED
|
|
Git LFS Details
|
parrot/lib/python3.10/idlelib/Icons/python.gif
ADDED
|
|
Git LFS Details
|
parrot/lib/python3.10/importlib/metadata/__pycache__/_collections.cpython-310.pyc
ADDED
|
Binary file (1.51 kB). View file
|
|
|
parrot/lib/python3.10/importlib/metadata/__pycache__/_meta.cpython-310.pyc
ADDED
|
Binary file (2.52 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/PatternGrammar3.10.16.final.0.pickle
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:36ee934395b9209737b13893ddaff05fad8e239c2fdfac29d401d3fceeb30768
|
| 3 |
+
size 1225
|
parrot/lib/python3.10/site-packages/COPYING
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2011, Stavros Korokithakis
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are
|
| 6 |
+
met:
|
| 7 |
+
|
| 8 |
+
Redistributions of source code must retain the above copyright notice,
|
| 9 |
+
this list of conditions and the following disclaimer.
|
| 10 |
+
|
| 11 |
+
Redistributions in binary form must reproduce the above copyright
|
| 12 |
+
notice, this list of conditions and the following disclaimer in the
|
| 13 |
+
documentation and/or other materials provided with the distribution.
|
| 14 |
+
|
| 15 |
+
Neither the name of Stochastic Technologies nor the names of its
|
| 16 |
+
contributors may be used to endorse or promote products derived from
|
| 17 |
+
this software without specific prior written permission.
|
| 18 |
+
|
| 19 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
parrot/lib/python3.10/site-packages/cv2/LICENSE-3RD-PARTY.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
parrot/lib/python3.10/site-packages/cv2/LICENSE.txt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) Olli-Pekka Heinisuo
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
parrot/lib/python3.10/site-packages/cv2/__init__.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''
|
| 2 |
+
OpenCV Python binary extension loader
|
| 3 |
+
'''
|
| 4 |
+
import os
|
| 5 |
+
import importlib
|
| 6 |
+
import sys
|
| 7 |
+
|
| 8 |
+
__all__ = []
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import numpy
|
| 12 |
+
import numpy.core.multiarray
|
| 13 |
+
except ImportError:
|
| 14 |
+
print('OpenCV bindings requires "numpy" package.')
|
| 15 |
+
print('Install it via command:')
|
| 16 |
+
print(' pip install numpy')
|
| 17 |
+
raise
|
| 18 |
+
|
| 19 |
+
# TODO
|
| 20 |
+
# is_x64 = sys.maxsize > 2**32
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def __load_extra_py_code_for_module(base, name, enable_debug_print=False):
|
| 24 |
+
module_name = "{}.{}".format(__name__, name)
|
| 25 |
+
export_module_name = "{}.{}".format(base, name)
|
| 26 |
+
native_module = sys.modules.pop(module_name, None)
|
| 27 |
+
try:
|
| 28 |
+
py_module = importlib.import_module(module_name)
|
| 29 |
+
except ImportError as err:
|
| 30 |
+
if enable_debug_print:
|
| 31 |
+
print("Can't load Python code for module:", module_name,
|
| 32 |
+
". Reason:", err)
|
| 33 |
+
# Extension doesn't contain extra py code
|
| 34 |
+
return False
|
| 35 |
+
|
| 36 |
+
if base in sys.modules and not hasattr(sys.modules[base], name):
|
| 37 |
+
setattr(sys.modules[base], name, py_module)
|
| 38 |
+
sys.modules[export_module_name] = py_module
|
| 39 |
+
# If it is C extension module it is already loaded by cv2 package
|
| 40 |
+
if native_module:
|
| 41 |
+
setattr(py_module, "_native", native_module)
|
| 42 |
+
for k, v in filter(lambda kv: not hasattr(py_module, kv[0]),
|
| 43 |
+
native_module.__dict__.items()):
|
| 44 |
+
if enable_debug_print: print(' symbol({}): {} = {}'.format(name, k, v))
|
| 45 |
+
setattr(py_module, k, v)
|
| 46 |
+
return True
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def __collect_extra_submodules(enable_debug_print=False):
|
| 50 |
+
def modules_filter(module):
|
| 51 |
+
return all((
|
| 52 |
+
# module is not internal
|
| 53 |
+
not module.startswith("_"),
|
| 54 |
+
not module.startswith("python-"),
|
| 55 |
+
# it is not a file
|
| 56 |
+
os.path.isdir(os.path.join(_extra_submodules_init_path, module))
|
| 57 |
+
))
|
| 58 |
+
if sys.version_info[0] < 3:
|
| 59 |
+
if enable_debug_print:
|
| 60 |
+
print("Extra submodules is loaded only for Python 3")
|
| 61 |
+
return []
|
| 62 |
+
|
| 63 |
+
__INIT_FILE_PATH = os.path.abspath(__file__)
|
| 64 |
+
_extra_submodules_init_path = os.path.dirname(__INIT_FILE_PATH)
|
| 65 |
+
return filter(modules_filter, os.listdir(_extra_submodules_init_path))
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def bootstrap():
|
| 69 |
+
import sys
|
| 70 |
+
|
| 71 |
+
import copy
|
| 72 |
+
save_sys_path = copy.copy(sys.path)
|
| 73 |
+
|
| 74 |
+
if hasattr(sys, 'OpenCV_LOADER'):
|
| 75 |
+
print(sys.path)
|
| 76 |
+
raise ImportError('ERROR: recursion is detected during loading of "cv2" binary extensions. Check OpenCV installation.')
|
| 77 |
+
sys.OpenCV_LOADER = True
|
| 78 |
+
|
| 79 |
+
DEBUG = False
|
| 80 |
+
if hasattr(sys, 'OpenCV_LOADER_DEBUG'):
|
| 81 |
+
DEBUG = True
|
| 82 |
+
|
| 83 |
+
import platform
|
| 84 |
+
if DEBUG: print('OpenCV loader: os.name="{}" platform.system()="{}"'.format(os.name, str(platform.system())))
|
| 85 |
+
|
| 86 |
+
LOADER_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
|
| 87 |
+
|
| 88 |
+
PYTHON_EXTENSIONS_PATHS = []
|
| 89 |
+
BINARIES_PATHS = []
|
| 90 |
+
|
| 91 |
+
g_vars = globals()
|
| 92 |
+
l_vars = locals().copy()
|
| 93 |
+
|
| 94 |
+
if sys.version_info[:2] < (3, 0):
|
| 95 |
+
from . load_config_py2 import exec_file_wrapper
|
| 96 |
+
else:
|
| 97 |
+
from . load_config_py3 import exec_file_wrapper
|
| 98 |
+
|
| 99 |
+
def load_first_config(fnames, required=True):
|
| 100 |
+
for fname in fnames:
|
| 101 |
+
fpath = os.path.join(LOADER_DIR, fname)
|
| 102 |
+
if not os.path.exists(fpath):
|
| 103 |
+
if DEBUG: print('OpenCV loader: config not found, skip: {}'.format(fpath))
|
| 104 |
+
continue
|
| 105 |
+
if DEBUG: print('OpenCV loader: loading config: {}'.format(fpath))
|
| 106 |
+
exec_file_wrapper(fpath, g_vars, l_vars)
|
| 107 |
+
return True
|
| 108 |
+
if required:
|
| 109 |
+
raise ImportError('OpenCV loader: missing configuration file: {}. Check OpenCV installation.'.format(fnames))
|
| 110 |
+
|
| 111 |
+
load_first_config(['config.py'], True)
|
| 112 |
+
load_first_config([
|
| 113 |
+
'config-{}.{}.py'.format(sys.version_info[0], sys.version_info[1]),
|
| 114 |
+
'config-{}.py'.format(sys.version_info[0])
|
| 115 |
+
], True)
|
| 116 |
+
|
| 117 |
+
if DEBUG: print('OpenCV loader: PYTHON_EXTENSIONS_PATHS={}'.format(str(l_vars['PYTHON_EXTENSIONS_PATHS'])))
|
| 118 |
+
if DEBUG: print('OpenCV loader: BINARIES_PATHS={}'.format(str(l_vars['BINARIES_PATHS'])))
|
| 119 |
+
|
| 120 |
+
applySysPathWorkaround = False
|
| 121 |
+
if hasattr(sys, 'OpenCV_REPLACE_SYS_PATH_0'):
|
| 122 |
+
applySysPathWorkaround = True
|
| 123 |
+
else:
|
| 124 |
+
try:
|
| 125 |
+
BASE_DIR = os.path.dirname(LOADER_DIR)
|
| 126 |
+
if sys.path[0] == BASE_DIR or os.path.realpath(sys.path[0]) == BASE_DIR:
|
| 127 |
+
applySysPathWorkaround = True
|
| 128 |
+
except:
|
| 129 |
+
if DEBUG: print('OpenCV loader: exception during checking workaround for sys.path[0]')
|
| 130 |
+
pass # applySysPathWorkaround is False
|
| 131 |
+
|
| 132 |
+
for p in reversed(l_vars['PYTHON_EXTENSIONS_PATHS']):
|
| 133 |
+
sys.path.insert(1 if not applySysPathWorkaround else 0, p)
|
| 134 |
+
|
| 135 |
+
if os.name == 'nt':
|
| 136 |
+
if sys.version_info[:2] >= (3, 8): # https://github.com/python/cpython/pull/12302
|
| 137 |
+
for p in l_vars['BINARIES_PATHS']:
|
| 138 |
+
try:
|
| 139 |
+
os.add_dll_directory(p)
|
| 140 |
+
except Exception as e:
|
| 141 |
+
if DEBUG: print('Failed os.add_dll_directory(): '+ str(e))
|
| 142 |
+
pass
|
| 143 |
+
os.environ['PATH'] = ';'.join(l_vars['BINARIES_PATHS']) + ';' + os.environ.get('PATH', '')
|
| 144 |
+
if DEBUG: print('OpenCV loader: PATH={}'.format(str(os.environ['PATH'])))
|
| 145 |
+
else:
|
| 146 |
+
# amending of LD_LIBRARY_PATH works for sub-processes only
|
| 147 |
+
os.environ['LD_LIBRARY_PATH'] = ':'.join(l_vars['BINARIES_PATHS']) + ':' + os.environ.get('LD_LIBRARY_PATH', '')
|
| 148 |
+
|
| 149 |
+
if DEBUG: print("Relink everything from native cv2 module to cv2 package")
|
| 150 |
+
|
| 151 |
+
py_module = sys.modules.pop("cv2")
|
| 152 |
+
|
| 153 |
+
native_module = importlib.import_module("cv2")
|
| 154 |
+
|
| 155 |
+
sys.modules["cv2"] = py_module
|
| 156 |
+
setattr(py_module, "_native", native_module)
|
| 157 |
+
|
| 158 |
+
for item_name, item in filter(lambda kv: kv[0] not in ("__file__", "__loader__", "__spec__",
|
| 159 |
+
"__name__", "__package__"),
|
| 160 |
+
native_module.__dict__.items()):
|
| 161 |
+
if item_name not in g_vars:
|
| 162 |
+
g_vars[item_name] = item
|
| 163 |
+
|
| 164 |
+
sys.path = save_sys_path # multiprocessing should start from bootstrap code (https://github.com/opencv/opencv/issues/18502)
|
| 165 |
+
|
| 166 |
+
try:
|
| 167 |
+
del sys.OpenCV_LOADER
|
| 168 |
+
except Exception as e:
|
| 169 |
+
if DEBUG:
|
| 170 |
+
print("Exception during delete OpenCV_LOADER:", e)
|
| 171 |
+
|
| 172 |
+
if DEBUG: print('OpenCV loader: binary extension... OK')
|
| 173 |
+
|
| 174 |
+
for submodule in __collect_extra_submodules(DEBUG):
|
| 175 |
+
if __load_extra_py_code_for_module("cv2", submodule, DEBUG):
|
| 176 |
+
if DEBUG: print("Extra Python code for", submodule, "is loaded")
|
| 177 |
+
|
| 178 |
+
if DEBUG: print('OpenCV loader: DONE')
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
bootstrap()
|
parrot/lib/python3.10/site-packages/cv2/__init__.pyi
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
parrot/lib/python3.10/site-packages/cv2/aruco/__init__.pyi
ADDED
|
@@ -0,0 +1,303 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__all__: list[str] = []
|
| 2 |
+
|
| 3 |
+
import cv2
|
| 4 |
+
import cv2.typing
|
| 5 |
+
import typing as _typing
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
# Enumerations
|
| 9 |
+
CORNER_REFINE_NONE: int
|
| 10 |
+
CORNER_REFINE_SUBPIX: int
|
| 11 |
+
CORNER_REFINE_CONTOUR: int
|
| 12 |
+
CORNER_REFINE_APRILTAG: int
|
| 13 |
+
CornerRefineMethod = int
|
| 14 |
+
"""One of [CORNER_REFINE_NONE, CORNER_REFINE_SUBPIX, CORNER_REFINE_CONTOUR, CORNER_REFINE_APRILTAG]"""
|
| 15 |
+
|
| 16 |
+
DICT_4X4_50: int
|
| 17 |
+
DICT_4X4_100: int
|
| 18 |
+
DICT_4X4_250: int
|
| 19 |
+
DICT_4X4_1000: int
|
| 20 |
+
DICT_5X5_50: int
|
| 21 |
+
DICT_5X5_100: int
|
| 22 |
+
DICT_5X5_250: int
|
| 23 |
+
DICT_5X5_1000: int
|
| 24 |
+
DICT_6X6_50: int
|
| 25 |
+
DICT_6X6_100: int
|
| 26 |
+
DICT_6X6_250: int
|
| 27 |
+
DICT_6X6_1000: int
|
| 28 |
+
DICT_7X7_50: int
|
| 29 |
+
DICT_7X7_100: int
|
| 30 |
+
DICT_7X7_250: int
|
| 31 |
+
DICT_7X7_1000: int
|
| 32 |
+
DICT_ARUCO_ORIGINAL: int
|
| 33 |
+
DICT_APRILTAG_16h5: int
|
| 34 |
+
DICT_APRILTAG_16H5: int
|
| 35 |
+
DICT_APRILTAG_25h9: int
|
| 36 |
+
DICT_APRILTAG_25H9: int
|
| 37 |
+
DICT_APRILTAG_36h10: int
|
| 38 |
+
DICT_APRILTAG_36H10: int
|
| 39 |
+
DICT_APRILTAG_36h11: int
|
| 40 |
+
DICT_APRILTAG_36H11: int
|
| 41 |
+
DICT_ARUCO_MIP_36h12: int
|
| 42 |
+
DICT_ARUCO_MIP_36H12: int
|
| 43 |
+
PredefinedDictionaryType = int
|
| 44 |
+
"""One of [DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL, DICT_APRILTAG_16h5, DICT_APRILTAG_16H5, DICT_APRILTAG_25h9, DICT_APRILTAG_25H9, DICT_APRILTAG_36h10, DICT_APRILTAG_36H10, DICT_APRILTAG_36h11, DICT_APRILTAG_36H11, DICT_ARUCO_MIP_36h12, DICT_ARUCO_MIP_36H12]"""
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# Classes
|
| 49 |
+
class Board:
|
| 50 |
+
# Functions
|
| 51 |
+
@_typing.overload
|
| 52 |
+
def __init__(self, objPoints: _typing.Sequence[cv2.typing.MatLike], dictionary: Dictionary, ids: cv2.typing.MatLike) -> None: ...
|
| 53 |
+
@_typing.overload
|
| 54 |
+
def __init__(self, objPoints: _typing.Sequence[cv2.UMat], dictionary: Dictionary, ids: cv2.UMat) -> None: ...
|
| 55 |
+
|
| 56 |
+
def getDictionary(self) -> Dictionary: ...
|
| 57 |
+
|
| 58 |
+
def getObjPoints(self) -> _typing.Sequence[_typing.Sequence[cv2.typing.Point3f]]: ...
|
| 59 |
+
|
| 60 |
+
def getIds(self) -> _typing.Sequence[int]: ...
|
| 61 |
+
|
| 62 |
+
def getRightBottomCorner(self) -> cv2.typing.Point3f: ...
|
| 63 |
+
|
| 64 |
+
@_typing.overload
|
| 65 |
+
def matchImagePoints(self, detectedCorners: _typing.Sequence[cv2.typing.MatLike], detectedIds: cv2.typing.MatLike, objPoints: cv2.typing.MatLike | None = ..., imgPoints: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
|
| 66 |
+
@_typing.overload
|
| 67 |
+
def matchImagePoints(self, detectedCorners: _typing.Sequence[cv2.UMat], detectedIds: cv2.UMat, objPoints: cv2.UMat | None = ..., imgPoints: cv2.UMat | None = ...) -> tuple[cv2.UMat, cv2.UMat]: ...
|
| 68 |
+
|
| 69 |
+
@_typing.overload
|
| 70 |
+
def generateImage(self, outSize: cv2.typing.Size, img: cv2.typing.MatLike | None = ..., marginSize: int = ..., borderBits: int = ...) -> cv2.typing.MatLike: ...
|
| 71 |
+
@_typing.overload
|
| 72 |
+
def generateImage(self, outSize: cv2.typing.Size, img: cv2.UMat | None = ..., marginSize: int = ..., borderBits: int = ...) -> cv2.UMat: ...
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class GridBoard(Board):
|
| 76 |
+
# Functions
|
| 77 |
+
@_typing.overload
|
| 78 |
+
def __init__(self, size: cv2.typing.Size, markerLength: float, markerSeparation: float, dictionary: Dictionary, ids: cv2.typing.MatLike | None = ...) -> None: ...
|
| 79 |
+
@_typing.overload
|
| 80 |
+
def __init__(self, size: cv2.typing.Size, markerLength: float, markerSeparation: float, dictionary: Dictionary, ids: cv2.UMat | None = ...) -> None: ...
|
| 81 |
+
|
| 82 |
+
def getGridSize(self) -> cv2.typing.Size: ...
|
| 83 |
+
|
| 84 |
+
def getMarkerLength(self) -> float: ...
|
| 85 |
+
|
| 86 |
+
def getMarkerSeparation(self) -> float: ...
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
class CharucoBoard(Board):
|
| 90 |
+
# Functions
|
| 91 |
+
@_typing.overload
|
| 92 |
+
def __init__(self, size: cv2.typing.Size, squareLength: float, markerLength: float, dictionary: Dictionary, ids: cv2.typing.MatLike | None = ...) -> None: ...
|
| 93 |
+
@_typing.overload
|
| 94 |
+
def __init__(self, size: cv2.typing.Size, squareLength: float, markerLength: float, dictionary: Dictionary, ids: cv2.UMat | None = ...) -> None: ...
|
| 95 |
+
|
| 96 |
+
def setLegacyPattern(self, legacyPattern: bool) -> None: ...
|
| 97 |
+
|
| 98 |
+
def getLegacyPattern(self) -> bool: ...
|
| 99 |
+
|
| 100 |
+
def getChessboardSize(self) -> cv2.typing.Size: ...
|
| 101 |
+
|
| 102 |
+
def getSquareLength(self) -> float: ...
|
| 103 |
+
|
| 104 |
+
def getMarkerLength(self) -> float: ...
|
| 105 |
+
|
| 106 |
+
def getChessboardCorners(self) -> _typing.Sequence[cv2.typing.Point3f]: ...
|
| 107 |
+
|
| 108 |
+
@_typing.overload
|
| 109 |
+
def checkCharucoCornersCollinear(self, charucoIds: cv2.typing.MatLike) -> bool: ...
|
| 110 |
+
@_typing.overload
|
| 111 |
+
def checkCharucoCornersCollinear(self, charucoIds: cv2.UMat) -> bool: ...
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class DetectorParameters:
|
| 115 |
+
adaptiveThreshWinSizeMin: int
|
| 116 |
+
adaptiveThreshWinSizeMax: int
|
| 117 |
+
adaptiveThreshWinSizeStep: int
|
| 118 |
+
adaptiveThreshConstant: float
|
| 119 |
+
minMarkerPerimeterRate: float
|
| 120 |
+
maxMarkerPerimeterRate: float
|
| 121 |
+
polygonalApproxAccuracyRate: float
|
| 122 |
+
minCornerDistanceRate: float
|
| 123 |
+
minDistanceToBorder: int
|
| 124 |
+
minMarkerDistanceRate: float
|
| 125 |
+
minGroupDistance: float
|
| 126 |
+
cornerRefinementMethod: int
|
| 127 |
+
cornerRefinementWinSize: int
|
| 128 |
+
relativeCornerRefinmentWinSize: float
|
| 129 |
+
cornerRefinementMaxIterations: int
|
| 130 |
+
cornerRefinementMinAccuracy: float
|
| 131 |
+
markerBorderBits: int
|
| 132 |
+
perspectiveRemovePixelPerCell: int
|
| 133 |
+
perspectiveRemoveIgnoredMarginPerCell: float
|
| 134 |
+
maxErroneousBitsInBorderRate: float
|
| 135 |
+
minOtsuStdDev: float
|
| 136 |
+
errorCorrectionRate: float
|
| 137 |
+
aprilTagQuadDecimate: float
|
| 138 |
+
aprilTagQuadSigma: float
|
| 139 |
+
aprilTagMinClusterPixels: int
|
| 140 |
+
aprilTagMaxNmaxima: int
|
| 141 |
+
aprilTagCriticalRad: float
|
| 142 |
+
aprilTagMaxLineFitMse: float
|
| 143 |
+
aprilTagMinWhiteBlackDiff: int
|
| 144 |
+
aprilTagDeglitch: int
|
| 145 |
+
detectInvertedMarker: bool
|
| 146 |
+
useAruco3Detection: bool
|
| 147 |
+
minSideLengthCanonicalImg: int
|
| 148 |
+
minMarkerLengthRatioOriginalImg: float
|
| 149 |
+
|
| 150 |
+
# Functions
|
| 151 |
+
def __init__(self) -> None: ...
|
| 152 |
+
|
| 153 |
+
def readDetectorParameters(self, fn: cv2.FileNode) -> bool: ...
|
| 154 |
+
|
| 155 |
+
def writeDetectorParameters(self, fs: cv2.FileStorage, name: str = ...) -> bool: ...
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class RefineParameters:
|
| 159 |
+
minRepDistance: float
|
| 160 |
+
errorCorrectionRate: float
|
| 161 |
+
checkAllOrders: bool
|
| 162 |
+
|
| 163 |
+
# Functions
|
| 164 |
+
def __init__(self, minRepDistance: float = ..., errorCorrectionRate: float = ..., checkAllOrders: bool = ...) -> None: ...
|
| 165 |
+
|
| 166 |
+
def readRefineParameters(self, fn: cv2.FileNode) -> bool: ...
|
| 167 |
+
|
| 168 |
+
def writeRefineParameters(self, fs: cv2.FileStorage, name: str = ...) -> bool: ...
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class ArucoDetector(cv2.Algorithm):
|
| 172 |
+
# Functions
|
| 173 |
+
def __init__(self, dictionary: Dictionary = ..., detectorParams: DetectorParameters = ..., refineParams: RefineParameters = ...) -> None: ...
|
| 174 |
+
|
| 175 |
+
@_typing.overload
|
| 176 |
+
def detectMarkers(self, image: cv2.typing.MatLike, corners: _typing.Sequence[cv2.typing.MatLike] | None = ..., ids: cv2.typing.MatLike | None = ..., rejectedImgPoints: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike]]: ...
|
| 177 |
+
@_typing.overload
|
| 178 |
+
def detectMarkers(self, image: cv2.UMat, corners: _typing.Sequence[cv2.UMat] | None = ..., ids: cv2.UMat | None = ..., rejectedImgPoints: _typing.Sequence[cv2.UMat] | None = ...) -> tuple[_typing.Sequence[cv2.UMat], cv2.UMat, _typing.Sequence[cv2.UMat]]: ...
|
| 179 |
+
|
| 180 |
+
@_typing.overload
|
| 181 |
+
def refineDetectedMarkers(self, image: cv2.typing.MatLike, board: Board, detectedCorners: _typing.Sequence[cv2.typing.MatLike], detectedIds: cv2.typing.MatLike, rejectedCorners: _typing.Sequence[cv2.typing.MatLike], cameraMatrix: cv2.typing.MatLike | None = ..., distCoeffs: cv2.typing.MatLike | None = ..., recoveredIdxs: cv2.typing.MatLike | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
|
| 182 |
+
@_typing.overload
|
| 183 |
+
def refineDetectedMarkers(self, image: cv2.UMat, board: Board, detectedCorners: _typing.Sequence[cv2.UMat], detectedIds: cv2.UMat, rejectedCorners: _typing.Sequence[cv2.UMat], cameraMatrix: cv2.UMat | None = ..., distCoeffs: cv2.UMat | None = ..., recoveredIdxs: cv2.UMat | None = ...) -> tuple[_typing.Sequence[cv2.UMat], cv2.UMat, _typing.Sequence[cv2.UMat], cv2.UMat]: ...
|
| 184 |
+
|
| 185 |
+
def getDictionary(self) -> Dictionary: ...
|
| 186 |
+
|
| 187 |
+
def setDictionary(self, dictionary: Dictionary) -> None: ...
|
| 188 |
+
|
| 189 |
+
def getDetectorParameters(self) -> DetectorParameters: ...
|
| 190 |
+
|
| 191 |
+
def setDetectorParameters(self, detectorParameters: DetectorParameters) -> None: ...
|
| 192 |
+
|
| 193 |
+
def getRefineParameters(self) -> RefineParameters: ...
|
| 194 |
+
|
| 195 |
+
def setRefineParameters(self, refineParameters: RefineParameters) -> None: ...
|
| 196 |
+
|
| 197 |
+
def write(self, fs: cv2.FileStorage, name: str) -> None: ...
|
| 198 |
+
|
| 199 |
+
def read(self, fn: cv2.FileNode) -> None: ...
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class Dictionary:
|
| 203 |
+
bytesList: cv2.typing.MatLike
|
| 204 |
+
markerSize: int
|
| 205 |
+
maxCorrectionBits: int
|
| 206 |
+
|
| 207 |
+
# Functions
|
| 208 |
+
@_typing.overload
|
| 209 |
+
def __init__(self) -> None: ...
|
| 210 |
+
@_typing.overload
|
| 211 |
+
def __init__(self, bytesList: cv2.typing.MatLike, _markerSize: int, maxcorr: int = ...) -> None: ...
|
| 212 |
+
|
| 213 |
+
def readDictionary(self, fn: cv2.FileNode) -> bool: ...
|
| 214 |
+
|
| 215 |
+
def writeDictionary(self, fs: cv2.FileStorage, name: str = ...) -> None: ...
|
| 216 |
+
|
| 217 |
+
def identify(self, onlyBits: cv2.typing.MatLike, maxCorrectionRate: float) -> tuple[bool, int, int]: ...
|
| 218 |
+
|
| 219 |
+
@_typing.overload
|
| 220 |
+
def getDistanceToId(self, bits: cv2.typing.MatLike, id: int, allRotations: bool = ...) -> int: ...
|
| 221 |
+
@_typing.overload
|
| 222 |
+
def getDistanceToId(self, bits: cv2.UMat, id: int, allRotations: bool = ...) -> int: ...
|
| 223 |
+
|
| 224 |
+
@_typing.overload
|
| 225 |
+
def generateImageMarker(self, id: int, sidePixels: int, _img: cv2.typing.MatLike | None = ..., borderBits: int = ...) -> cv2.typing.MatLike: ...
|
| 226 |
+
@_typing.overload
|
| 227 |
+
def generateImageMarker(self, id: int, sidePixels: int, _img: cv2.UMat | None = ..., borderBits: int = ...) -> cv2.UMat: ...
|
| 228 |
+
|
| 229 |
+
@staticmethod
|
| 230 |
+
def getByteListFromBits(bits: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
|
| 231 |
+
|
| 232 |
+
@staticmethod
|
| 233 |
+
def getBitsFromByteList(byteList: cv2.typing.MatLike, markerSize: int) -> cv2.typing.MatLike: ...
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
class CharucoParameters:
|
| 237 |
+
cameraMatrix: cv2.typing.MatLike
|
| 238 |
+
distCoeffs: cv2.typing.MatLike
|
| 239 |
+
minMarkers: int
|
| 240 |
+
tryRefineMarkers: bool
|
| 241 |
+
|
| 242 |
+
# Functions
|
| 243 |
+
def __init__(self) -> None: ...
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
class CharucoDetector(cv2.Algorithm):
|
| 247 |
+
# Functions
|
| 248 |
+
def __init__(self, board: CharucoBoard, charucoParams: CharucoParameters = ..., detectorParams: DetectorParameters = ..., refineParams: RefineParameters = ...) -> None: ...
|
| 249 |
+
|
| 250 |
+
def getBoard(self) -> CharucoBoard: ...
|
| 251 |
+
|
| 252 |
+
def setBoard(self, board: CharucoBoard) -> None: ...
|
| 253 |
+
|
| 254 |
+
def getCharucoParameters(self) -> CharucoParameters: ...
|
| 255 |
+
|
| 256 |
+
def setCharucoParameters(self, charucoParameters: CharucoParameters) -> None: ...
|
| 257 |
+
|
| 258 |
+
def getDetectorParameters(self) -> DetectorParameters: ...
|
| 259 |
+
|
| 260 |
+
def setDetectorParameters(self, detectorParameters: DetectorParameters) -> None: ...
|
| 261 |
+
|
| 262 |
+
def getRefineParameters(self) -> RefineParameters: ...
|
| 263 |
+
|
| 264 |
+
def setRefineParameters(self, refineParameters: RefineParameters) -> None: ...
|
| 265 |
+
|
| 266 |
+
@_typing.overload
|
| 267 |
+
def detectBoard(self, image: cv2.typing.MatLike, charucoCorners: cv2.typing.MatLike | None = ..., charucoIds: cv2.typing.MatLike | None = ..., markerCorners: _typing.Sequence[cv2.typing.MatLike] | None = ..., markerIds: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
|
| 268 |
+
@_typing.overload
|
| 269 |
+
def detectBoard(self, image: cv2.UMat, charucoCorners: cv2.UMat | None = ..., charucoIds: cv2.UMat | None = ..., markerCorners: _typing.Sequence[cv2.UMat] | None = ..., markerIds: cv2.UMat | None = ...) -> tuple[cv2.UMat, cv2.UMat, _typing.Sequence[cv2.UMat], cv2.UMat]: ...
|
| 270 |
+
|
| 271 |
+
@_typing.overload
|
| 272 |
+
def detectDiamonds(self, image: cv2.typing.MatLike, diamondCorners: _typing.Sequence[cv2.typing.MatLike] | None = ..., diamondIds: cv2.typing.MatLike | None = ..., markerCorners: _typing.Sequence[cv2.typing.MatLike] | None = ..., markerIds: cv2.typing.MatLike | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
|
| 273 |
+
@_typing.overload
|
| 274 |
+
def detectDiamonds(self, image: cv2.UMat, diamondCorners: _typing.Sequence[cv2.UMat] | None = ..., diamondIds: cv2.UMat | None = ..., markerCorners: _typing.Sequence[cv2.UMat] | None = ..., markerIds: cv2.UMat | None = ...) -> tuple[_typing.Sequence[cv2.UMat], cv2.UMat, _typing.Sequence[cv2.UMat], cv2.UMat]: ...
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
# Functions
|
| 279 |
+
@_typing.overload
|
| 280 |
+
def drawDetectedCornersCharuco(image: cv2.typing.MatLike, charucoCorners: cv2.typing.MatLike, charucoIds: cv2.typing.MatLike | None = ..., cornerColor: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
|
| 281 |
+
@_typing.overload
|
| 282 |
+
def drawDetectedCornersCharuco(image: cv2.UMat, charucoCorners: cv2.UMat, charucoIds: cv2.UMat | None = ..., cornerColor: cv2.typing.Scalar = ...) -> cv2.UMat: ...
|
| 283 |
+
|
| 284 |
+
@_typing.overload
|
| 285 |
+
def drawDetectedDiamonds(image: cv2.typing.MatLike, diamondCorners: _typing.Sequence[cv2.typing.MatLike], diamondIds: cv2.typing.MatLike | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
|
| 286 |
+
@_typing.overload
|
| 287 |
+
def drawDetectedDiamonds(image: cv2.UMat, diamondCorners: _typing.Sequence[cv2.UMat], diamondIds: cv2.UMat | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.UMat: ...
|
| 288 |
+
|
| 289 |
+
@_typing.overload
|
| 290 |
+
def drawDetectedMarkers(image: cv2.typing.MatLike, corners: _typing.Sequence[cv2.typing.MatLike], ids: cv2.typing.MatLike | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
|
| 291 |
+
@_typing.overload
|
| 292 |
+
def drawDetectedMarkers(image: cv2.UMat, corners: _typing.Sequence[cv2.UMat], ids: cv2.UMat | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.UMat: ...
|
| 293 |
+
|
| 294 |
+
def extendDictionary(nMarkers: int, markerSize: int, baseDictionary: Dictionary = ..., randomSeed: int = ...) -> Dictionary: ...
|
| 295 |
+
|
| 296 |
+
@_typing.overload
|
| 297 |
+
def generateImageMarker(dictionary: Dictionary, id: int, sidePixels: int, img: cv2.typing.MatLike | None = ..., borderBits: int = ...) -> cv2.typing.MatLike: ...
|
| 298 |
+
@_typing.overload
|
| 299 |
+
def generateImageMarker(dictionary: Dictionary, id: int, sidePixels: int, img: cv2.UMat | None = ..., borderBits: int = ...) -> cv2.UMat: ...
|
| 300 |
+
|
| 301 |
+
def getPredefinedDictionary(dict: int) -> Dictionary: ...
|
| 302 |
+
|
| 303 |
+
|
parrot/lib/python3.10/site-packages/cv2/config-3.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
PYTHON_EXTENSIONS_PATHS = [
|
| 2 |
+
LOADER_DIR
|
| 3 |
+
] + PYTHON_EXTENSIONS_PATHS
|
| 4 |
+
|
| 5 |
+
ci_and_not_headless = False
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
from .version import ci_build, headless
|
| 9 |
+
|
| 10 |
+
ci_and_not_headless = ci_build and not headless
|
| 11 |
+
except:
|
| 12 |
+
pass
|
| 13 |
+
|
| 14 |
+
# the Qt plugin is included currently only in the pre-built wheels
|
| 15 |
+
if sys.platform.startswith("linux") and ci_and_not_headless:
|
| 16 |
+
os.environ["QT_QPA_PLATFORM_PLUGIN_PATH"] = os.path.join(
|
| 17 |
+
os.path.dirname(os.path.abspath(__file__)), "qt", "plugins"
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
# Qt will throw warning on Linux if fonts are not found
|
| 21 |
+
if sys.platform.startswith("linux") and ci_and_not_headless:
|
| 22 |
+
os.environ["QT_QPA_FONTDIR"] = os.path.join(
|
| 23 |
+
os.path.dirname(os.path.abspath(__file__)), "qt", "fonts"
|
| 24 |
+
)
|
parrot/lib/python3.10/site-packages/cv2/config.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
BINARIES_PATHS = [
|
| 4 |
+
os.path.join(os.path.join(LOADER_DIR, '../../'), 'lib64')
|
| 5 |
+
] + BINARIES_PATHS
|
parrot/lib/python3.10/site-packages/cv2/gapi/__init__.py
ADDED
|
@@ -0,0 +1,323 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__all__ = ['op', 'kernel']
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
import cv2 as cv
|
| 5 |
+
|
| 6 |
+
# NB: Register function in specific module
|
| 7 |
+
def register(mname):
|
| 8 |
+
def parameterized(func):
|
| 9 |
+
sys.modules[mname].__dict__[func.__name__] = func
|
| 10 |
+
return func
|
| 11 |
+
return parameterized
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@register('cv2.gapi')
|
| 15 |
+
def networks(*args):
|
| 16 |
+
return cv.gapi_GNetPackage(list(map(cv.detail.strip, args)))
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@register('cv2.gapi')
|
| 20 |
+
def compile_args(*args):
|
| 21 |
+
return list(map(cv.GCompileArg, args))
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@register('cv2')
|
| 25 |
+
def GIn(*args):
|
| 26 |
+
return [*args]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@register('cv2')
|
| 30 |
+
def GOut(*args):
|
| 31 |
+
return [*args]
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@register('cv2')
|
| 35 |
+
def gin(*args):
|
| 36 |
+
return [*args]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@register('cv2.gapi')
|
| 40 |
+
def descr_of(*args):
|
| 41 |
+
return [*args]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@register('cv2')
|
| 45 |
+
class GOpaque():
|
| 46 |
+
# NB: Inheritance from c++ class cause segfault.
|
| 47 |
+
# So just aggregate cv.GOpaqueT instead of inheritance
|
| 48 |
+
def __new__(cls, argtype):
|
| 49 |
+
return cv.GOpaqueT(argtype)
|
| 50 |
+
|
| 51 |
+
class Bool():
|
| 52 |
+
def __new__(self):
|
| 53 |
+
return cv.GOpaqueT(cv.gapi.CV_BOOL)
|
| 54 |
+
|
| 55 |
+
class Int():
|
| 56 |
+
def __new__(self):
|
| 57 |
+
return cv.GOpaqueT(cv.gapi.CV_INT)
|
| 58 |
+
|
| 59 |
+
class Int64():
|
| 60 |
+
def __new__(self):
|
| 61 |
+
return cv.GOpaqueT(cv.gapi.CV_INT64)
|
| 62 |
+
|
| 63 |
+
class UInt64():
|
| 64 |
+
def __new__(self):
|
| 65 |
+
return cv.GOpaqueT(cv.gapi.CV_UINT64)
|
| 66 |
+
|
| 67 |
+
class Double():
|
| 68 |
+
def __new__(self):
|
| 69 |
+
return cv.GOpaqueT(cv.gapi.CV_DOUBLE)
|
| 70 |
+
|
| 71 |
+
class Float():
|
| 72 |
+
def __new__(self):
|
| 73 |
+
return cv.GOpaqueT(cv.gapi.CV_FLOAT)
|
| 74 |
+
|
| 75 |
+
class String():
|
| 76 |
+
def __new__(self):
|
| 77 |
+
return cv.GOpaqueT(cv.gapi.CV_STRING)
|
| 78 |
+
|
| 79 |
+
class Point():
|
| 80 |
+
def __new__(self):
|
| 81 |
+
return cv.GOpaqueT(cv.gapi.CV_POINT)
|
| 82 |
+
|
| 83 |
+
class Point2f():
|
| 84 |
+
def __new__(self):
|
| 85 |
+
return cv.GOpaqueT(cv.gapi.CV_POINT2F)
|
| 86 |
+
|
| 87 |
+
class Point3f():
|
| 88 |
+
def __new__(self):
|
| 89 |
+
return cv.GOpaqueT(cv.gapi.CV_POINT3F)
|
| 90 |
+
|
| 91 |
+
class Size():
|
| 92 |
+
def __new__(self):
|
| 93 |
+
return cv.GOpaqueT(cv.gapi.CV_SIZE)
|
| 94 |
+
|
| 95 |
+
class Rect():
|
| 96 |
+
def __new__(self):
|
| 97 |
+
return cv.GOpaqueT(cv.gapi.CV_RECT)
|
| 98 |
+
|
| 99 |
+
class Prim():
|
| 100 |
+
def __new__(self):
|
| 101 |
+
return cv.GOpaqueT(cv.gapi.CV_DRAW_PRIM)
|
| 102 |
+
|
| 103 |
+
class Any():
|
| 104 |
+
def __new__(self):
|
| 105 |
+
return cv.GOpaqueT(cv.gapi.CV_ANY)
|
| 106 |
+
|
| 107 |
+
@register('cv2')
|
| 108 |
+
class GArray():
|
| 109 |
+
# NB: Inheritance from c++ class cause segfault.
|
| 110 |
+
# So just aggregate cv.GArrayT instead of inheritance
|
| 111 |
+
def __new__(cls, argtype):
|
| 112 |
+
return cv.GArrayT(argtype)
|
| 113 |
+
|
| 114 |
+
class Bool():
|
| 115 |
+
def __new__(self):
|
| 116 |
+
return cv.GArrayT(cv.gapi.CV_BOOL)
|
| 117 |
+
|
| 118 |
+
class Int():
|
| 119 |
+
def __new__(self):
|
| 120 |
+
return cv.GArrayT(cv.gapi.CV_INT)
|
| 121 |
+
|
| 122 |
+
class Int64():
|
| 123 |
+
def __new__(self):
|
| 124 |
+
return cv.GArrayT(cv.gapi.CV_INT64)
|
| 125 |
+
|
| 126 |
+
class UInt64():
|
| 127 |
+
def __new__(self):
|
| 128 |
+
return cv.GArrayT(cv.gapi.CV_UINT64)
|
| 129 |
+
|
| 130 |
+
class Double():
|
| 131 |
+
def __new__(self):
|
| 132 |
+
return cv.GArrayT(cv.gapi.CV_DOUBLE)
|
| 133 |
+
|
| 134 |
+
class Float():
|
| 135 |
+
def __new__(self):
|
| 136 |
+
return cv.GArrayT(cv.gapi.CV_FLOAT)
|
| 137 |
+
|
| 138 |
+
class String():
|
| 139 |
+
def __new__(self):
|
| 140 |
+
return cv.GArrayT(cv.gapi.CV_STRING)
|
| 141 |
+
|
| 142 |
+
class Point():
|
| 143 |
+
def __new__(self):
|
| 144 |
+
return cv.GArrayT(cv.gapi.CV_POINT)
|
| 145 |
+
|
| 146 |
+
class Point2f():
|
| 147 |
+
def __new__(self):
|
| 148 |
+
return cv.GArrayT(cv.gapi.CV_POINT2F)
|
| 149 |
+
|
| 150 |
+
class Point3f():
|
| 151 |
+
def __new__(self):
|
| 152 |
+
return cv.GArrayT(cv.gapi.CV_POINT3F)
|
| 153 |
+
|
| 154 |
+
class Size():
|
| 155 |
+
def __new__(self):
|
| 156 |
+
return cv.GArrayT(cv.gapi.CV_SIZE)
|
| 157 |
+
|
| 158 |
+
class Rect():
|
| 159 |
+
def __new__(self):
|
| 160 |
+
return cv.GArrayT(cv.gapi.CV_RECT)
|
| 161 |
+
|
| 162 |
+
class Scalar():
|
| 163 |
+
def __new__(self):
|
| 164 |
+
return cv.GArrayT(cv.gapi.CV_SCALAR)
|
| 165 |
+
|
| 166 |
+
class Mat():
|
| 167 |
+
def __new__(self):
|
| 168 |
+
return cv.GArrayT(cv.gapi.CV_MAT)
|
| 169 |
+
|
| 170 |
+
class GMat():
|
| 171 |
+
def __new__(self):
|
| 172 |
+
return cv.GArrayT(cv.gapi.CV_GMAT)
|
| 173 |
+
|
| 174 |
+
class Prim():
|
| 175 |
+
def __new__(self):
|
| 176 |
+
return cv.GArray(cv.gapi.CV_DRAW_PRIM)
|
| 177 |
+
|
| 178 |
+
class Any():
|
| 179 |
+
def __new__(self):
|
| 180 |
+
return cv.GArray(cv.gapi.CV_ANY)
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
# NB: Top lvl decorator takes arguments
|
| 184 |
+
def op(op_id, in_types, out_types):
|
| 185 |
+
|
| 186 |
+
garray_types= {
|
| 187 |
+
cv.GArray.Bool: cv.gapi.CV_BOOL,
|
| 188 |
+
cv.GArray.Int: cv.gapi.CV_INT,
|
| 189 |
+
cv.GArray.Int64: cv.gapi.CV_INT64,
|
| 190 |
+
cv.GArray.UInt64: cv.gapi.CV_UINT64,
|
| 191 |
+
cv.GArray.Double: cv.gapi.CV_DOUBLE,
|
| 192 |
+
cv.GArray.Float: cv.gapi.CV_FLOAT,
|
| 193 |
+
cv.GArray.String: cv.gapi.CV_STRING,
|
| 194 |
+
cv.GArray.Point: cv.gapi.CV_POINT,
|
| 195 |
+
cv.GArray.Point2f: cv.gapi.CV_POINT2F,
|
| 196 |
+
cv.GArray.Point3f: cv.gapi.CV_POINT3F,
|
| 197 |
+
cv.GArray.Size: cv.gapi.CV_SIZE,
|
| 198 |
+
cv.GArray.Rect: cv.gapi.CV_RECT,
|
| 199 |
+
cv.GArray.Scalar: cv.gapi.CV_SCALAR,
|
| 200 |
+
cv.GArray.Mat: cv.gapi.CV_MAT,
|
| 201 |
+
cv.GArray.GMat: cv.gapi.CV_GMAT,
|
| 202 |
+
cv.GArray.Prim: cv.gapi.CV_DRAW_PRIM,
|
| 203 |
+
cv.GArray.Any: cv.gapi.CV_ANY
|
| 204 |
+
}
|
| 205 |
+
|
| 206 |
+
gopaque_types= {
|
| 207 |
+
cv.GOpaque.Size: cv.gapi.CV_SIZE,
|
| 208 |
+
cv.GOpaque.Rect: cv.gapi.CV_RECT,
|
| 209 |
+
cv.GOpaque.Bool: cv.gapi.CV_BOOL,
|
| 210 |
+
cv.GOpaque.Int: cv.gapi.CV_INT,
|
| 211 |
+
cv.GOpaque.Int64: cv.gapi.CV_INT64,
|
| 212 |
+
cv.GOpaque.UInt64: cv.gapi.CV_UINT64,
|
| 213 |
+
cv.GOpaque.Double: cv.gapi.CV_DOUBLE,
|
| 214 |
+
cv.GOpaque.Float: cv.gapi.CV_FLOAT,
|
| 215 |
+
cv.GOpaque.String: cv.gapi.CV_STRING,
|
| 216 |
+
cv.GOpaque.Point: cv.gapi.CV_POINT,
|
| 217 |
+
cv.GOpaque.Point2f: cv.gapi.CV_POINT2F,
|
| 218 |
+
cv.GOpaque.Point3f: cv.gapi.CV_POINT3F,
|
| 219 |
+
cv.GOpaque.Size: cv.gapi.CV_SIZE,
|
| 220 |
+
cv.GOpaque.Rect: cv.gapi.CV_RECT,
|
| 221 |
+
cv.GOpaque.Prim: cv.gapi.CV_DRAW_PRIM,
|
| 222 |
+
cv.GOpaque.Any: cv.gapi.CV_ANY
|
| 223 |
+
}
|
| 224 |
+
|
| 225 |
+
type2str = {
|
| 226 |
+
cv.gapi.CV_BOOL: 'cv.gapi.CV_BOOL' ,
|
| 227 |
+
cv.gapi.CV_INT: 'cv.gapi.CV_INT' ,
|
| 228 |
+
cv.gapi.CV_INT64: 'cv.gapi.CV_INT64' ,
|
| 229 |
+
cv.gapi.CV_UINT64: 'cv.gapi.CV_UINT64' ,
|
| 230 |
+
cv.gapi.CV_DOUBLE: 'cv.gapi.CV_DOUBLE' ,
|
| 231 |
+
cv.gapi.CV_FLOAT: 'cv.gapi.CV_FLOAT' ,
|
| 232 |
+
cv.gapi.CV_STRING: 'cv.gapi.CV_STRING' ,
|
| 233 |
+
cv.gapi.CV_POINT: 'cv.gapi.CV_POINT' ,
|
| 234 |
+
cv.gapi.CV_POINT2F: 'cv.gapi.CV_POINT2F' ,
|
| 235 |
+
cv.gapi.CV_POINT3F: 'cv.gapi.CV_POINT3F' ,
|
| 236 |
+
cv.gapi.CV_SIZE: 'cv.gapi.CV_SIZE',
|
| 237 |
+
cv.gapi.CV_RECT: 'cv.gapi.CV_RECT',
|
| 238 |
+
cv.gapi.CV_SCALAR: 'cv.gapi.CV_SCALAR',
|
| 239 |
+
cv.gapi.CV_MAT: 'cv.gapi.CV_MAT',
|
| 240 |
+
cv.gapi.CV_GMAT: 'cv.gapi.CV_GMAT',
|
| 241 |
+
cv.gapi.CV_DRAW_PRIM: 'cv.gapi.CV_DRAW_PRIM'
|
| 242 |
+
}
|
| 243 |
+
|
| 244 |
+
# NB: Second lvl decorator takes class to decorate
|
| 245 |
+
def op_with_params(cls):
|
| 246 |
+
if not in_types:
|
| 247 |
+
raise Exception('{} operation should have at least one input!'.format(cls.__name__))
|
| 248 |
+
|
| 249 |
+
if not out_types:
|
| 250 |
+
raise Exception('{} operation should have at least one output!'.format(cls.__name__))
|
| 251 |
+
|
| 252 |
+
for i, t in enumerate(out_types):
|
| 253 |
+
if t not in [cv.GMat, cv.GScalar, *garray_types, *gopaque_types]:
|
| 254 |
+
raise Exception('{} unsupported output type: {} in position: {}'
|
| 255 |
+
.format(cls.__name__, t.__name__, i))
|
| 256 |
+
|
| 257 |
+
def on(*args):
|
| 258 |
+
if len(in_types) != len(args):
|
| 259 |
+
raise Exception('Invalid number of input elements!\nExpected: {}, Actual: {}'
|
| 260 |
+
.format(len(in_types), len(args)))
|
| 261 |
+
|
| 262 |
+
for i, (t, a) in enumerate(zip(in_types, args)):
|
| 263 |
+
if t in garray_types:
|
| 264 |
+
if not isinstance(a, cv.GArrayT):
|
| 265 |
+
raise Exception("{} invalid type for argument {}.\nExpected: {}, Actual: {}"
|
| 266 |
+
.format(cls.__name__, i, cv.GArrayT.__name__, type(a).__name__))
|
| 267 |
+
|
| 268 |
+
elif a.type() != garray_types[t]:
|
| 269 |
+
raise Exception("{} invalid GArrayT type for argument {}.\nExpected: {}, Actual: {}"
|
| 270 |
+
.format(cls.__name__, i, type2str[garray_types[t]], type2str[a.type()]))
|
| 271 |
+
|
| 272 |
+
elif t in gopaque_types:
|
| 273 |
+
if not isinstance(a, cv.GOpaqueT):
|
| 274 |
+
raise Exception("{} invalid type for argument {}.\nExpected: {}, Actual: {}"
|
| 275 |
+
.format(cls.__name__, i, cv.GOpaqueT.__name__, type(a).__name__))
|
| 276 |
+
|
| 277 |
+
elif a.type() != gopaque_types[t]:
|
| 278 |
+
raise Exception("{} invalid GOpaque type for argument {}.\nExpected: {}, Actual: {}"
|
| 279 |
+
.format(cls.__name__, i, type2str[gopaque_types[t]], type2str[a.type()]))
|
| 280 |
+
|
| 281 |
+
else:
|
| 282 |
+
if t != type(a):
|
| 283 |
+
raise Exception('{} invalid input type for argument {}.\nExpected: {}, Actual: {}'
|
| 284 |
+
.format(cls.__name__, i, t.__name__, type(a).__name__))
|
| 285 |
+
|
| 286 |
+
op = cv.gapi.__op(op_id, cls.outMeta, *args)
|
| 287 |
+
|
| 288 |
+
out_protos = []
|
| 289 |
+
for i, out_type in enumerate(out_types):
|
| 290 |
+
if out_type == cv.GMat:
|
| 291 |
+
out_protos.append(op.getGMat())
|
| 292 |
+
elif out_type == cv.GScalar:
|
| 293 |
+
out_protos.append(op.getGScalar())
|
| 294 |
+
elif out_type in gopaque_types:
|
| 295 |
+
out_protos.append(op.getGOpaque(gopaque_types[out_type]))
|
| 296 |
+
elif out_type in garray_types:
|
| 297 |
+
out_protos.append(op.getGArray(garray_types[out_type]))
|
| 298 |
+
else:
|
| 299 |
+
raise Exception("""In {}: G-API operation can't produce the output with type: {} in position: {}"""
|
| 300 |
+
.format(cls.__name__, out_type.__name__, i))
|
| 301 |
+
|
| 302 |
+
return tuple(out_protos) if len(out_protos) != 1 else out_protos[0]
|
| 303 |
+
|
| 304 |
+
# NB: Extend operation class
|
| 305 |
+
cls.id = op_id
|
| 306 |
+
cls.on = staticmethod(on)
|
| 307 |
+
return cls
|
| 308 |
+
|
| 309 |
+
return op_with_params
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
def kernel(op_cls):
|
| 313 |
+
# NB: Second lvl decorator takes class to decorate
|
| 314 |
+
def kernel_with_params(cls):
|
| 315 |
+
# NB: Add new members to kernel class
|
| 316 |
+
cls.id = op_cls.id
|
| 317 |
+
cls.outMeta = op_cls.outMeta
|
| 318 |
+
return cls
|
| 319 |
+
|
| 320 |
+
return kernel_with_params
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
cv.gapi.wip.GStreamerPipeline = cv.gapi_wip_gst_GStreamerPipeline
|
parrot/lib/python3.10/site-packages/cv2/gapi/__init__.pyi
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__all__: list[str] = []
|
| 2 |
+
|
| 3 |
+
import cv2
|
| 4 |
+
import cv2.typing
|
| 5 |
+
import typing as _typing
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
from cv2.gapi import core as core
|
| 9 |
+
from cv2.gapi import ie as ie
|
| 10 |
+
from cv2.gapi import imgproc as imgproc
|
| 11 |
+
from cv2.gapi import oak as oak
|
| 12 |
+
from cv2.gapi import onnx as onnx
|
| 13 |
+
from cv2.gapi import ot as ot
|
| 14 |
+
from cv2.gapi import ov as ov
|
| 15 |
+
from cv2.gapi import own as own
|
| 16 |
+
from cv2.gapi import render as render
|
| 17 |
+
from cv2.gapi import streaming as streaming
|
| 18 |
+
from cv2.gapi import video as video
|
| 19 |
+
from cv2.gapi import wip as wip
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# Enumerations
|
| 23 |
+
StereoOutputFormat_DEPTH_FLOAT16: int
|
| 24 |
+
STEREO_OUTPUT_FORMAT_DEPTH_FLOAT16: int
|
| 25 |
+
StereoOutputFormat_DEPTH_FLOAT32: int
|
| 26 |
+
STEREO_OUTPUT_FORMAT_DEPTH_FLOAT32: int
|
| 27 |
+
StereoOutputFormat_DISPARITY_FIXED16_11_5: int
|
| 28 |
+
STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_11_5: int
|
| 29 |
+
StereoOutputFormat_DISPARITY_FIXED16_12_4: int
|
| 30 |
+
STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_12_4: int
|
| 31 |
+
StereoOutputFormat_DEPTH_16F: int
|
| 32 |
+
STEREO_OUTPUT_FORMAT_DEPTH_16F: int
|
| 33 |
+
StereoOutputFormat_DEPTH_32F: int
|
| 34 |
+
STEREO_OUTPUT_FORMAT_DEPTH_32F: int
|
| 35 |
+
StereoOutputFormat_DISPARITY_16Q_10_5: int
|
| 36 |
+
STEREO_OUTPUT_FORMAT_DISPARITY_16Q_10_5: int
|
| 37 |
+
StereoOutputFormat_DISPARITY_16Q_11_4: int
|
| 38 |
+
STEREO_OUTPUT_FORMAT_DISPARITY_16Q_11_4: int
|
| 39 |
+
StereoOutputFormat = int
|
| 40 |
+
"""One of [StereoOutputFormat_DEPTH_FLOAT16, STEREO_OUTPUT_FORMAT_DEPTH_FLOAT16, StereoOutputFormat_DEPTH_FLOAT32, STEREO_OUTPUT_FORMAT_DEPTH_FLOAT32, StereoOutputFormat_DISPARITY_FIXED16_11_5, STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_11_5, StereoOutputFormat_DISPARITY_FIXED16_12_4, STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_12_4, StereoOutputFormat_DEPTH_16F, STEREO_OUTPUT_FORMAT_DEPTH_16F, StereoOutputFormat_DEPTH_32F, STEREO_OUTPUT_FORMAT_DEPTH_32F, StereoOutputFormat_DISPARITY_16Q_10_5, STEREO_OUTPUT_FORMAT_DISPARITY_16Q_10_5, StereoOutputFormat_DISPARITY_16Q_11_4, STEREO_OUTPUT_FORMAT_DISPARITY_16Q_11_4]"""
|
| 41 |
+
|
| 42 |
+
CV_BOOL: int
|
| 43 |
+
CV_INT: int
|
| 44 |
+
CV_INT64: int
|
| 45 |
+
CV_UINT64: int
|
| 46 |
+
CV_DOUBLE: int
|
| 47 |
+
CV_FLOAT: int
|
| 48 |
+
CV_STRING: int
|
| 49 |
+
CV_POINT: int
|
| 50 |
+
CV_POINT2F: int
|
| 51 |
+
CV_POINT3F: int
|
| 52 |
+
CV_SIZE: int
|
| 53 |
+
CV_RECT: int
|
| 54 |
+
CV_SCALAR: int
|
| 55 |
+
CV_MAT: int
|
| 56 |
+
CV_GMAT: int
|
| 57 |
+
CV_DRAW_PRIM: int
|
| 58 |
+
CV_ANY: int
|
| 59 |
+
ArgType = int
|
| 60 |
+
"""One of [CV_BOOL, CV_INT, CV_INT64, CV_UINT64, CV_DOUBLE, CV_FLOAT, CV_STRING, CV_POINT, CV_POINT2F, CV_POINT3F, CV_SIZE, CV_RECT, CV_SCALAR, CV_MAT, CV_GMAT, CV_DRAW_PRIM, CV_ANY]"""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
# Classes
|
| 65 |
+
class GNetParam:
|
| 66 |
+
...
|
| 67 |
+
|
| 68 |
+
class GNetPackage:
|
| 69 |
+
# Functions
|
| 70 |
+
@_typing.overload
|
| 71 |
+
def __init__(self) -> None: ...
|
| 72 |
+
@_typing.overload
|
| 73 |
+
def __init__(self, nets: _typing.Sequence[GNetParam]) -> None: ...
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
# Functions
|
| 78 |
+
def BGR2Gray(src: cv2.GMat) -> cv2.GMat: ...
|
| 79 |
+
|
| 80 |
+
def BGR2I420(src: cv2.GMat) -> cv2.GMat: ...
|
| 81 |
+
|
| 82 |
+
def BGR2LUV(src: cv2.GMat) -> cv2.GMat: ...
|
| 83 |
+
|
| 84 |
+
def BGR2RGB(src: cv2.GMat) -> cv2.GMat: ...
|
| 85 |
+
|
| 86 |
+
def BGR2YUV(src: cv2.GMat) -> cv2.GMat: ...
|
| 87 |
+
|
| 88 |
+
def BayerGR2RGB(src_gr: cv2.GMat) -> cv2.GMat: ...
|
| 89 |
+
|
| 90 |
+
def Canny(image: cv2.GMat, threshold1: float, threshold2: float, apertureSize: int = ..., L2gradient: bool = ...) -> cv2.GMat: ...
|
| 91 |
+
|
| 92 |
+
def I4202BGR(src: cv2.GMat) -> cv2.GMat: ...
|
| 93 |
+
|
| 94 |
+
def I4202RGB(src: cv2.GMat) -> cv2.GMat: ...
|
| 95 |
+
|
| 96 |
+
def LUT(src: cv2.GMat, lut: cv2.typing.MatLike) -> cv2.GMat: ...
|
| 97 |
+
|
| 98 |
+
def LUV2BGR(src: cv2.GMat) -> cv2.GMat: ...
|
| 99 |
+
|
| 100 |
+
def Laplacian(src: cv2.GMat, ddepth: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> cv2.GMat: ...
|
| 101 |
+
|
| 102 |
+
def NV12toBGR(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
|
| 103 |
+
|
| 104 |
+
def NV12toGray(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
|
| 105 |
+
|
| 106 |
+
def NV12toRGB(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
|
| 107 |
+
|
| 108 |
+
@_typing.overload
|
| 109 |
+
def RGB2Gray(src: cv2.GMat) -> cv2.GMat: ...
|
| 110 |
+
@_typing.overload
|
| 111 |
+
def RGB2Gray(src: cv2.GMat, rY: float, gY: float, bY: float) -> cv2.GMat: ...
|
| 112 |
+
|
| 113 |
+
def RGB2HSV(src: cv2.GMat) -> cv2.GMat: ...
|
| 114 |
+
|
| 115 |
+
def RGB2I420(src: cv2.GMat) -> cv2.GMat: ...
|
| 116 |
+
|
| 117 |
+
def RGB2Lab(src: cv2.GMat) -> cv2.GMat: ...
|
| 118 |
+
|
| 119 |
+
def RGB2YUV(src: cv2.GMat) -> cv2.GMat: ...
|
| 120 |
+
|
| 121 |
+
def RGB2YUV422(src: cv2.GMat) -> cv2.GMat: ...
|
| 122 |
+
|
| 123 |
+
def Sobel(src: cv2.GMat, ddepth: int, dx: int, dy: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
|
| 124 |
+
|
| 125 |
+
def SobelXY(src: cv2.GMat, ddepth: int, order: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
|
| 126 |
+
|
| 127 |
+
def YUV2BGR(src: cv2.GMat) -> cv2.GMat: ...
|
| 128 |
+
|
| 129 |
+
def YUV2RGB(src: cv2.GMat) -> cv2.GMat: ...
|
| 130 |
+
|
| 131 |
+
def absDiff(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
|
| 132 |
+
|
| 133 |
+
def absDiffC(src: cv2.GMat, c: cv2.GScalar) -> cv2.GMat: ...
|
| 134 |
+
|
| 135 |
+
def add(src1: cv2.GMat, src2: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
|
| 136 |
+
|
| 137 |
+
@_typing.overload
|
| 138 |
+
def addC(src1: cv2.GMat, c: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
|
| 139 |
+
@_typing.overload
|
| 140 |
+
def addC(c: cv2.GScalar, src1: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
|
| 141 |
+
|
| 142 |
+
def addWeighted(src1: cv2.GMat, alpha: float, src2: cv2.GMat, beta: float, gamma: float, ddepth: int = ...) -> cv2.GMat: ...
|
| 143 |
+
|
| 144 |
+
def bilateralFilter(src: cv2.GMat, d: int, sigmaColor: float, sigmaSpace: float, borderType: int = ...) -> cv2.GMat: ...
|
| 145 |
+
|
| 146 |
+
@_typing.overload
|
| 147 |
+
def bitwise_and(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
|
| 148 |
+
@_typing.overload
|
| 149 |
+
def bitwise_and(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
|
| 150 |
+
|
| 151 |
+
def bitwise_not(src: cv2.GMat) -> cv2.GMat: ...
|
| 152 |
+
|
| 153 |
+
@_typing.overload
|
| 154 |
+
def bitwise_or(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
|
| 155 |
+
@_typing.overload
|
| 156 |
+
def bitwise_or(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
|
| 157 |
+
|
| 158 |
+
@_typing.overload
|
| 159 |
+
def bitwise_xor(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
|
| 160 |
+
@_typing.overload
|
| 161 |
+
def bitwise_xor(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
|
| 162 |
+
|
| 163 |
+
def blur(src: cv2.GMat, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
|
| 164 |
+
|
| 165 |
+
@_typing.overload
|
| 166 |
+
def boundingRect(src: cv2.GMat) -> cv2.GOpaqueT: ...
|
| 167 |
+
@_typing.overload
|
| 168 |
+
def boundingRect(src: cv2.GArrayT) -> cv2.GOpaqueT: ...
|
| 169 |
+
@_typing.overload
|
| 170 |
+
def boundingRect(src: cv2.GArrayT) -> cv2.GOpaqueT: ...
|
| 171 |
+
|
| 172 |
+
def boxFilter(src: cv2.GMat, dtype: int, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
|
| 173 |
+
|
| 174 |
+
def cartToPolar(x: cv2.GMat, y: cv2.GMat, angleInDegrees: bool = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
|
| 175 |
+
|
| 176 |
+
@_typing.overload
|
| 177 |
+
def cmpEQ(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
|
| 178 |
+
@_typing.overload
|
| 179 |
+
def cmpEQ(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
|
| 180 |
+
|
| 181 |
+
# Generated G-API operation stubs (comparison ops onward).
# Signatures mirror the C++ cv::gapi operations; bodies are intentionally
# empty (`...`) — this is a .pyi type-stub file.

@_typing.overload
def cmpGE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def cmpGE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...

@_typing.overload
def cmpGT(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def cmpGT(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...

@_typing.overload
def cmpLE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def cmpLE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...

@_typing.overload
def cmpLT(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def cmpLT(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...

@_typing.overload
def cmpNE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def cmpNE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...

def combine(lhs: cv2.GKernelPackage, rhs: cv2.GKernelPackage) -> cv2.GKernelPackage: ...

@_typing.overload
def concatHor(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def concatHor(v: _typing.Sequence[cv2.GMat]) -> cv2.GMat: ...

@_typing.overload
def concatVert(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
@_typing.overload
def concatVert(v: _typing.Sequence[cv2.GMat]) -> cv2.GMat: ...

def convertTo(src: cv2.GMat, rdepth: int, alpha: float = ..., beta: float = ...) -> cv2.GMat: ...

def copy(in_: cv2.GMat) -> cv2.GMat: ...

def countNonZero(src: cv2.GMat) -> cv2.GOpaqueT: ...

def crop(src: cv2.GMat, rect: cv2.typing.Rect) -> cv2.GMat: ...

def dilate(src: cv2.GMat, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def dilate3x3(src: cv2.GMat, iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def div(src1: cv2.GMat, src2: cv2.GMat, scale: float, ddepth: int = ...) -> cv2.GMat: ...

def divC(src: cv2.GMat, divisor: cv2.GScalar, scale: float, ddepth: int = ...) -> cv2.GMat: ...

def divRC(divident: cv2.GScalar, src: cv2.GMat, scale: float, ddepth: int = ...) -> cv2.GMat: ...

def equalizeHist(src: cv2.GMat) -> cv2.GMat: ...

def erode(src: cv2.GMat, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def erode3x3(src: cv2.GMat, iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def filter2D(src: cv2.GMat, ddepth: int, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., delta: cv2.typing.Scalar = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def flip(src: cv2.GMat, flipCode: int) -> cv2.GMat: ...

def gaussianBlur(src: cv2.GMat, ksize: cv2.typing.Size, sigmaX: float, sigmaY: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def goodFeaturesToTrack(image: cv2.GMat, maxCorners: int, qualityLevel: float, minDistance: float, mask: cv2.typing.MatLike | None = ..., blockSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> cv2.GArrayT: ...

def inRange(src: cv2.GMat, threshLow: cv2.GScalar, threshUp: cv2.GScalar) -> cv2.GMat: ...

@_typing.overload
def infer(name: str, inputs: cv2.GInferInputs) -> cv2.GInferOutputs: ...
@_typing.overload
def infer(name: str, roi: cv2.GOpaqueT, inputs: cv2.GInferInputs) -> cv2.GInferOutputs: ...
@_typing.overload
def infer(name: str, rois: cv2.GArrayT, inputs: cv2.GInferInputs) -> cv2.GInferListOutputs: ...

def infer2(name: str, in_: cv2.GMat, inputs: cv2.GInferListInputs) -> cv2.GInferListOutputs: ...

def integral(src: cv2.GMat, sdepth: int = ..., sqdepth: int = ...) -> tuple[cv2.GMat, cv2.GMat]: ...

@_typing.overload
def kmeans(data: cv2.GMat, K: int, bestLabels: cv2.GMat, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GMat, cv2.GMat]: ...
@_typing.overload
def kmeans(data: cv2.GMat, K: int, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GMat, cv2.GMat]: ...
@_typing.overload
def kmeans(data: cv2.GArrayT, K: int, bestLabels: cv2.GArrayT, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GArrayT, cv2.GArrayT]: ...
# NOTE(review): the overload below is a byte-for-byte duplicate of the one
# above — an artifact of the stub generator; harmless but redundant.
@_typing.overload
def kmeans(data: cv2.GArrayT, K: int, bestLabels: cv2.GArrayT, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GArrayT, cv2.GArrayT]: ...

def mask(src: cv2.GMat, mask: cv2.GMat) -> cv2.GMat: ...

def max(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...

def mean(src: cv2.GMat) -> cv2.GScalar: ...

def medianBlur(src: cv2.GMat, ksize: int) -> cv2.GMat: ...

def merge3(src1: cv2.GMat, src2: cv2.GMat, src3: cv2.GMat) -> cv2.GMat: ...

def merge4(src1: cv2.GMat, src2: cv2.GMat, src3: cv2.GMat, src4: cv2.GMat) -> cv2.GMat: ...

def min(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...

def morphologyEx(src: cv2.GMat, op: cv2.MorphTypes, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: cv2.BorderTypes = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def mul(src1: cv2.GMat, src2: cv2.GMat, scale: float = ..., ddepth: int = ...) -> cv2.GMat: ...

@_typing.overload
def mulC(src: cv2.GMat, multiplier: float, ddepth: int = ...) -> cv2.GMat: ...
@_typing.overload
def mulC(src: cv2.GMat, multiplier: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
@_typing.overload
def mulC(multiplier: cv2.GScalar, src: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...

def normInf(src: cv2.GMat) -> cv2.GScalar: ...

def normL1(src: cv2.GMat) -> cv2.GScalar: ...

def normL2(src: cv2.GMat) -> cv2.GScalar: ...

def normalize(src: cv2.GMat, alpha: float, beta: float, norm_type: int, ddepth: int = ...) -> cv2.GMat: ...

@_typing.overload
def parseSSD(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float = ..., filterLabel: int = ...) -> tuple[cv2.GArrayT, cv2.GArrayT]: ...
@_typing.overload
def parseSSD(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float, alignmentToSquare: bool, filterOutOfBounds: bool) -> cv2.GArrayT: ...

def parseYolo(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float = ..., nmsThreshold: float = ..., anchors: _typing.Sequence[float] = ...) -> tuple[cv2.GArrayT, cv2.GArrayT]: ...

def phase(x: cv2.GMat, y: cv2.GMat, angleInDegrees: bool = ...) -> cv2.GMat: ...

def polarToCart(magnitude: cv2.GMat, angle: cv2.GMat, angleInDegrees: bool = ...) -> tuple[cv2.GMat, cv2.GMat]: ...

def remap(src: cv2.GMat, map1: cv2.typing.MatLike, map2: cv2.typing.MatLike, interpolation: int, borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def resize(src: cv2.GMat, dsize: cv2.typing.Size, fx: float = ..., fy: float = ..., interpolation: int = ...) -> cv2.GMat: ...

def select(src1: cv2.GMat, src2: cv2.GMat, mask: cv2.GMat) -> cv2.GMat: ...

def sepFilter(src: cv2.GMat, ddepth: int, kernelX: cv2.typing.MatLike, kernelY: cv2.typing.MatLike, anchor: cv2.typing.Point, delta: cv2.typing.Scalar, borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def split3(src: cv2.GMat) -> tuple[cv2.GMat, cv2.GMat, cv2.GMat]: ...

def split4(src: cv2.GMat) -> tuple[cv2.GMat, cv2.GMat, cv2.GMat, cv2.GMat]: ...

def sqrt(src: cv2.GMat) -> cv2.GMat: ...

def sub(src1: cv2.GMat, src2: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...

def subC(src: cv2.GMat, c: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...

def subRC(c: cv2.GScalar, src: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...

def sum(src: cv2.GMat) -> cv2.GScalar: ...

@_typing.overload
def threshold(src: cv2.GMat, thresh: cv2.GScalar, maxval: cv2.GScalar, type: int) -> cv2.GMat: ...
@_typing.overload
def threshold(src: cv2.GMat, maxval: cv2.GScalar, type: int) -> tuple[cv2.GMat, cv2.GScalar]: ...

def transpose(src: cv2.GMat) -> cv2.GMat: ...

def warpAffine(src: cv2.GMat, M: cv2.typing.MatLike, dsize: cv2.typing.Size, flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...

def warpPerspective(src: cv2.GMat, M: cv2.typing.MatLike, dsize: cv2.typing.Size, flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
|
| 348 |
+
|
| 349 |
+
|
parrot/lib/python3.10/site-packages/cv2/gapi/wip/__init__.pyi
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated type stubs for cv2.gapi.wip (work-in-progress streaming API).
__all__: list[str] = []

import cv2
import cv2.gapi
import cv2.gapi.wip.gst
import cv2.typing
import typing as _typing

from cv2.gapi.wip import draw as draw
from cv2.gapi.wip import gst as gst
from cv2.gapi.wip import onevpl as onevpl


# Classes
class GOutputs:
    # Accessors for typed outputs of a dynamically built G-API graph.
    # Functions
    def getGMat(self) -> cv2.GMat: ...

    def getGScalar(self) -> cv2.GScalar: ...

    def getGArray(self, type: cv2.gapi.ArgType) -> cv2.GArrayT: ...

    def getGOpaque(self, type: cv2.gapi.ArgType) -> cv2.GOpaqueT: ...


class IStreamSource:
    # Opaque base for streaming sources; constructed via the factory
    # functions below, never instantiated directly from Python.
    ...


# Functions
def get_streaming_source(pipeline: cv2.gapi.wip.gst.GStreamerPipeline, appsinkName: str, outputType: cv2.gapi.wip.gst.GStreamerSource_OutputType = ...) -> IStreamSource: ...

@_typing.overload
def make_capture_src(path: str, properties: cv2.typing.map_int_and_double = ...) -> IStreamSource: ...
@_typing.overload
def make_capture_src(id: int, properties: cv2.typing.map_int_and_double = ...) -> IStreamSource: ...

def make_gst_src(pipeline: str, outputType: cv2.gapi.wip.gst.GStreamerSource_OutputType = ...) -> IStreamSource: ...
|
| 40 |
+
|
| 41 |
+
|
parrot/lib/python3.10/site-packages/cv2/gapi/wip/gst/__init__.pyi
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated type stubs for cv2.gapi.wip.gst (GStreamer source bindings).
__all__: list[str] = []

# Enum-like integer constants; each value is exposed under both its
# CamelCase and SCREAMING_SNAKE alias by the binding generator.
GStreamerSource_OutputType_FRAME: int
GSTREAMER_SOURCE_OUTPUT_TYPE_FRAME: int
GStreamerSource_OutputType_MAT: int
GSTREAMER_SOURCE_OUTPUT_TYPE_MAT: int
GStreamerSource_OutputType = int
"""One of [GStreamerSource_OutputType_FRAME, GSTREAMER_SOURCE_OUTPUT_TYPE_FRAME, GStreamerSource_OutputType_MAT, GSTREAMER_SOURCE_OUTPUT_TYPE_MAT]"""


# Classes
class GStreamerPipeline:
    # Functions
    def __init__(self, pipeline: str) -> None: ...
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
|
parrot/lib/python3.10/site-packages/cv2/ipp/__init__.pyi
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated type stubs for cv2.ipp (Intel IPP acceleration toggles).
__all__: list[str] = []

# Functions
def getIppVersion() -> str: ...

def setUseIPP(flag: bool) -> None: ...

def setUseIPP_NotExact(flag: bool) -> None: ...

def useIPP() -> bool: ...

def useIPP_NotExact() -> bool: ...
|
| 13 |
+
|
| 14 |
+
|
parrot/lib/python3.10/site-packages/cv2/load_config_py2.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# flake8: noqa
import sys

if sys.version_info[:2] < (3, 0):
    # Python 2 only: delegate to the builtin execfile(), which does not
    # exist on Python 3.  On Python 3 this branch is skipped entirely
    # (presumably load_config_py3 provides the equivalent wrapper — the
    # sibling module carries the mirrored version check).
    def exec_file_wrapper(fpath, g_vars, l_vars):
        # Execute the Python file at ``fpath`` using the supplied
        # global/local variable dicts.
        execfile(fpath, g_vars, l_vars)
|
parrot/lib/python3.10/site-packages/cv2/load_config_py3.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# flake8: noqa
import os
import sys

if sys.version_info[:2] >= (3, 0):
    def exec_file_wrapper(fpath, g_vars, l_vars):
        # Execute the Python source file at ``fpath`` against the supplied
        # global/local dicts.  The source is compiled with its basename as
        # the filename so tracebacks point at the config file itself.
        with open(fpath) as source:
            compiled = compile(source.read(), os.path.basename(fpath), 'exec')
            exec(compiled, g_vars, l_vars)
|
parrot/lib/python3.10/site-packages/cv2/py.typed
ADDED
|
File without changes
|
parrot/lib/python3.10/site-packages/cv2/version.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Build metadata for this opencv-python wheel (written at package build time).
# Flag semantics are inferred from the names — verify against the
# opencv-python build scripts if they matter:
opencv_version = "4.10.0.84"  # OpenCV version + wheel build number
contrib = False    # presumably: opencv-contrib modules not included
headless = False   # presumably: GUI (highgui) support included
rolling = False    # presumably: not a rolling/nightly build
ci_build = True    # presumably: produced by the official CI pipeline
|
parrot/lib/python3.10/site-packages/cycler/__init__.py
ADDED
|
@@ -0,0 +1,573 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Cycler
|
| 3 |
+
======
|
| 4 |
+
|
| 5 |
+
Cycling through combinations of values, producing dictionaries.
|
| 6 |
+
|
| 7 |
+
You can add cyclers::
|
| 8 |
+
|
| 9 |
+
from cycler import cycler
|
| 10 |
+
cc = (cycler(color=list('rgb')) +
|
| 11 |
+
cycler(linestyle=['-', '--', '-.']))
|
| 12 |
+
for d in cc:
|
| 13 |
+
print(d)
|
| 14 |
+
|
| 15 |
+
Results in::
|
| 16 |
+
|
| 17 |
+
{'color': 'r', 'linestyle': '-'}
|
| 18 |
+
{'color': 'g', 'linestyle': '--'}
|
| 19 |
+
{'color': 'b', 'linestyle': '-.'}
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
You can multiply cyclers::
|
| 23 |
+
|
| 24 |
+
from cycler import cycler
|
| 25 |
+
cc = (cycler(color=list('rgb')) *
|
| 26 |
+
cycler(linestyle=['-', '--', '-.']))
|
| 27 |
+
for d in cc:
|
| 28 |
+
print(d)
|
| 29 |
+
|
| 30 |
+
Results in::
|
| 31 |
+
|
| 32 |
+
{'color': 'r', 'linestyle': '-'}
|
| 33 |
+
{'color': 'r', 'linestyle': '--'}
|
| 34 |
+
{'color': 'r', 'linestyle': '-.'}
|
| 35 |
+
{'color': 'g', 'linestyle': '-'}
|
| 36 |
+
{'color': 'g', 'linestyle': '--'}
|
| 37 |
+
{'color': 'g', 'linestyle': '-.'}
|
| 38 |
+
{'color': 'b', 'linestyle': '-'}
|
| 39 |
+
{'color': 'b', 'linestyle': '--'}
|
| 40 |
+
{'color': 'b', 'linestyle': '-.'}
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
from __future__ import annotations
|
| 45 |
+
|
| 46 |
+
from collections.abc import Hashable, Iterable, Generator
|
| 47 |
+
import copy
|
| 48 |
+
from functools import reduce
|
| 49 |
+
from itertools import product, cycle
|
| 50 |
+
from operator import mul, add
|
| 51 |
+
# Dict, List, Union required for runtime cast calls
|
| 52 |
+
from typing import TypeVar, Generic, Callable, Union, Dict, List, Any, overload, cast
|
| 53 |
+
|
| 54 |
+
__version__ = "0.12.1"
|
| 55 |
+
|
| 56 |
+
K = TypeVar("K", bound=Hashable)
|
| 57 |
+
L = TypeVar("L", bound=Hashable)
|
| 58 |
+
V = TypeVar("V")
|
| 59 |
+
U = TypeVar("U")
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _process_keys(
|
| 63 |
+
left: Cycler[K, V] | Iterable[dict[K, V]],
|
| 64 |
+
right: Cycler[K, V] | Iterable[dict[K, V]] | None,
|
| 65 |
+
) -> set[K]:
|
| 66 |
+
"""
|
| 67 |
+
Helper function to compose cycler keys.
|
| 68 |
+
|
| 69 |
+
Parameters
|
| 70 |
+
----------
|
| 71 |
+
left, right : iterable of dictionaries or None
|
| 72 |
+
The cyclers to be composed.
|
| 73 |
+
|
| 74 |
+
Returns
|
| 75 |
+
-------
|
| 76 |
+
keys : set
|
| 77 |
+
The keys in the composition of the two cyclers.
|
| 78 |
+
"""
|
| 79 |
+
l_peek: dict[K, V] = next(iter(left)) if left != [] else {}
|
| 80 |
+
r_peek: dict[K, V] = next(iter(right)) if right is not None else {}
|
| 81 |
+
l_key: set[K] = set(l_peek.keys())
|
| 82 |
+
r_key: set[K] = set(r_peek.keys())
|
| 83 |
+
if l_key & r_key:
|
| 84 |
+
raise ValueError("Can not compose overlapping cycles")
|
| 85 |
+
return l_key | r_key
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def concat(left: Cycler[K, V], right: Cycler[K, U]) -> Cycler[K, V | U]:
    r"""
    Concatenate `Cycler`\s, as if chained using `itertools.chain`.

    The keys must match exactly.

    Examples
    --------
    >>> num = cycler('a', range(3))
    >>> let = cycler('a', 'abc')
    >>> num.concat(let)
    cycler('a', [0, 1, 2, 'a', 'b', 'c'])

    Returns
    -------
    `Cycler`
        The concatenated cycler.

    Raises
    ------
    ValueError
        If the two cyclers do not have identical key sets.
    """
    if left.keys != right.keys:
        raise ValueError(
            "Keys do not match:\n"
            "\tIntersection: {both!r}\n"
            "\tDisjoint: {just_one!r}".format(
                both=left.keys & right.keys, just_one=left.keys ^ right.keys
            )
        )
    # The casts are for the type checker only: by_key() returns per-key value
    # lists, which we concatenate pairwise and re-sum into a single Cycler.
    _l = cast(Dict[K, List[Union[V, U]]], left.by_key())
    _r = cast(Dict[K, List[Union[V, U]]], right.by_key())
    return reduce(add, (_cycler(k, _l[k] + _r[k]) for k in left.keys))
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
class Cycler(Generic[K, V]):
    """
    Composable cycles.

    This class has compositions methods:

    ``+``
      for 'inner' products (zip)

    ``+=``
      in-place ``+``

    ``*``
      for outer products (`itertools.product`) and integer multiplication

    ``*=``
      in-place ``*``

    and supports basic slicing via ``[]``.

    Parameters
    ----------
    left, right : Cycler or None
        The 'left' and 'right' cyclers.
    op : func or None
        Function which composes the 'left' and 'right' cyclers.
    """

    def __call__(self):
        # Return an infinite iterator cycling over this cycler's dicts.
        return cycle(self)

    def __init__(
        self,
        left: Cycler[K, V] | Iterable[dict[K, V]] | None,
        right: Cycler[K, V] | None = None,
        op: Any = None,
    ):
        """
        Semi-private init.

        Do not use this directly, use `cycler` function instead.
        """
        # Defensively deep-copy the left operand so later mutation of the
        # source Cycler/iterable cannot affect this instance.
        if isinstance(left, Cycler):
            self._left: Cycler[K, V] | list[dict[K, V]] = Cycler(
                left._left, left._right, left._op
            )
        elif left is not None:
            # Need to copy the dictionary or else that will be a residual
            # mutable that could lead to strange errors
            self._left = [copy.copy(v) for v in left]
        else:
            self._left = []

        if isinstance(right, Cycler):
            self._right: Cycler[K, V] | None = Cycler(
                right._left, right._right, right._op
            )
        else:
            self._right = None

        # _process_keys also validates that left/right keys are disjoint.
        self._keys: set[K] = _process_keys(self._left, self._right)
        self._op: Any = op

    def __contains__(self, k):
        # Membership test against this cycler's key set.
        return k in self._keys

    @property
    def keys(self) -> set[K]:
        """The keys this Cycler knows about."""
        # Return a copy so callers cannot mutate internal state.
        return set(self._keys)

    def change_key(self, old: K, new: K) -> None:
        """
        Change a key in this cycler to a new name.
        Modification is performed in-place.

        Does nothing if the old key is the same as the new key.
        Raises a ValueError if the new key is already a key.
        Raises a KeyError if the old key isn't a key.
        """
        if old == new:
            return
        if new in self._keys:
            raise ValueError(
                f"Can't replace {old} with {new}, {new} is already a key"
            )
        if old not in self._keys:
            raise KeyError(
                f"Can't replace {old} with {new}, {old} is not a key"
            )

        self._keys.remove(old)
        self._keys.add(new)

        # Recurse into whichever operand actually owns the old key.
        if self._right is not None and old in self._right.keys:
            self._right.change_key(old, new)

        # self._left should always be non-None
        # if self._keys is non-empty.
        elif isinstance(self._left, Cycler):
            self._left.change_key(old, new)
        else:
            # It should be completely safe at this point to
            # assume that the old key can be found in each
            # iteration.
            self._left = [{new: entry[old]} for entry in self._left]

    @classmethod
    def _from_iter(cls, label: K, itr: Iterable[V]) -> Cycler[K, V]:
        """
        Class method to create 'base' Cycler objects
        that do not have a 'right' or 'op' and for which
        the 'left' object is not another Cycler.

        Parameters
        ----------
        label : hashable
            The property key.

        itr : iterable
            Finite length iterable of the property values.

        Returns
        -------
        `Cycler`
            New 'base' cycler.
        """
        ret: Cycler[K, V] = cls(None)
        ret._left = list({label: v} for v in itr)
        ret._keys = {label}
        return ret

    def __getitem__(self, key: slice) -> Cycler[K, V]:
        # TODO : maybe add numpy style fancy slicing
        if isinstance(key, slice):
            # Slice each key's value list, then re-sum into a new Cycler.
            trans = self.by_key()
            return reduce(add, (_cycler(k, v[key]) for k, v in trans.items()))
        else:
            raise ValueError("Can only use slices with Cycler.__getitem__")

    def __iter__(self) -> Generator[dict[K, V], None, None]:
        if self._right is None:
            for left in self._left:
                yield dict(left)
        else:
            if self._op is None:
                raise TypeError(
                    "Operation cannot be None when both left and right are defined"
                )
            # _op is zip (for +) or itertools.product (for *); merge each
            # produced pair of dicts (keys are disjoint by construction).
            for a, b in self._op(self._left, self._right):
                out = {}
                out.update(a)
                out.update(b)
                yield out

    def __add__(self, other: Cycler[L, U]) -> Cycler[K | L, V | U]:
        """
        Pair-wise combine two equal length cyclers (zip).

        Parameters
        ----------
        other : Cycler
        """
        if len(self) != len(other):
            raise ValueError(
                f"Can only add equal length cycles, not {len(self)} and {len(other)}"
            )
        return Cycler(
            cast(Cycler[Union[K, L], Union[V, U]], self),
            cast(Cycler[Union[K, L], Union[V, U]], other),
            zip
        )

    @overload
    def __mul__(self, other: Cycler[L, U]) -> Cycler[K | L, V | U]:
        ...

    @overload
    def __mul__(self, other: int) -> Cycler[K, V]:
        ...

    def __mul__(self, other):
        """
        Outer product of two cyclers (`itertools.product`) or integer
        multiplication.

        Parameters
        ----------
        other : Cycler or int
        """
        if isinstance(other, Cycler):
            return Cycler(
                cast(Cycler[Union[K, L], Union[V, U]], self),
                cast(Cycler[Union[K, L], Union[V, U]], other),
                product
            )
        elif isinstance(other, int):
            # n * cycler repeats each key's value list n times.
            trans = self.by_key()
            return reduce(
                add, (_cycler(k, v * other) for k, v in trans.items())
            )
        else:
            return NotImplemented

    @overload
    def __rmul__(self, other: Cycler[L, U]) -> Cycler[K | L, V | U]:
        ...

    @overload
    def __rmul__(self, other: int) -> Cycler[K, V]:
        ...

    def __rmul__(self, other):
        # Both * forms are commutative here; delegate to __mul__.
        return self * other

    def __len__(self) -> int:
        # zip truncates to the shorter operand; product multiplies lengths.
        op_dict: dict[Callable, Callable[[int, int], int]] = {zip: min, product: mul}
        if self._right is None:
            return len(self._left)
        l_len = len(self._left)
        r_len = len(self._right)
        return op_dict[self._op](l_len, r_len)

    # iadd and imul do not exapand the the type as the returns must be consistent with
    # self, thus they flag as inconsistent with add/mul
    def __iadd__(self, other: Cycler[K, V]) -> Cycler[K, V]:  # type: ignore[misc]
        """
        In-place pair-wise combine two equal length cyclers (zip).

        Parameters
        ----------
        other : Cycler
        """
        if not isinstance(other, Cycler):
            raise TypeError("Cannot += with a non-Cycler object")
        # True shallow copy of self is fine since this is in-place
        old_self = copy.copy(self)
        self._keys = _process_keys(old_self, other)
        self._left = old_self
        self._op = zip
        self._right = Cycler(other._left, other._right, other._op)
        return self

    def __imul__(self, other: Cycler[K, V] | int) -> Cycler[K, V]:  # type: ignore[misc]
        """
        In-place outer product of two cyclers (`itertools.product`).

        Parameters
        ----------
        other : Cycler
        """
        if not isinstance(other, Cycler):
            raise TypeError("Cannot *= with a non-Cycler object")
        # True shallow copy of self is fine since this is in-place
        old_self = copy.copy(self)
        self._keys = _process_keys(old_self, other)
        self._left = old_self
        self._op = product
        self._right = Cycler(other._left, other._right, other._op)
        return self

    def __eq__(self, other: object) -> bool:
        # Equal iff same length, same key set, and element-wise equal dicts.
        if not isinstance(other, Cycler):
            return False
        if len(self) != len(other):
            return False
        if self.keys ^ other.keys:
            return False
        return all(a == b for a, b in zip(self, other))

    # Mutable (and defines __eq__), therefore explicitly unhashable.
    __hash__ = None  # type: ignore

    def __repr__(self) -> str:
        op_map = {zip: "+", product: "*"}
        if self._right is None:
            # Single-key base cycler: render in `cycler(label, values)` form.
            # (self.keys returns a copy, so pop() does not mutate state.)
            lab = self.keys.pop()
            itr = list(v[lab] for v in self)
            return f"cycler({lab!r}, {itr!r})"
        else:
            op = op_map.get(self._op, "?")
            msg = "({left!r} {op} {right!r})"
            return msg.format(left=self._left, op=op, right=self._right)

    def _repr_html_(self) -> str:
        # an table showing the value of each key through a full cycle
        output = "<table>"
        sorted_keys = sorted(self.keys, key=repr)
        for key in sorted_keys:
            output += f"<th>{key!r}</th>"
        for d in iter(self):
            output += "<tr>"
            for k in sorted_keys:
                output += f"<td>{d[k]!r}</td>"
            output += "</tr>"
        output += "</table>"
        return output

    def by_key(self) -> dict[K, list[V]]:
        """
        Values by key.

        This returns the transposed values of the cycler. Iterating
        over a `Cycler` yields dicts with a single value for each key,
        this method returns a `dict` of `list` which are the values
        for the given key.

        The returned value can be used to create an equivalent `Cycler`
        using only `+`.

        Returns
        -------
        transpose : dict
            dict of lists of the values for each key.
        """

        # TODO : sort out if this is a bottle neck, if there is a better way
        # and if we care.

        keys = self.keys
        out: dict[K, list[V]] = {k: list() for k in keys}

        for d in self:
            for k in keys:
                out[k].append(d[k])
        return out

    # for back compatibility
    _transpose = by_key

    def simplify(self) -> Cycler[K, V]:
        """
        Simplify the cycler into a sum (but no products) of cyclers.

        Returns
        -------
        simple : Cycler
        """
        # TODO: sort out if it is worth the effort to make sure this is
        # balanced. Currently it is is
        # (((a + b) + c) + d) vs
        # ((a + b) + (c + d))
        # I would believe that there is some performance implications
        trans = self.by_key()
        return reduce(add, (_cycler(k, v) for k, v in trans.items()))

    # Expose the module-level concat as a method.
    concat = concat
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
# Typed overloads for cycler(): copy-constructor form, keyword form, and
# (label, iterable) form.  The single implementation below accepts all three.
@overload
def cycler(arg: Cycler[K, V]) -> Cycler[K, V]:
    ...


@overload
def cycler(**kwargs: Iterable[V]) -> Cycler[str, V]:
    ...


@overload
def cycler(label: K, itr: Iterable[V]) -> Cycler[K, V]:
    ...
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def cycler(*args, **kwargs):
    """
    Create a new `Cycler` object.

    Accepted call forms::

        cycler(arg)                              # 1: shallow-copy a Cycler
        cycler(label1=itr1[, label2=itr2, ...])  # 2: inner product of kwargs
        cycler(label, itr)                       # 3: explicit label + iterable

    Form 1 copies an existing `Cycler`.  Form 2 composes the keyword
    arguments as an inner product (all iterables cycled simultaneously, as
    if zipped).  Form 3 is for labels that cannot be keyword arguments
    (e.g. an integer or a name containing a space).

    Parameters
    ----------
    arg : Cycler
        Copy constructor for Cycler (does a shallow copy of iterables).
    label : name
        The property key; any hashable in the 2-arg form, a valid Python
        identifier in the keyword form.
    itr : iterable
        Finite length iterable of the property values.  May be a
        single-property `Cycler`, acting as a key change (shallow copy).

    Returns
    -------
    cycler : Cycler
        New `Cycler` for the given property

    Raises
    ------
    TypeError
        If positional and keyword arguments are mixed, or the arguments do
        not match any of the three forms above.
    """
    if args and kwargs:
        raise TypeError(
            "cycler() can only accept positional OR keyword arguments -- not both."
        )

    n_positional = len(args)
    if n_positional == 1:
        (only,) = args
        if not isinstance(only, Cycler):
            raise TypeError(
                "If only one positional argument given, it must "
                "be a Cycler instance."
            )
        return Cycler(only)
    if n_positional == 2:
        return _cycler(*args)
    if n_positional > 2:
        raise TypeError(
            "Only a single Cycler can be accepted as the lone "
            "positional argument. Use keyword arguments instead."
        )

    if kwargs:
        # Inner product: sum the single-property cyclers built per keyword.
        return reduce(add, (_cycler(name, values) for name, values in kwargs.items()))

    raise TypeError("Must have at least a positional OR keyword arguments")
|
| 544 |
+
|
| 545 |
+
|
| 546 |
+
def _cycler(label: K, itr: Iterable[V]) -> Cycler[K, V]:
    """
    Create a new `Cycler` object from a property name and iterable of values.

    Parameters
    ----------
    label : hashable
        The property key.
    itr : iterable
        Finite length iterable of the property values.  May also be a
        single-property `Cycler`, whose values are re-labelled.

    Returns
    -------
    cycler : Cycler
        New `Cycler` for the given property

    Raises
    ------
    ValueError
        If *itr* is a `Cycler` carrying more than one property.
    """
    if isinstance(itr, Cycler):
        source_keys = itr.keys
        if len(source_keys) != 1:
            raise ValueError("Can not create Cycler from a multi-property Cycler")

        inner_label = source_keys.pop()
        # A generator suffices: _from_iter() materializes its own list anyway.
        itr = (entry[inner_label] for entry in itr)

    return Cycler._from_iter(label, itr)
|
parrot/lib/python3.10/site-packages/cycler/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (16.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/cycler/py.typed
ADDED
|
File without changes
|
parrot/lib/python3.10/site-packages/example.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#################################################################################
|
| 2 |
+
# Copyright (c) 2020, NVIDIA Corporation. All rights reserved. #
|
| 3 |
+
# #
|
| 4 |
+
# Redistribution and use in source and binary forms, with or without #
|
| 5 |
+
# modification, are permitted provided that the following conditions are met: #
|
| 6 |
+
# #
|
| 7 |
+
# * Redistributions of source code must retain the above copyright notice, #
|
| 8 |
+
# this list of conditions and the following disclaimer. #
|
| 9 |
+
# * Redistributions in binary form must reproduce the above copyright #
|
| 10 |
+
# notice, this list of conditions and the following disclaimer in the #
|
| 11 |
+
# documentation and/or other materials provided with the distribution. #
|
| 12 |
+
# * Neither the name of the NVIDIA Corporation nor the names of its #
|
| 13 |
+
# contributors may be used to endorse or promote products derived from #
|
| 14 |
+
# this software without specific prior written permission. #
|
| 15 |
+
# #
|
| 16 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" #
|
| 17 |
+
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
|
| 18 |
+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
|
| 19 |
+
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
|
| 20 |
+
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
|
| 21 |
+
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
|
| 22 |
+
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
|
| 23 |
+
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
|
| 24 |
+
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
|
| 25 |
+
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #
|
| 26 |
+
# THE POSSIBILITY OF SUCH DAMAGE. #
|
| 27 |
+
#################################################################################
|
| 28 |
+
|
| 29 |
+
#
|
| 30 |
+
# Sample script to demonstrate the usage of NVML API python bindings
|
| 31 |
+
#
|
| 32 |
+
|
| 33 |
+
# To Run:
|
| 34 |
+
# $ python ./example.py
|
| 35 |
+
|
| 36 |
+
from pynvml import *
|
| 37 |
+
|
| 38 |
+
#
|
| 39 |
+
# Helper function
|
| 40 |
+
#
|
| 41 |
+
def StrVirt(mode):
    """Map an NVML GPU virtualization-mode constant to a readable name."""
    mode_names = {
        NVML_GPU_VIRTUALIZATION_MODE_NONE: "None",
        NVML_GPU_VIRTUALIZATION_MODE_PASSTHROUGH: "Pass-Through",
        NVML_GPU_VIRTUALIZATION_MODE_VGPU: "VGPU",
        NVML_GPU_VIRTUALIZATION_MODE_HOST_VGPU: "Host VGPU",
        NVML_GPU_VIRTUALIZATION_MODE_HOST_VSGA: "Host VSGA",
    }
    # Any unrecognized mode value falls back to "Unknown", matching the
    # original elif chain's final else branch.
    return mode_names.get(mode, "Unknown")
|
| 54 |
+
|
| 55 |
+
#
|
| 56 |
+
# Converts errors into string messages
|
| 57 |
+
#
|
| 58 |
+
def handleError(err):
    """Render an NVMLError as text; unsupported queries become "N/A"."""
    if err.value == NVML_ERROR_NOT_SUPPORTED:
        return "N/A"
    return str(err)
|
| 63 |
+
|
| 64 |
+
#######
|
| 65 |
+
def deviceQuery():
    """Query every attached NVIDIA GPU via NVML and return an XML-like report.

    Initializes NVML, walks all devices collecting driver version, PCI bus
    id, product name/brand, serial, UUID, virtualization mode and vGPU
    licensing state, then shuts NVML down.  Per-field NVML errors are
    rendered inline via handleError(); a top-level NVML failure is appended
    as an 'example.py: ...' line instead of raising.
    """

    strResult = ''
    try:
        #
        # Initialize NVML
        #
        nvmlInit()

        strResult += ' <driver_version>' + str(nvmlSystemGetDriverVersion()) + '</driver_version>\n'

        deviceCount = nvmlDeviceGetCount()
        strResult += ' <attached_gpus>' + str(deviceCount) + '</attached_gpus>\n'

        for i in range(0, deviceCount):
            handle = nvmlDeviceGetHandleByIndex(i)

            pciInfo = nvmlDeviceGetPciInfo(handle)

            strResult += ' <gpu id="%s">\n' % pciInfo.busId

            strResult += ' <product_name>' + nvmlDeviceGetName(handle) + '</product_name>\n'

            # Human-readable names for the NVML brand constants.
            brandNames = {NVML_BRAND_UNKNOWN : "Unknown",
                          NVML_BRAND_QUADRO : "Quadro",
                          NVML_BRAND_TESLA : "Tesla",
                          NVML_BRAND_NVS : "NVS",
                          NVML_BRAND_GRID : "Grid",
                          NVML_BRAND_TITAN : "Titan",
                          NVML_BRAND_GEFORCE : "GeForce",
                          NVML_BRAND_NVIDIA_VAPPS : "NVIDIA Virtual Applications",
                          NVML_BRAND_NVIDIA_VPC : "NVIDIA Virtual PC",
                          NVML_BRAND_NVIDIA_VCS : "NVIDIA Virtual Compute Server",
                          NVML_BRAND_NVIDIA_VWS : "NVIDIA RTX Virtual Workstation",
                          NVML_BRAND_NVIDIA_CLOUD_GAMING : "NVIDIA Cloud Gaming",
                          NVML_BRAND_QUADRO_RTX : "Quadro RTX",
                          NVML_BRAND_NVIDIA_RTX : "NVIDIA RTX",
                          NVML_BRAND_NVIDIA : "NVIDIA",
                          NVML_BRAND_GEFORCE_RTX : "GeForce RTX",
                          NVML_BRAND_TITAN_RTX : "TITAN RTX",

                          }

            try:
                # If nvmlDeviceGetBrand() succeeds it is guaranteed to be in the dictionary
                brandName = brandNames[nvmlDeviceGetBrand(handle)]
            except NVMLError as err:
                brandName = handleError(err)

            strResult += ' <product_brand>' + brandName + '</product_brand>\n'

            try:
                serial = nvmlDeviceGetSerial(handle)
            except NVMLError as err:
                serial = handleError(err)

            strResult += ' <serial>' + serial + '</serial>\n'

            try:
                uuid = nvmlDeviceGetUUID(handle)
            except NVMLError as err:
                uuid = handleError(err)

            strResult += ' <uuid>' + uuid + '</uuid>\n'

            strResult += ' <gpu_virtualization_mode>\n'
            try:
                mode = StrVirt(nvmlDeviceGetVirtualizationMode(handle))
            except NVMLError as err:
                mode = handleError(err)
            strResult += ' <virtualization_mode>' + mode + '</virtualization_mode>\n'
            strResult += ' </gpu_virtualization_mode>\n'

            try:
                gridLicensableFeatures = nvmlDeviceGetGridLicensableFeatures(handle)
                if gridLicensableFeatures.isGridLicenseSupported == 1:
                    strResult += ' <vgpu_software_licensed_product>\n'
                    # NOTE(review): this inner loop reuses the name ``i`` from the
                    # outer device loop; harmless here because the outer ``for``
                    # re-binds ``i`` from its own iterator, but worth renaming.
                    for i in range(gridLicensableFeatures.licensableFeaturesCount):
                        if gridLicensableFeatures.gridLicensableFeatures[i].featureState == 0:
                            # featureState == 0 presumably means "not licensed";
                            # pass-through GPUs are reported as implicitly licensed.
                            if nvmlDeviceGetVirtualizationMode(handle) == NVML_GPU_VIRTUALIZATION_MODE_PASSTHROUGH:
                                strResult += ' <licensed_product_name>' + 'NVIDIA Virtual Applications' + '</licensed_product_name>\n'
                                strResult += ' <license_status>' + 'Licensed' + '</license_status>\n'
                            else:
                                strResult += ' <licensed_product_name>' + gridLicensableFeatures.gridLicensableFeatures[i].productName + '</licensed_product_name>\n'
                                strResult += ' <license_status>' + 'Unlicensed' + '</license_status>\n'
                        else:
                            strResult += ' <licensed_product_name>' + gridLicensableFeatures.gridLicensableFeatures[i].productName + '</licensed_product_name>\n'
                            strResult += ' <license_status>' + 'Licensed' + '</license_status>\n'
                    strResult += ' </vgpu_software_licensed_product>\n'
            except NVMLError as err:
                gridLicensableFeatures = handleError(err)

            strResult += ' </gpu>\n'

    except NVMLError as err:
        strResult += 'example.py: ' + err.__str__() + '\n'

    nvmlShutdown()

    return strResult
|
| 165 |
+
|
| 166 |
+
# This is not executed when the module is imported; only when run as a script.
if __name__ == "__main__":
    print(deviceQuery())
|
| 169 |
+
|
parrot/lib/python3.10/site-packages/ffmpy.py
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import errno
|
| 2 |
+
import itertools
|
| 3 |
+
import shlex
|
| 4 |
+
import subprocess
|
| 5 |
+
from typing import IO, Any, List, Mapping, Optional, Sequence, Tuple, Union
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FFmpeg(object):
    """Thin wrapper around the `FFmpeg <https://www.ffmpeg.org/>`_ family of
    command line tools (``ffmpeg``, ``ffprobe``).

    The full command line is assembled at construction time and exposed as a
    printable string via the ``cmd`` attribute.
    """

    def __init__(
        self,
        executable: str = "ffmpeg",
        global_options: Optional[Union[Sequence[str], str]] = None,
        inputs: Optional[Mapping[str, Optional[Union[Sequence[str], str]]]] = None,
        outputs: Optional[Mapping[str, Optional[Union[Sequence[str], str]]]] = None,
    ) -> None:
        """Compile the FFmpeg command line from the given pieces.

        ``inputs`` and ``outputs`` map each input/output argument to its
        options; one set of options is either a single space-separated
        string or a sequence of already-split strings.  Quoted parts of a
        complex command line must stay in one string even if they contain
        spaces.  See `the FFmpeg synopsis
        <https://ffmpeg.org/ffmpeg.html#Synopsis>`_ for the command format.

        :param str executable: name or path of the ``ffmpeg`` executable;
            resolved via ``PATH`` unless an absolute path is given
        :param iterable global_options: options applied before any
            input/output (e.g. ``-y``, ``-v``), as a string or sequence
        :param dict inputs: mapping of input argument -> its options;
            each input is automatically prefixed with ``-i``
        :param dict outputs: mapping of output argument -> its options
        """
        self.executable = executable
        command: List[str] = [executable]
        command.extend(_normalize_options(global_options, split_mixed=True))

        if inputs is not None:
            command.extend(_merge_args_opts(inputs, add_minus_i_option=True))

        if outputs is not None:
            command.extend(_merge_args_opts(outputs))

        self._cmd = command
        # Printable single-string form of the command (used in repr/errors).
        self.cmd = subprocess.list2cmdline(command)
        # Populated by run(); stays None until a process has been spawned.
        self.process: Optional[subprocess.Popen] = None

    def __repr__(self) -> str:
        return "<{0!r} {1!r}>".format(type(self).__name__, self.cmd)

    def run(
        self,
        input_data: Optional[bytes] = None,
        stdout: Optional[Union[IO, int]] = None,
        stderr: Optional[Union[IO, int]] = None,
        env: Optional[Mapping[str, str]] = None,
        **kwargs: Any
    ) -> Tuple[Optional[bytes], Optional[bytes]]:
        """Execute the compiled FFmpeg command line.

        ``input_data`` is fed to the process's stdin, for FFmpeg's ``pipe``
        input protocol.  ``stdout`` and ``stderr`` are passed straight to
        `subprocess.Popen`; by default nothing is redirected (output goes to
        the running shell).  To capture output of the ``pipe`` output
        protocol, pass ``subprocess.PIPE`` as ``stdout``.  ``env`` replaces
        the child's environment; any remaining keyword arguments are
        forwarded to `subprocess.Popen
        <https://docs.python.org/3/library/subprocess.html#subprocess.Popen>`_.

        More on the ``pipe`` protocol:
        `https://ffmpeg.org/ffmpeg-protocols.html#pipe`_.

        :param bytes input_data: data for FFmpeg to read from stdin
        :param stdout: where to redirect FFmpeg ``stdout`` (default: no
            redirection)
        :param stderr: where to redirect FFmpeg ``stderr`` (default: no
            redirection)
        :param env: custom environment for the ffmpeg process
        :param kwargs: extra keyword arguments for `subprocess.Popen`
        :return: 2-tuple of the process ``stdout`` and ``stderr`` (both
            `None` when not redirected to a pipe)
        :rtype: tuple
        :raise: `FFRuntimeError` if FFmpeg exits with a non-zero code;
            `FFExecutableNotFoundError` if the executable was not found
        """
        try:
            self.process = subprocess.Popen(
                self._cmd,
                stdin=subprocess.PIPE,
                stdout=stdout,
                stderr=stderr,
                env=env,
                **kwargs
            )
        except OSError as exc:
            # ENOENT means the executable itself is missing; anything else
            # (permissions, resources, ...) is re-raised untouched.
            if exc.errno != errno.ENOENT:
                raise
            raise FFExecutableNotFoundError(
                "Executable '{0}' not found".format(self.executable)
            )

        captured_out, captured_err = self.process.communicate(input=input_data)
        if self.process.returncode != 0:
            raise FFRuntimeError(
                self.cmd, self.process.returncode, captured_out, captured_err
            )

        return captured_out, captured_err
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class FFprobe(FFmpeg):
    """Wrapper for `ffprobe <https://www.ffmpeg.org/ffprobe.html>`_ — an
    :class:`FFmpeg` command line without outputs."""

    def __init__(
        self,
        executable: str = "ffprobe",
        global_options: Optional[Union[Sequence[str], str]] = None,
        inputs: Optional[Mapping[str, Optional[Union[Sequence[str], str]]]] = None,
    ) -> None:
        """Create an FFprobe command line wrapper.

        The executable is taken from ``PATH`` by default but can be
        overridden with an absolute path.  See `the ffprobe synopsis
        <https://ffmpeg.org/ffprobe.html#Synopsis>`_ for the command format.

        :param str executable: name or absolute path of the ffprobe executable
        :param iterable global_options: global options, as a string or a
            sequence of strings
        :param dict inputs: mapping of input argument -> its options
        """
        super().__init__(
            executable=executable, global_options=global_options, inputs=inputs
        )
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class FFExecutableNotFoundError(Exception):
    """Raised when the FFmpeg/FFprobe executable could not be located."""
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class FFRuntimeError(Exception):
    """Raised when FFmpeg/FFprobe exits with a non-zero status code.

    The exception instance carries the command line execution details as
    attributes: ``cmd``, ``exit_code``, ``stdout``, ``stderr``.
    """

    def __init__(self, cmd: str, exit_code: int, stdout: bytes, stderr: bytes) -> None:
        self.cmd = cmd
        self.exit_code = exit_code
        self.stdout = stdout
        self.stderr = stderr

        # ``stdout``/``stderr`` may be None when the caller did not redirect
        # them to a pipe; fall back to empty bytes before decoding.
        detail = "`{0}` exited with status {1}\n\nSTDOUT:\n{2}\n\nSTDERR:\n{3}".format(
            self.cmd, exit_code, (stdout or b"").decode(), (stderr or b"").decode()
        )
        super().__init__(detail)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def _merge_args_opts(
    args_opts_dict: Mapping[str, Optional[Union[Sequence[str], str]]],
    add_minus_i_option: bool = False,
) -> List[str]:
    """Merge arguments (dict keys) with their options (dict values) into one
    flat command line fragment.

    Options always come *before* their argument, matching FFmpeg's
    ``[options] argument`` convention.

    :param dict args_opts_dict: a dictionary of arguments and options
    :param bool add_minus_i_option: when true, prefix each argument with
        ``-i`` (used for FFmpeg inputs)
    :return: merged list of strings with arguments and their options
    :rtype: list
    """
    merged: List[str] = []

    for argument, options in args_opts_dict.items():
        merged.extend(_normalize_options(options))

        # An empty/falsy key contributes only its options (e.g. trailing
        # global output options keyed on None).
        if not argument:
            continue

        if add_minus_i_option:
            merged.append("-i")
        merged.append(argument)

    return merged
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _normalize_options(
|
| 199 |
+
options: Optional[Union[Sequence[str], str]], split_mixed: bool = False
|
| 200 |
+
) -> List[str]:
|
| 201 |
+
"""Normalize options string or list of strings.
|
| 202 |
+
|
| 203 |
+
Splits `options` into a list of strings. If `split_mixed` is `True`, splits (flattens) mixed
|
| 204 |
+
options (i.e. list of strings with spaces) into separate items.
|
| 205 |
+
|
| 206 |
+
:param options: options string or list of strings
|
| 207 |
+
:param bool split_mixed: whether to split mixed options into separate items
|
| 208 |
+
"""
|
| 209 |
+
if options is None:
|
| 210 |
+
return []
|
| 211 |
+
elif isinstance(options, str):
|
| 212 |
+
return shlex.split(options)
|
| 213 |
+
else:
|
| 214 |
+
if split_mixed:
|
| 215 |
+
return list(itertools.chain(*[shlex.split(o) for o in options]))
|
| 216 |
+
else:
|
| 217 |
+
return list(options)
|
parrot/lib/python3.10/site-packages/hjson/__pycache__/ordered_dict.cpython-310.pyc
ADDED
|
Binary file (4.03 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/hjson/encoder.py
ADDED
|
@@ -0,0 +1,521 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of JSONEncoder
|
| 2 |
+
"""
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
import re
|
| 5 |
+
from operator import itemgetter
|
| 6 |
+
from decimal import Decimal
|
| 7 |
+
from .compat import u, unichr, binary_type, string_types, integer_types, PY3
|
| 8 |
+
from .decoder import PosInf
|
| 9 |
+
|
| 10 |
+
#ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
|
| 11 |
+
# This is required because u() will mangle the string and ur'' isn't valid
|
| 12 |
+
# python3 syntax
|
| 13 |
+
ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]')
|
| 14 |
+
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
|
| 15 |
+
HAS_UTF8 = re.compile(r'[\x80-\xff]')
|
| 16 |
+
ESCAPE_DCT = {
|
| 17 |
+
'\\': '\\\\',
|
| 18 |
+
'"': '\\"',
|
| 19 |
+
'\b': '\\b',
|
| 20 |
+
'\f': '\\f',
|
| 21 |
+
'\n': '\\n',
|
| 22 |
+
'\r': '\\r',
|
| 23 |
+
'\t': '\\t',
|
| 24 |
+
}
|
| 25 |
+
for i in range(0x20):
|
| 26 |
+
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
|
| 27 |
+
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
|
| 28 |
+
for i in [0x2028, 0x2029]:
|
| 29 |
+
ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))
|
| 30 |
+
|
| 31 |
+
FLOAT_REPR = repr
|
| 32 |
+
|
| 33 |
+
def encode_basestring(s, _PY3=PY3, _q=u('"')):
    """Return *s* as a quoted JSON string with control characters escaped."""
    # Normalize to text first; bytes input is assumed to be UTF-8.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    else:
        if isinstance(s, str) and HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return _q + escaped + _q
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def py_encode_basestring_ascii(s, _PY3=PY3):
    """Return an ASCII-only, quoted JSON representation of *s*."""
    # Normalize to text first; bytes input is assumed to be UTF-8.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    else:
        if isinstance(s, str) and HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')

    def replace(match):
        ch = match.group(0)
        known = ESCAPE_DCT.get(ch)
        if known is not None:
            return known
        n = ord(ch)
        if n < 0x10000:
            #return '\\u{0:04x}'.format(n)
            return '\\u%04x' % (n,)
        # Beyond the BMP: emit a UTF-16 surrogate pair.
        n -= 0x10000
        high = 0xd800 | ((n >> 10) & 0x3ff)
        low = 0xdc00 | (n & 0x3ff)
        return '\\u%04x\\u%04x' % (high, low)

    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
encode_basestring_ascii = (
|
| 78 |
+
py_encode_basestring_ascii)
|
| 79 |
+
|
| 80 |
+
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict, namedtuple  | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    """
    # Defaults used when no explicit ``separators`` argument is given and
    # ``indent`` is None (see __init__ for how indent changes item_separator).
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=True, namedtuple_as_object=True,
            tuple_as_array=True, bigint_as_string=False,
            item_sort_key=None, for_json=False,
            int_as_string_bitcount=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of hjson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be an (item_separator, key_separator)
        tuple. The default is (', ', ': ') if *indent* is ``None`` and
        (',', ': ') otherwise. To get the most compact JSON representation,
        you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        If namedtuple_as_object is true (the default), objects with
        ``_asdict()`` methods will be encoded as JSON objects.

        If tuple_as_array is true (the default), tuple (and subclasses) will
        be encoded as JSON arrays.

        If bigint_as_string is true (not the default), ints 2**53 and higher
        or lower than -2**53 will be encoded as strings. This is to avoid the
        rounding that happens in Javascript otherwise.

        If int_as_string_bitcount is a positive number (n), then int of size
        greater than or equal to 2**n or lower than or equal to -2**n will be
        encoded as strings.

        If specified, item_sort_key is a callable used to sort the items in
        each dictionary. This is useful if you want to sort items other than
        in alphabetical order by key.

        If for_json is true (not the default), objects with a ``for_json()``
        method will use the return value of that method for encoding as JSON
        instead of the object.

        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        self.namedtuple_as_object = namedtuple_as_object
        self.tuple_as_array = tuple_as_array
        self.bigint_as_string = bigint_as_string
        self.item_sort_key = item_sort_key
        self.for_json = for_json
        self.int_as_string_bitcount = int_as_string_bitcount
        # Back-compat: an integer indent means "that many spaces".
        if indent is not None and not isinstance(indent, string_types):
            indent = indent * ' '
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # Pretty-printing already inserts a newline after each item,
            # so drop the trailing space from the item separator.
            self.item_separator = ','
        if default is not None:
            # Shadows the ``default`` method on this instance.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from hjson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, binary_type):
            _encoding = self.encoding
            if (_encoding is not None and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
        if isinstance(o, string_types):
            # Fast path: a bare string needs no recursive machinery.
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        # ``markers`` maps id(obj) -> obj for containers currently being
        # encoded; used to detect circular references.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder to decode non-UTF-8 byte strings first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, binary_type):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            # NaN compares unequal to itself; NaN/Inf/-Inf all serialize
            # as ``null`` (JSON has no representation for them).
            if o != o:
                text = 'null'
            elif o == _inf:
                text = 'null'
            elif o == _neginf:
                text = 'null'
            else:
                return _repr(o)

            return text

        # NOTE(review): key_memo is created and cleared but never passed to
        # _make_iterencode here — it appears vestigial (used only by the C
        # speedup path in upstream simplejson); confirm before removing.
        key_memo = {}
        # bigint_as_string is shorthand for a 53-bit cutoff (JS safe range).
        int_as_string_bitcount = (
            53 if self.bigint_as_string else self.int_as_string_bitcount)
        _iterencode = _make_iterencode(
            markers, self.default, _encoder, self.indent, floatstr,
            self.key_separator, self.item_separator, self.sort_keys,
            self.skipkeys, _one_shot, self.use_decimal,
            self.namedtuple_as_object, self.tuple_as_array,
            int_as_string_bitcount,
            self.item_sort_key, self.encoding, self.for_json,
            Decimal=Decimal)
        try:
            return _iterencode(o, 0)
        finally:
            key_memo.clear()
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _int_as_string_bitcount, _item_sort_key,
        _encoding, _for_json,
        ## HACK: hand-optimized bytecode; turn globals into locals
        _PY3=PY3,
        ValueError=ValueError,
        string_types=string_types,
        Decimal=Decimal,
        dict=dict,
        float=float,
        id=id,
        integer_types=integer_types,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
    ):
    """Build and return the recursive ``_iterencode(o, indent_level)``
    generator used by :meth:`JSONEncoder.iterencode`.

    All configuration is captured in the closure so the hot path does
    only local-variable lookups.
    """
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")
    elif _sort_keys and not _item_sort_key:
        # sort_keys without an explicit key function sorts on the key itself.
        _item_sort_key = itemgetter(0)

    if (_int_as_string_bitcount is not None and
            (_int_as_string_bitcount <= 0 or
             not isinstance(_int_as_string_bitcount, integer_types))):
        raise TypeError("int_as_string_bitcount must be a positive integer")

    def _encode_int(value):
        # Quote ints outside (-2**n, 2**n) when a bitcount is configured,
        # so e.g. JavaScript consumers don't lose precision.
        skip_quoting = (
            _int_as_string_bitcount is None
            or
            _int_as_string_bitcount < 1
        )
        if (
            skip_quoting or
            (-1 << _int_as_string_bitcount)
            < value <
            (1 << _int_as_string_bitcount)
        ):
            return str(value)
        return '"' + str(value) + '"'

    def _iterencode_list(lst, _current_indent_level):
        # Yields the JSON text of a list/tuple piece by piece.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            # First iteration emits the opening bracket (plus any indent);
            # subsequent iterations emit the separator instead.
            if first:
                first = False
            else:
                buf = separator
            yield buf

            for chunk in _iterencode(value, _current_indent_level):
                yield chunk

        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _stringify_key(key):
        # Coerce a non-string dict key to its JSON string form, or return
        # None (meaning "skip this item") when _skipkeys is set.
        if isinstance(key, string_types): # pragma: no cover
            pass
        elif isinstance(key, binary_type):
            key = key.decode(_encoding)
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            # Checked before integer_types: bool is an int subclass.
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, integer_types):
            key = str(key)
        elif _use_decimal and isinstance(key, Decimal):
            key = str(key)
        elif _skipkeys:
            key = None
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        return key

    def _iterencode_dict(dct, _current_indent_level):
        # Yields the JSON text of a dict piece by piece.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _PY3:
            iteritems = dct.items()
        else:
            iteritems = dct.iteritems()
        if _item_sort_key:
            # Sorting requires materializing (and pre-stringifying) items.
            items = []
            for k, v in dct.items():
                if not isinstance(k, string_types):
                    k = _stringify_key(k)
                    if k is None:
                        continue
                items.append((k, v))
            items.sort(key=_item_sort_key)
        else:
            items = iteritems
        for key, value in items:
            # Keys were already stringified above in the sorted branch.
            if not (_item_sort_key or isinstance(key, string_types)):
                key = _stringify_key(key)
                if key is None:
                    # _skipkeys must be True
                    continue
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator

            for chunk in _iterencode(value, _current_indent_level):
                yield chunk

        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Main dispatch: scalars are yielded directly; containers delegate
        # to the specialized generators; everything else goes through
        # for_json / _asdict / _default hooks.
        if (isinstance(o, string_types) or
            (_PY3 and isinstance(o, binary_type))):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, integer_types):
            yield _encode_int(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        else:
            for_json = _for_json and getattr(o, 'for_json', None)
            if for_json and callable(for_json):
                for chunk in _iterencode(for_json(), _current_indent_level):
                    yield chunk
            elif isinstance(o, list):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            else:
                # namedtuples (anything with a callable _asdict) are encoded
                # as objects before tuples are considered as arrays.
                _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
                if _asdict and callable(_asdict):
                    for chunk in _iterencode_dict(_asdict(), _current_indent_level):
                        yield chunk
                elif (_tuple_as_array and isinstance(o, tuple)):
                    for chunk in _iterencode_list(o, _current_indent_level):
                        yield chunk
                elif isinstance(o, dict):
                    for chunk in _iterencode_dict(o, _current_indent_level):
                        yield chunk
                elif _use_decimal and isinstance(o, Decimal):
                    yield str(o)
                else:
                    # Unknown type: let the user's default() produce a
                    # serializable replacement, guarding against cycles.
                    if markers is not None:
                        markerid = id(o)
                        if markerid in markers:
                            raise ValueError("Circular reference detected")
                        markers[markerid] = o
                    o = _default(o)
                    for chunk in _iterencode(o, _current_indent_level):
                        yield chunk
                    if markers is not None:
                        del markers[markerid]

    return _iterencode
|
parrot/lib/python3.10/site-packages/hjson/tests/__init__.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
import unittest
|
| 3 |
+
import doctest
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def additional_tests(suite=None):
    """Add the doctest suites of hjson's top-level modules to *suite*.

    A fresh ``unittest.TestSuite`` is created when *suite* is ``None``.
    Returns the suite for chaining.
    """
    import hjson
    import hjson.encoder
    import hjson.decoder
    target = unittest.TestSuite() if suite is None else suite
    for module in (hjson, hjson.encoder, hjson.decoder):
        target.addTest(doctest.DocTestSuite(module))
    return target
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def all_tests_suite():
    """Return the complete hjson test suite.

    Loads every named test module and appends the doctest suites via
    :func:`additional_tests`.
    """
    # The original wrapped this in a nested ``get_suite`` function that was
    # defined and immediately called exactly once — pointless indirection,
    # flattened here. Behavior and return value are unchanged.
    loaded = unittest.TestLoader().loadTestsFromNames([
        'hjson.tests.test_hjson',
        'hjson.tests.test_bitsize_int_as_string',
        'hjson.tests.test_bigint_as_string',
        'hjson.tests.test_check_circular',
        'hjson.tests.test_decode',
        'hjson.tests.test_default',
        'hjson.tests.test_dump',
        'hjson.tests.test_encode_basestring_ascii',
        'hjson.tests.test_errors',
        'hjson.tests.test_fail',
        'hjson.tests.test_float',
        'hjson.tests.test_indent',
        'hjson.tests.test_pass1',
        'hjson.tests.test_pass2',
        'hjson.tests.test_pass3',
        'hjson.tests.test_recursion',
        'hjson.tests.test_scanstring',
        'hjson.tests.test_separators',
        'hjson.tests.test_unicode',
        'hjson.tests.test_decimal',
        'hjson.tests.test_tuple',
        'hjson.tests.test_namedtuple',
        #'hjson.tests.test_tool', # fails on windows
        'hjson.tests.test_for_json',
    ])
    return additional_tests(loaded)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def main():
    """Run the full test suite; exit with a nonzero status on failure."""
    verbosity = 1 + sys.argv.count('-v')
    runner = unittest.TextTestRunner(verbosity=verbosity)
    outcome = runner.run(all_tests_suite())
    raise SystemExit(not outcome.wasSuccessful())
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
if __name__ == '__main__':
    import os
    import sys
    # Allow running this file directly from a source checkout: prepend the
    # directory three levels up (the package root above ``hjson/tests``)
    # so ``import hjson`` resolves to this tree.
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    main()
|
parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_fail.cpython-310.pyc
ADDED
|
Binary file (2.94 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_float.cpython-310.pyc
ADDED
|
Binary file (1.42 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc
ADDED
|
Binary file (1.68 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass1.cpython-310.pyc
ADDED
|
Binary file (2.03 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/hjson/tests/__pycache__/test_unicode.cpython-310.pyc
ADDED
|
Binary file (6.58 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/hjson/tests/test_bigint_as_string.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestBigintAsString(TestCase):
    """Both big-int quoting options stringify ints at/beyond +/-2**53."""

    # Python 2.5, at least the one that ships on Mac OS X, calculates
    # 2 ** 53 as 0! It manages to calculate 1 << 53 correctly.
    values = [(200, 200),
              ((1 << 53) - 1, 9007199254740991),
              ((1 << 53), '9007199254740992'),
              ((1 << 53) + 1, '9007199254740993'),
              (-100, -100),
              ((-1 << 53), '-9007199254740992'),
              ((-1 << 53) - 1, '-9007199254740993'),
              ((-1 << 53) + 1, -9007199254740991)]

    options = (
        {"bigint_as_string": True},
        {"int_as_string_bitcount": 53}
    )

    def _roundtrip(self, plain, quoted, opts):
        # Without options the value round-trips unchanged; with either
        # option, out-of-range ints come back as strings.
        self.assertEqual(plain, json.loads(json.dumpsJSON(plain)))
        self.assertEqual(quoted, json.loads(json.dumpsJSON(plain, **opts)))

    def test_ints(self):
        for opts in self.options:
            for val, expect in self.values:
                self._roundtrip(val, expect, opts)

    def test_lists(self):
        for opts in self.options:
            for val, expect in self.values:
                self._roundtrip([val, val], [expect, expect], opts)

    def test_dicts(self):
        for opts in self.options:
            for val, expect in self.values:
                self._roundtrip({'k': val}, {'k': expect}, opts)

    def test_dict_keys(self):
        # Non-string keys are always stringified, regardless of options.
        for opts in self.options:
            for val, _ in self.values:
                expect = {str(val): 'value'}
                source = {val: 'value'}
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(source)))
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(source, **opts)))
|
parrot/lib/python3.10/site-packages/hjson/tests/test_bitsize_int_as_string.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestBitSizeIntAsString(TestCase):
    """int_as_string_bitcount quotes ints outside the configured bit range."""

    # Python 2.5, at least the one that ships on Mac OS X, calculates
    # 2 ** 31 as 0! It manages to calculate 1 << 31 correctly.
    values = [
        (200, 200),
        ((1 << 31) - 1, (1 << 31) - 1),
        ((1 << 31), str(1 << 31)),
        ((1 << 31) + 1, str((1 << 31) + 1)),
        (-100, -100),
        ((-1 << 31), str(-1 << 31)),
        ((-1 << 31) - 1, str((-1 << 31) - 1)),
        ((-1 << 31) + 1, (-1 << 31) + 1),
    ]

    def test_invalid_counts(self):
        # Non-integers and non-positive counts are rejected.
        for bad in ('foo', -1, 0, 1.0):
            self.assertRaises(
                TypeError,
                json.dumpsJSON, 0, int_as_string_bitcount=bad)

    def test_ints_outside_range_fails(self):
        # 2**15 is within a 16-bit range, so it must NOT come back quoted.
        encoded = json.dumpsJSON(1 << 15, int_as_string_bitcount=16)
        self.assertNotEqual(
            str(1 << 15),
            json.loads(encoded),
        )

    def _roundtrip(self, plain, quoted):
        self.assertEqual(plain, json.loads(json.dumpsJSON(plain)))
        self.assertEqual(
            quoted,
            json.loads(json.dumpsJSON(plain, int_as_string_bitcount=31)),
        )

    def test_ints(self):
        for val, expect in self.values:
            self._roundtrip(val, expect)

    def test_lists(self):
        for val, expect in self.values:
            self._roundtrip([val, val], [expect, expect])

    def test_dicts(self):
        for val, expect in self.values:
            self._roundtrip({'k': val}, {'k': expect})

    def test_dict_keys(self):
        # Non-string keys are always stringified, option or not.
        for val, _ in self.values:
            expect = {str(val): 'value'}
            source = {val: 'value'}
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(source)))
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(source, int_as_string_bitcount=31)))
|
parrot/lib/python3.10/site-packages/hjson/tests/test_check_circular.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
import hjson as json
|
| 3 |
+
|
| 4 |
+
def default_iterable(obj):
    """Encoder ``default`` hook: materialize any iterable into a list."""
    return [item for item in obj]
|
| 6 |
+
|
| 7 |
+
class TestCheckCircular(TestCase):
    """Circular references raise ValueError; check can be disabled."""

    def test_circular_dict(self):
        self_ref = {}
        self_ref['a'] = self_ref
        self.assertRaises(ValueError, json.dumpsJSON, self_ref)

    def test_circular_list(self):
        self_ref = []
        self_ref.append(self_ref)
        self.assertRaises(ValueError, json.dumpsJSON, self_ref)

    def test_circular_composite(self):
        # Cycle through two containers: dict -> list -> dict.
        outer = {'a': []}
        outer['a'].append(outer)
        self.assertRaises(ValueError, json.dumpsJSON, outer)

    def test_circular_default(self):
        # A set is only serializable via the default hook.
        json.dumpsJSON([set()], default=default_iterable)
        self.assertRaises(TypeError, json.dumpsJSON, [set()])

    def test_circular_off_default(self):
        # Same as above, but with circular checking disabled.
        json.dumpsJSON([set()], default=default_iterable, check_circular=False)
        self.assertRaises(TypeError, json.dumpsJSON, [set()],
                          check_circular=False)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_decimal.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import decimal
|
| 2 |
+
from decimal import Decimal
|
| 3 |
+
from unittest import TestCase
|
| 4 |
+
from hjson.compat import StringIO, reload_module
|
| 5 |
+
|
| 6 |
+
import hjson as json
|
| 7 |
+
|
| 8 |
+
class TestDecimal(TestCase):
    """Round-trip tests for use_decimal / parse_float=Decimal support."""

    # Decimal source strings exercising fractional, trailing-zero and
    # integral forms.
    NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500"

    def dumps(self, obj, **kw):
        """Encode via both dumpJSON (stream) and dumpsJSON; assert they agree."""
        sio = StringIO()
        json.dumpJSON(obj, sio, **kw)
        res = json.dumpsJSON(obj, **kw)
        self.assertEqual(res, sio.getvalue())
        return res

    def loads(self, s, **kw):
        """Decode via both loads (string) and load (stream); assert they agree."""
        sio = StringIO(s)
        res = json.loads(s, **kw)
        self.assertEqual(res, json.load(sio, **kw))
        return res

    def test_decimal_encode(self):
        for d in map(Decimal, self.NUMS):
            self.assertEqual(self.dumps(d, use_decimal=True), str(d))

    def test_decimal_decode(self):
        for s in self.NUMS:
            self.assertEqual(self.loads(s, parse_float=Decimal), Decimal(s))

    def test_stringify_key(self):
        # Decimal dict keys are stringified on encode.
        for d in map(Decimal, self.NUMS):
            v = {d: d}
            self.assertEqual(
                self.loads(
                    self.dumps(v, use_decimal=True), parse_float=Decimal),
                {str(d): d})

    def test_decimal_roundtrip(self):
        for d in map(Decimal, self.NUMS):
            # The type might not be the same (int and Decimal) but they
            # should still compare equal.
            for v in [d, [d], {'': d}]:
                self.assertEqual(
                    self.loads(
                        self.dumps(v, use_decimal=True), parse_float=Decimal),
                    v)

    def test_decimal_defaults(self):
        d = Decimal('1.1')
        # use_decimal=True is the default
        self.assertRaises(TypeError, json.dumpsJSON, d, use_decimal=False)
        self.assertEqual('1.1', json.dumpsJSON(d))
        self.assertEqual('1.1', json.dumpsJSON(d, use_decimal=True))
        self.assertRaises(TypeError, json.dumpJSON, d, StringIO(),
                          use_decimal=False)
        sio = StringIO()
        json.dumpJSON(d, sio)
        self.assertEqual('1.1', sio.getvalue())
        sio = StringIO()
        json.dumpJSON(d, sio, use_decimal=True)
        self.assertEqual('1.1', sio.getvalue())

    def test_decimal_reload(self):
        # Simulate a subinterpreter that reloads the Python modules but not
        # the C code https://github.com/simplejson/simplejson/issues/34
        global Decimal
        Decimal = reload_module(decimal).Decimal
        import hjson.encoder
        hjson.encoder.Decimal = Decimal
        self.test_decimal_roundtrip()
|
parrot/lib/python3.10/site-packages/hjson/tests/test_default.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
class TestDefault(TestCase):
    """The ``default`` hook is applied to otherwise unserializable objects."""

    def test_default(self):
        # Encoding the `type` builtin with default=repr must yield the same
        # JSON as encoding its repr string directly.
        via_hook = json.dumpsJSON(type, default=repr)
        direct = json.dumpsJSON(repr(type))
        self.assertEqual(via_hook, direct)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_dump.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
from hjson.compat import StringIO, long_type, b, binary_type, PY3
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
def as_text_type(s):
    """On Python 3, decode ASCII bytes to text; pass anything else through."""
    needs_decode = PY3 and isinstance(s, binary_type)
    return s.decode('ascii') if needs_decode else s
|
| 9 |
+
|
| 10 |
+
class TestDump(TestCase):
|
| 11 |
+
def test_dump(self):
|
| 12 |
+
sio = StringIO()
|
| 13 |
+
json.dumpJSON({}, sio)
|
| 14 |
+
self.assertEqual(sio.getvalue(), '{}')
|
| 15 |
+
|
| 16 |
+
def test_constants(self):
|
| 17 |
+
for c in [None, True, False]:
|
| 18 |
+
self.assertTrue(json.loads(json.dumpsJSON(c)) is c)
|
| 19 |
+
self.assertTrue(json.loads(json.dumpsJSON([c]))[0] is c)
|
| 20 |
+
self.assertTrue(json.loads(json.dumpsJSON({'a': c}))['a'] is c)
|
| 21 |
+
|
| 22 |
+
def test_stringify_key(self):
|
| 23 |
+
items = [(b('bytes'), 'bytes'),
|
| 24 |
+
(1.0, '1.0'),
|
| 25 |
+
(10, '10'),
|
| 26 |
+
(True, 'true'),
|
| 27 |
+
(False, 'false'),
|
| 28 |
+
(None, 'null'),
|
| 29 |
+
(long_type(100), '100')]
|
| 30 |
+
for k, expect in items:
|
| 31 |
+
self.assertEqual(
|
| 32 |
+
json.loads(json.dumpsJSON({k: expect})),
|
| 33 |
+
{expect: expect})
|
| 34 |
+
self.assertEqual(
|
| 35 |
+
json.loads(json.dumpsJSON({k: expect}, sort_keys=True)),
|
| 36 |
+
{expect: expect})
|
| 37 |
+
self.assertRaises(TypeError, json.dumpsJSON, {json: 1})
|
| 38 |
+
for v in [{}, {'other': 1}, {b('derp'): 1, 'herp': 2}]:
|
| 39 |
+
for sort_keys in [False, True]:
|
| 40 |
+
v0 = dict(v)
|
| 41 |
+
v0[json] = 1
|
| 42 |
+
v1 = dict((as_text_type(key), val) for (key, val) in v.items())
|
| 43 |
+
self.assertEqual(
|
| 44 |
+
json.loads(json.dumpsJSON(v0, skipkeys=True, sort_keys=sort_keys)),
|
| 45 |
+
v1)
|
| 46 |
+
self.assertEqual(
|
| 47 |
+
json.loads(json.dumpsJSON({'': v0}, skipkeys=True, sort_keys=sort_keys)),
|
| 48 |
+
{'': v1})
|
| 49 |
+
self.assertEqual(
|
| 50 |
+
json.loads(json.dumpsJSON([v0], skipkeys=True, sort_keys=sort_keys)),
|
| 51 |
+
[v1])
|
| 52 |
+
|
| 53 |
+
def test_dumps(self):
|
| 54 |
+
self.assertEqual(json.dumpsJSON({}), '{}')
|
| 55 |
+
|
| 56 |
+
def test_encode_truefalse(self):
|
| 57 |
+
self.assertEqual(json.dumpsJSON(
|
| 58 |
+
{True: False, False: True}, sort_keys=True),
|
| 59 |
+
'{"false": true, "true": false}')
|
| 60 |
+
self.assertEqual(
|
| 61 |
+
json.dumpsJSON(
|
| 62 |
+
{2: 3.0,
|
| 63 |
+
4.0: long_type(5),
|
| 64 |
+
False: 1,
|
| 65 |
+
long_type(6): True,
|
| 66 |
+
"7": 0},
|
| 67 |
+
sort_keys=True),
|
| 68 |
+
'{"2": 3.0, "4.0": 5, "6": true, "7": 0, "false": 1}')
|
| 69 |
+
|
| 70 |
+
def test_ordered_dict(self):
|
| 71 |
+
# http://bugs.python.org/issue6105
|
| 72 |
+
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
|
| 73 |
+
s = json.dumpsJSON(json.OrderedDict(items))
|
| 74 |
+
self.assertEqual(
|
| 75 |
+
s,
|
| 76 |
+
'{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
|
| 77 |
+
|
| 78 |
+
def test_indent_unknown_type_acceptance(self):
|
| 79 |
+
"""
|
| 80 |
+
A test against the regression mentioned at `github issue 29`_.
|
| 81 |
+
|
| 82 |
+
The indent parameter should accept any type which pretends to be
|
| 83 |
+
an instance of int or long when it comes to being multiplied by
|
| 84 |
+
strings, even if it is not actually an int or long, for
|
| 85 |
+
backwards compatibility.
|
| 86 |
+
|
| 87 |
+
.. _github issue 29:
|
| 88 |
+
http://github.com/simplejson/simplejson/issue/29
|
| 89 |
+
"""
|
| 90 |
+
|
| 91 |
+
class AwesomeInt(object):
|
| 92 |
+
"""An awesome reimplementation of integers"""
|
| 93 |
+
|
| 94 |
+
def __init__(self, *args, **kwargs):
|
| 95 |
+
if len(args) > 0:
|
| 96 |
+
# [construct from literals, objects, etc.]
|
| 97 |
+
# ...
|
| 98 |
+
|
| 99 |
+
# Finally, if args[0] is an integer, store it
|
| 100 |
+
if isinstance(args[0], int):
|
| 101 |
+
self._int = args[0]
|
| 102 |
+
|
| 103 |
+
# [various methods]
|
| 104 |
+
|
| 105 |
+
def __mul__(self, other):
|
| 106 |
+
# [various ways to multiply AwesomeInt objects]
|
| 107 |
+
# ... finally, if the right-hand operand is not awesome enough,
|
| 108 |
+
# try to do a normal integer multiplication
|
| 109 |
+
if hasattr(self, '_int'):
|
| 110 |
+
return self._int * other
|
| 111 |
+
else:
|
| 112 |
+
raise NotImplementedError("To do non-awesome things with"
|
| 113 |
+
" this object, please construct it from an integer!")
|
| 114 |
+
|
| 115 |
+
s = json.dumpsJSON([0, 1, 2], indent=AwesomeInt(3))
|
| 116 |
+
self.assertEqual(s, '[\n 0,\n 1,\n 2\n]')
|
| 117 |
+
|
| 118 |
+
def test_accumulator(self):
|
| 119 |
+
# the C API uses an accumulator that collects after 100,000 appends
|
| 120 |
+
lst = [0] * 100000
|
| 121 |
+
self.assertEqual(json.loads(json.dumpsJSON(lst)), lst)
|
| 122 |
+
|
| 123 |
+
def test_sort_keys(self):
|
| 124 |
+
# https://github.com/simplejson/simplejson/issues/106
|
| 125 |
+
for num_keys in range(2, 32):
|
| 126 |
+
p = dict((str(x), x) for x in range(num_keys))
|
| 127 |
+
sio = StringIO()
|
| 128 |
+
json.dumpJSON(p, sio, sort_keys=True)
|
| 129 |
+
self.assertEqual(sio.getvalue(), json.dumpsJSON(p, sort_keys=True))
|
| 130 |
+
self.assertEqual(json.loads(sio.getvalue()), p)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_encode_basestring_ascii.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson.encoder
|
| 4 |
+
from hjson.compat import b
|
| 5 |
+
|
| 6 |
+
CASES = [
|
| 7 |
+
(u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
|
| 8 |
+
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
|
| 9 |
+
(u'controls', '"controls"'),
|
| 10 |
+
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
|
| 11 |
+
(u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
|
| 12 |
+
(u' s p a c e d ', '" s p a c e d "'),
|
| 13 |
+
(u'\U0001d120', '"\\ud834\\udd20"'),
|
| 14 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 15 |
+
(b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
|
| 16 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 17 |
+
(b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
|
| 18 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 19 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 20 |
+
(u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
|
| 21 |
+
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
|
| 22 |
+
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
|
| 23 |
+
]
|
| 24 |
+
|
| 25 |
+
class TestEncodeBaseStringAscii(TestCase):
|
| 26 |
+
def test_py_encode_basestring_ascii(self):
|
| 27 |
+
self._test_encode_basestring_ascii(hjson.encoder.encode_basestring_ascii)
|
| 28 |
+
|
| 29 |
+
def _test_encode_basestring_ascii(self, encode_basestring_ascii):
|
| 30 |
+
fname = encode_basestring_ascii.__name__
|
| 31 |
+
for input_string, expect in CASES:
|
| 32 |
+
result = encode_basestring_ascii(input_string)
|
| 33 |
+
#self.assertEqual(result, expect,
|
| 34 |
+
# '{0!r} != {1!r} for {2}({3!r})'.format(
|
| 35 |
+
# result, expect, fname, input_string))
|
| 36 |
+
self.assertEqual(result, expect,
|
| 37 |
+
'%r != %r for %s(%r)' % (result, expect, fname, input_string))
|
| 38 |
+
|
| 39 |
+
def test_sorted_dict(self):
|
| 40 |
+
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
|
| 41 |
+
s = hjson.dumpsJSON(dict(items), sort_keys=True)
|
| 42 |
+
self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}')
|
parrot/lib/python3.10/site-packages/hjson/tests/test_errors.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys, pickle
|
| 2 |
+
from unittest import TestCase
|
| 3 |
+
|
| 4 |
+
import hjson as json
|
| 5 |
+
from hjson.compat import u, b
|
| 6 |
+
|
| 7 |
+
class TestErrors(TestCase):
|
| 8 |
+
def test_string_keys_error(self):
|
| 9 |
+
data = [{'a': 'A', 'b': (2, 4), 'c': 3.0, ('d',): 'D tuple'}]
|
| 10 |
+
self.assertRaises(TypeError, json.dumpsJSON, data)
|
| 11 |
+
|
| 12 |
+
def test_decode_error(self):
|
| 13 |
+
err = None
|
| 14 |
+
try:
|
| 15 |
+
json.loads('{}\na\nb')
|
| 16 |
+
except json.HjsonDecodeError:
|
| 17 |
+
err = sys.exc_info()[1]
|
| 18 |
+
else:
|
| 19 |
+
self.fail('Expected HjsonDecodeError')
|
| 20 |
+
self.assertEqual(err.lineno, 2)
|
| 21 |
+
self.assertEqual(err.colno, 1)
|
| 22 |
+
self.assertEqual(err.endlineno, 3)
|
| 23 |
+
self.assertEqual(err.endcolno, 2)
|
| 24 |
+
|
| 25 |
+
def test_scan_error(self):
|
| 26 |
+
err = None
|
| 27 |
+
for t in (u, b):
|
| 28 |
+
try:
|
| 29 |
+
json.loads(t('{"asdf": "'))
|
| 30 |
+
except json.HjsonDecodeError:
|
| 31 |
+
err = sys.exc_info()[1]
|
| 32 |
+
else:
|
| 33 |
+
self.fail('Expected HjsonDecodeError')
|
| 34 |
+
self.assertEqual(err.lineno, 1)
|
| 35 |
+
self.assertEqual(err.colno, 10)
|
| 36 |
+
|
| 37 |
+
def test_error_is_pickable(self):
|
| 38 |
+
err = None
|
| 39 |
+
try:
|
| 40 |
+
json.loads('{}\na\nb')
|
| 41 |
+
except json.HjsonDecodeError:
|
| 42 |
+
err = sys.exc_info()[1]
|
| 43 |
+
else:
|
| 44 |
+
self.fail('Expected HjsonDecodeError')
|
| 45 |
+
s = pickle.dumps(err)
|
| 46 |
+
e = pickle.loads(s)
|
| 47 |
+
|
| 48 |
+
self.assertEqual(err.msg, e.msg)
|
| 49 |
+
self.assertEqual(err.doc, e.doc)
|
| 50 |
+
self.assertEqual(err.pos, e.pos)
|
| 51 |
+
self.assertEqual(err.end, e.end)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_fail.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from unittest import TestCase
|
| 3 |
+
|
| 4 |
+
import hjson as json
|
| 5 |
+
|
| 6 |
+
# 2007-10-05
|
| 7 |
+
JSONDOCS = [
|
| 8 |
+
# http://json.org/JSON_checker/test/fail1.json
|
| 9 |
+
# '"A JSON payload should be an object or array, not a string."',
|
| 10 |
+
# http://json.org/JSON_checker/test/fail2.json
|
| 11 |
+
'["Unclosed array"',
|
| 12 |
+
# http://json.org/JSON_checker/test/fail3.json
|
| 13 |
+
#'{unquoted_key: "keys must be quoted"}',
|
| 14 |
+
# http://json.org/JSON_checker/test/fail4.json
|
| 15 |
+
# '["extra comma",]',
|
| 16 |
+
# http://json.org/JSON_checker/test/fail5.json
|
| 17 |
+
'["double extra comma",,]',
|
| 18 |
+
# http://json.org/JSON_checker/test/fail6.json
|
| 19 |
+
'[ , "<-- missing value"]',
|
| 20 |
+
# http://json.org/JSON_checker/test/fail7.json
|
| 21 |
+
'["Comma after the close"],',
|
| 22 |
+
# http://json.org/JSON_checker/test/fail8.json
|
| 23 |
+
'["Extra close"]]',
|
| 24 |
+
# http://json.org/JSON_checker/test/fail9.json
|
| 25 |
+
# '{"Extra comma": true,}',
|
| 26 |
+
# http://json.org/JSON_checker/test/fail10.json
|
| 27 |
+
'{"Extra value after close": true} "misplaced quoted value"',
|
| 28 |
+
# http://json.org/JSON_checker/test/fail11.json
|
| 29 |
+
'{"Illegal expression": 1 + 2}',
|
| 30 |
+
# http://json.org/JSON_checker/test/fail12.json
|
| 31 |
+
'{"Illegal invocation": alert()}',
|
| 32 |
+
# http://json.org/JSON_checker/test/fail13.json
|
| 33 |
+
'{"Numbers cannot have leading zeroes": 013}',
|
| 34 |
+
# http://json.org/JSON_checker/test/fail14.json
|
| 35 |
+
'{"Numbers cannot be hex": 0x14}',
|
| 36 |
+
# http://json.org/JSON_checker/test/fail15.json
|
| 37 |
+
'["Illegal backslash escape: \\x15"]',
|
| 38 |
+
# http://json.org/JSON_checker/test/fail16.json
|
| 39 |
+
'[\\naked]',
|
| 40 |
+
# http://json.org/JSON_checker/test/fail17.json
|
| 41 |
+
'["Illegal backslash escape: \\017"]',
|
| 42 |
+
# http://json.org/JSON_checker/test/fail18.json
|
| 43 |
+
# '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
|
| 44 |
+
# http://json.org/JSON_checker/test/fail19.json
|
| 45 |
+
'{"Missing colon" null}',
|
| 46 |
+
# http://json.org/JSON_checker/test/fail20.json
|
| 47 |
+
'{"Double colon":: null}',
|
| 48 |
+
# http://json.org/JSON_checker/test/fail21.json
|
| 49 |
+
'{"Comma instead of colon", null}',
|
| 50 |
+
# http://json.org/JSON_checker/test/fail22.json
|
| 51 |
+
'["Colon instead of comma": false]',
|
| 52 |
+
# http://json.org/JSON_checker/test/fail23.json
|
| 53 |
+
'["Bad value", truth]',
|
| 54 |
+
# http://json.org/JSON_checker/test/fail24.json
|
| 55 |
+
#"['single quote']",
|
| 56 |
+
# http://json.org/JSON_checker/test/fail25.json
|
| 57 |
+
'["\ttab\tcharacter\tin\tstring\t"]',
|
| 58 |
+
# http://json.org/JSON_checker/test/fail26.json
|
| 59 |
+
'["tab\\ character\\ in\\ string\\ "]',
|
| 60 |
+
# http://json.org/JSON_checker/test/fail27.json
|
| 61 |
+
'["line\nbreak"]',
|
| 62 |
+
# http://json.org/JSON_checker/test/fail28.json
|
| 63 |
+
'["line\\\nbreak"]',
|
| 64 |
+
# http://json.org/JSON_checker/test/fail29.json
|
| 65 |
+
'[0e]',
|
| 66 |
+
# http://json.org/JSON_checker/test/fail30.json
|
| 67 |
+
'[0e+]',
|
| 68 |
+
# http://json.org/JSON_checker/test/fail31.json
|
| 69 |
+
'[0e+-1]',
|
| 70 |
+
# http://json.org/JSON_checker/test/fail32.json
|
| 71 |
+
'{"Comma instead if closing brace": true,',
|
| 72 |
+
# http://json.org/JSON_checker/test/fail33.json
|
| 73 |
+
'["mismatch"}',
|
| 74 |
+
# http://code.google.com/p/simplejson/issues/detail?id=3
|
| 75 |
+
u'["A\u001FZ control characters in string"]',
|
| 76 |
+
# misc based on coverage
|
| 77 |
+
'{',
|
| 78 |
+
'{]',
|
| 79 |
+
'{"foo": "bar"]',
|
| 80 |
+
'{"foo": "bar"',
|
| 81 |
+
]
|
| 82 |
+
|
| 83 |
+
class TestFail(TestCase):
|
| 84 |
+
def test_failures(self):
|
| 85 |
+
for idx, doc in enumerate(JSONDOCS):
|
| 86 |
+
idx = idx + 1
|
| 87 |
+
try:
|
| 88 |
+
json.loads(doc)
|
| 89 |
+
except json.HjsonDecodeError:
|
| 90 |
+
pass
|
| 91 |
+
else:
|
| 92 |
+
self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
|
| 93 |
+
|
| 94 |
+
def test_array_decoder_issue46(self):
|
| 95 |
+
# http://code.google.com/p/simplejson/issues/detail?id=46
|
| 96 |
+
for doc in [u'[,]', '[,]']:
|
| 97 |
+
try:
|
| 98 |
+
json.loads(doc)
|
| 99 |
+
except json.HjsonDecodeError:
|
| 100 |
+
pass
|
| 101 |
+
except Exception:
|
| 102 |
+
e = sys.exc_info()[1]
|
| 103 |
+
self.fail("Unexpected exception raised %r %s" % (e, e))
|
| 104 |
+
else:
|
| 105 |
+
self.fail("Unexpected success parsing '[,]'")
|
| 106 |
+
|
| 107 |
+
def test_truncated_input(self):
|
| 108 |
+
test_cases = [
|
| 109 |
+
('[', "End of input while parsing an array", 1),
|
| 110 |
+
# ('[42', "Expecting ',' delimiter", 3),
|
| 111 |
+
('[42,', 'Expecting value', 4),
|
| 112 |
+
('["', 'Unterminated string starting at', 1),
|
| 113 |
+
('["spam', 'Unterminated string starting at', 1),
|
| 114 |
+
# ('["spam"', "Expecting ',' delimiter", 7),
|
| 115 |
+
('["spam",', 'Expecting value', 8),
|
| 116 |
+
('{', 'Bad key name (eof)', 1),
|
| 117 |
+
('{"', 'Unterminated string starting at', 1),
|
| 118 |
+
('{"spam', 'Unterminated string starting at', 1),
|
| 119 |
+
('{"spam"', "Expecting ':' delimiter", 7),
|
| 120 |
+
('{"spam":', 'Expecting value', 8),
|
| 121 |
+
# ('{"spam":42', "Expecting ',' delimiter", 10),
|
| 122 |
+
('{"spam":42,', 'Bad key name (eof)', 11),
|
| 123 |
+
('"', 'Unterminated string starting at', 0),
|
| 124 |
+
('"spam', 'Unterminated string starting at', 0),
|
| 125 |
+
('[,', "Found a punctuator character", 1),
|
| 126 |
+
]
|
| 127 |
+
for data, msg, idx in test_cases:
|
| 128 |
+
try:
|
| 129 |
+
json.loads(data)
|
| 130 |
+
except json.HjsonDecodeError:
|
| 131 |
+
e = sys.exc_info()[1]
|
| 132 |
+
self.assertEqual(
|
| 133 |
+
e.msg[:len(msg)],
|
| 134 |
+
msg,
|
| 135 |
+
"%r doesn't start with %r for %r" % (e.msg, msg, data))
|
| 136 |
+
self.assertEqual(
|
| 137 |
+
e.pos, idx,
|
| 138 |
+
"pos %r != %r for %r" % (e.pos, idx, data))
|
| 139 |
+
except Exception:
|
| 140 |
+
e = sys.exc_info()[1]
|
| 141 |
+
self.fail("Unexpected exception raised %r %s" % (e, e))
|
| 142 |
+
else:
|
| 143 |
+
self.fail("Unexpected success parsing '%r'" % (data,))
|
parrot/lib/python3.10/site-packages/hjson/tests/test_float.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
from unittest import TestCase
|
| 3 |
+
from hjson.compat import long_type, text_type
|
| 4 |
+
import hjson as json
|
| 5 |
+
from hjson.decoder import NaN, PosInf, NegInf
|
| 6 |
+
|
| 7 |
+
class TestFloat(TestCase):
|
| 8 |
+
|
| 9 |
+
def test_degenerates_ignore(self):
|
| 10 |
+
for f in (PosInf, NegInf, NaN):
|
| 11 |
+
self.assertEqual(json.loads(json.dumpsJSON(f)), None)
|
| 12 |
+
|
| 13 |
+
def test_floats(self):
|
| 14 |
+
for num in [1617161771.7650001, math.pi, math.pi**100,
|
| 15 |
+
math.pi**-100, 3.1]:
|
| 16 |
+
self.assertEqual(float(json.dumpsJSON(num)), num)
|
| 17 |
+
self.assertEqual(json.loads(json.dumpsJSON(num)), num)
|
| 18 |
+
self.assertEqual(json.loads(text_type(json.dumpsJSON(num))), num)
|
| 19 |
+
|
| 20 |
+
def test_ints(self):
|
| 21 |
+
for num in [1, long_type(1), 1<<32, 1<<64]:
|
| 22 |
+
self.assertEqual(json.dumpsJSON(num), str(num))
|
| 23 |
+
self.assertEqual(int(json.dumpsJSON(num)), num)
|
| 24 |
+
self.assertEqual(json.loads(json.dumpsJSON(num)), num)
|
| 25 |
+
self.assertEqual(json.loads(text_type(json.dumpsJSON(num))), num)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_for_json.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import unittest
|
| 2 |
+
import hjson as json
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class ForJson(object):
|
| 6 |
+
def for_json(self):
|
| 7 |
+
return {'for_json': 1}
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class NestedForJson(object):
|
| 11 |
+
def for_json(self):
|
| 12 |
+
return {'nested': ForJson()}
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class ForJsonList(object):
|
| 16 |
+
def for_json(self):
|
| 17 |
+
return ['list']
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class DictForJson(dict):
|
| 21 |
+
def for_json(self):
|
| 22 |
+
return {'alpha': 1}
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class ListForJson(list):
|
| 26 |
+
def for_json(self):
|
| 27 |
+
return ['list']
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class TestForJson(unittest.TestCase):
|
| 31 |
+
def assertRoundTrip(self, obj, other, for_json=True):
|
| 32 |
+
if for_json is None:
|
| 33 |
+
# None will use the default
|
| 34 |
+
s = json.dumpsJSON(obj)
|
| 35 |
+
else:
|
| 36 |
+
s = json.dumpsJSON(obj, for_json=for_json)
|
| 37 |
+
self.assertEqual(
|
| 38 |
+
json.loads(s),
|
| 39 |
+
other)
|
| 40 |
+
|
| 41 |
+
def test_for_json_encodes_stand_alone_object(self):
|
| 42 |
+
self.assertRoundTrip(
|
| 43 |
+
ForJson(),
|
| 44 |
+
ForJson().for_json())
|
| 45 |
+
|
| 46 |
+
def test_for_json_encodes_object_nested_in_dict(self):
|
| 47 |
+
self.assertRoundTrip(
|
| 48 |
+
{'hooray': ForJson()},
|
| 49 |
+
{'hooray': ForJson().for_json()})
|
| 50 |
+
|
| 51 |
+
def test_for_json_encodes_object_nested_in_list_within_dict(self):
|
| 52 |
+
self.assertRoundTrip(
|
| 53 |
+
{'list': [0, ForJson(), 2, 3]},
|
| 54 |
+
{'list': [0, ForJson().for_json(), 2, 3]})
|
| 55 |
+
|
| 56 |
+
def test_for_json_encodes_object_nested_within_object(self):
|
| 57 |
+
self.assertRoundTrip(
|
| 58 |
+
NestedForJson(),
|
| 59 |
+
{'nested': {'for_json': 1}})
|
| 60 |
+
|
| 61 |
+
def test_for_json_encodes_list(self):
|
| 62 |
+
self.assertRoundTrip(
|
| 63 |
+
ForJsonList(),
|
| 64 |
+
ForJsonList().for_json())
|
| 65 |
+
|
| 66 |
+
def test_for_json_encodes_list_within_object(self):
|
| 67 |
+
self.assertRoundTrip(
|
| 68 |
+
{'nested': ForJsonList()},
|
| 69 |
+
{'nested': ForJsonList().for_json()})
|
| 70 |
+
|
| 71 |
+
def test_for_json_encodes_dict_subclass(self):
|
| 72 |
+
self.assertRoundTrip(
|
| 73 |
+
DictForJson(a=1),
|
| 74 |
+
DictForJson(a=1).for_json())
|
| 75 |
+
|
| 76 |
+
def test_for_json_encodes_list_subclass(self):
|
| 77 |
+
self.assertRoundTrip(
|
| 78 |
+
ListForJson(['l']),
|
| 79 |
+
ListForJson(['l']).for_json())
|
| 80 |
+
|
| 81 |
+
def test_for_json_ignored_if_not_true_with_dict_subclass(self):
|
| 82 |
+
for for_json in (None, False):
|
| 83 |
+
self.assertRoundTrip(
|
| 84 |
+
DictForJson(a=1),
|
| 85 |
+
{'a': 1},
|
| 86 |
+
for_json=for_json)
|
| 87 |
+
|
| 88 |
+
def test_for_json_ignored_if_not_true_with_list_subclass(self):
|
| 89 |
+
for for_json in (None, False):
|
| 90 |
+
self.assertRoundTrip(
|
| 91 |
+
ListForJson(['l']),
|
| 92 |
+
['l'],
|
| 93 |
+
for_json=for_json)
|
| 94 |
+
|
| 95 |
+
def test_raises_typeerror_if_for_json_not_true_with_object(self):
|
| 96 |
+
self.assertRaises(TypeError, json.dumpsJSON, ForJson())
|
| 97 |
+
self.assertRaises(TypeError, json.dumpsJSON, ForJson(), for_json=False)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_hjson.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import with_statement
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import subprocess
|
| 6 |
+
import tempfile
|
| 7 |
+
import codecs # dump
|
| 8 |
+
|
| 9 |
+
from unittest import TestCase
|
| 10 |
+
|
| 11 |
+
import hjson
|
| 12 |
+
|
| 13 |
+
class TestAssets(TestCase):
|
| 14 |
+
|
| 15 |
+
def __init__(self, *args, **kwargs):
|
| 16 |
+
super(TestAssets, self).__init__(*args, **kwargs)
|
| 17 |
+
self.assetsDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "assets")
|
| 18 |
+
self.assets = self.load('testlist.txt', False).split('\n')
|
| 19 |
+
self.maxDiff = None
|
| 20 |
+
self.verma, self.vermi = sys.version_info[0:2]
|
| 21 |
+
|
| 22 |
+
def load(self, name, cr):
|
| 23 |
+
name = os.path.join(self.assetsDir, name)
|
| 24 |
+
with open(name, 'rb') as f:
|
| 25 |
+
text = f.read().decode('utf-8')
|
| 26 |
+
text = text.replace('\r', '')
|
| 27 |
+
if cr: text = text.replace('\n', '\r\n')
|
| 28 |
+
return text
|
| 29 |
+
|
| 30 |
+
def check(self, name, file, inputCr):
|
| 31 |
+
text = self.load(file, inputCr)
|
| 32 |
+
shouldFail = name[0:4] == "fail"
|
| 33 |
+
|
| 34 |
+
try:
|
| 35 |
+
data = hjson.loads(text)
|
| 36 |
+
self.assertFalse(shouldFail, file)
|
| 37 |
+
|
| 38 |
+
text1 = hjson.dumpsJSON(data)
|
| 39 |
+
hjson1 = hjson.dumps(data, ensure_ascii=False);
|
| 40 |
+
result = hjson.loads(self.load(name + "_result.json", inputCr))
|
| 41 |
+
text2 = hjson.dumpsJSON(result)
|
| 42 |
+
hjson2 = self.load(name + "_result.hjson", False)
|
| 43 |
+
|
| 44 |
+
# dbg
|
| 45 |
+
# with open(name + "_dbg1.txt", "w") as tmp: tmp.write(hjson1.encode("utf-8"))
|
| 46 |
+
# with open(name + "_dbg2.txt", "w") as tmp: tmp.write(hjson2.encode("utf-8"))
|
| 47 |
+
# with codecs.open(name + "_dbg3.txt", 'w', 'utf-8') as tmp: hjson.dump(data, tmp)
|
| 48 |
+
|
| 49 |
+
if self.verma>2 or self.vermi>6:
|
| 50 |
+
# final check fails on py2.6 because of string formatting issues
|
| 51 |
+
self.assertEqual(text2, text1, file)
|
| 52 |
+
self.assertEqual(hjson2, hjson1, file)
|
| 53 |
+
|
| 54 |
+
except hjson.HjsonDecodeError as e:
|
| 55 |
+
if not shouldFail:
|
| 56 |
+
self.fail("raised error on parsing %s: %r" % (file, e))
|
| 57 |
+
|
| 58 |
+
def test_files(self):
|
| 59 |
+
for file in self.assets:
|
| 60 |
+
name, sep, ext = file.partition("_test.")
|
| 61 |
+
if name.startswith("stringify/quotes") or \
|
| 62 |
+
name.startswith("extra/"): continue # ignore/not supported
|
| 63 |
+
|
| 64 |
+
self.check(name, file, True)
|
| 65 |
+
self.check(name, file, False)
|
parrot/lib/python3.10/site-packages/hjson/tests/test_indent.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
import textwrap
|
| 3 |
+
|
| 4 |
+
import hjson as json
|
| 5 |
+
from hjson.compat import StringIO
|
| 6 |
+
|
| 7 |
+
class TestIndent(TestCase):
|
| 8 |
+
def test_indent(self):
|
| 9 |
+
h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh',
|
| 10 |
+
'i-vhbjkhnth',
|
| 11 |
+
{'nifty': 87}, {'field': 'yes', 'morefield': False} ]
|
| 12 |
+
|
| 13 |
+
expect = textwrap.dedent("""\
|
| 14 |
+
[
|
| 15 |
+
\t[
|
| 16 |
+
\t\t"blorpie"
|
| 17 |
+
\t],
|
| 18 |
+
\t[
|
| 19 |
+
\t\t"whoops"
|
| 20 |
+
\t],
|
| 21 |
+
\t[],
|
| 22 |
+
\t"d-shtaeou",
|
| 23 |
+
\t"d-nthiouh",
|
| 24 |
+
\t"i-vhbjkhnth",
|
| 25 |
+
\t{
|
| 26 |
+
\t\t"nifty": 87
|
| 27 |
+
\t},
|
| 28 |
+
\t{
|
| 29 |
+
\t\t"field": "yes",
|
| 30 |
+
\t\t"morefield": false
|
| 31 |
+
\t}
|
| 32 |
+
]""")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
d1 = json.dumpsJSON(h)
|
| 36 |
+
d2 = json.dumpsJSON(h, indent='\t', sort_keys=True, separators=(',', ': '))
|
| 37 |
+
d3 = json.dumpsJSON(h, indent=' ', sort_keys=True, separators=(',', ': '))
|
| 38 |
+
d4 = json.dumpsJSON(h, indent=2, sort_keys=True, separators=(',', ': '))
|
| 39 |
+
|
| 40 |
+
h1 = json.loads(d1)
|
| 41 |
+
h2 = json.loads(d2)
|
| 42 |
+
h3 = json.loads(d3)
|
| 43 |
+
h4 = json.loads(d4)
|
| 44 |
+
|
| 45 |
+
self.assertEqual(h1, h)
|
| 46 |
+
self.assertEqual(h2, h)
|
| 47 |
+
self.assertEqual(h3, h)
|
| 48 |
+
self.assertEqual(h4, h)
|
| 49 |
+
self.assertEqual(d3, expect.replace('\t', ' '))
|
| 50 |
+
self.assertEqual(d4, expect.replace('\t', ' '))
|
| 51 |
+
# NOTE: Python 2.4 textwrap.dedent converts tabs to spaces,
|
| 52 |
+
# so the following is expected to fail. Python 2.4 is not a
|
| 53 |
+
# supported platform in hjson 2.1.0+.
|
| 54 |
+
self.assertEqual(d2, expect)
|
| 55 |
+
|
| 56 |
+
def test_indent0(self):
|
| 57 |
+
h = {3: 1}
|
| 58 |
+
def check(indent, expected):
|
| 59 |
+
d1 = json.dumpsJSON(h, indent=indent)
|
| 60 |
+
self.assertEqual(d1, expected)
|
| 61 |
+
|
| 62 |
+
sio = StringIO()
|
| 63 |
+
json.dumpJSON(h, sio, indent=indent)
|
| 64 |
+
self.assertEqual(sio.getvalue(), expected)
|
| 65 |
+
|
| 66 |
+
# indent=0 should emit newlines
|
| 67 |
+
check(0, '{\n"3": 1\n}')
|
| 68 |
+
# indent=None is more compact
|
| 69 |
+
check(None, '{"3": 1}')
|
| 70 |
+
|
| 71 |
+
def test_separators(self):
|
| 72 |
+
lst = [1,2,3,4]
|
| 73 |
+
expect = '[\n1,\n2,\n3,\n4\n]'
|
| 74 |
+
expect_spaces = '[\n1, \n2, \n3, \n4\n]'
|
| 75 |
+
# Ensure that separators still works
|
| 76 |
+
self.assertEqual(
|
| 77 |
+
expect_spaces,
|
| 78 |
+
json.dumpsJSON(lst, indent=0, separators=(', ', ': ')))
|
| 79 |
+
# Force the new defaults
|
| 80 |
+
self.assertEqual(
|
| 81 |
+
expect,
|
| 82 |
+
json.dumpsJSON(lst, indent=0, separators=(',', ': ')))
|
| 83 |
+
# Added in 2.1.4
|
| 84 |
+
self.assertEqual(
|
| 85 |
+
expect,
|
| 86 |
+
json.dumpsJSON(lst, indent=0))
|