Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/INSTALLER +1 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/LICENSE.rst +28 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/METADATA +103 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/RECORD +40 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/REQUESTED +0 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/WHEEL +5 -0
- evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/top_level.txt +1 -0
- evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/METADATA +59 -0
- evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/RECORD +25 -0
- evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/REQUESTED +0 -0
- evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/WHEEL +4 -0
- evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/licenses/LICENSE +24 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/__init__.py +642 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/compat.py +46 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/decoder.py +569 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/encoder.py +521 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/encoderH.py +552 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/ordered_dict.py +119 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/scanner.py +56 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__init__.py +61 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_bigint_as_string.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_bitsize_int_as_string.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_check_circular.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_decode.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_default.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_dump.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_encode_basestring_ascii.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_errors.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_fail.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_float.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_hjson.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_indent.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_namedtuple.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass1.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass3.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_recursion.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_separators.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_tool.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_tuple.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_unicode.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_bigint_as_string.py +67 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_bitsize_int_as_string.py +73 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_check_circular.py +30 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_decimal.py +71 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_decode.py +139 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_default.py +9 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_dump.py +130 -0
- evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_encode_basestring_ascii.py +42 -0
.gitattributes
CHANGED
|
@@ -2624,3 +2624,4 @@ evalkit_tf446/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl filt
|
|
| 2624 |
evalkit_tf446/lib/python3.10/tkinter/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 2625 |
evalkit_tf446/lib/libtcl8.6.so filter=lfs diff=lfs merge=lfs -text
|
| 2626 |
evalkit_tf446/lib/python3.10/lib2to3/tests/__pycache__/test_fixers.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 2624 |
evalkit_tf446/lib/python3.10/tkinter/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 2625 |
evalkit_tf446/lib/libtcl8.6.so filter=lfs diff=lfs merge=lfs -text
|
| 2626 |
evalkit_tf446/lib/python3.10/lib2to3/tests/__pycache__/test_fixers.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 2627 |
+
evalkit_tf446/lib/python3.10/site-packages/sentencepiece/_sentencepiece.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/LICENSE.rst
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright 2014 Pallets
|
| 2 |
+
|
| 3 |
+
Redistribution and use in source and binary forms, with or without
|
| 4 |
+
modification, are permitted provided that the following conditions are
|
| 5 |
+
met:
|
| 6 |
+
|
| 7 |
+
1. Redistributions of source code must retain the above copyright
|
| 8 |
+
notice, this list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 11 |
+
notice, this list of conditions and the following disclaimer in the
|
| 12 |
+
documentation and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
3. Neither the name of the copyright holder nor the names of its
|
| 15 |
+
contributors may be used to endorse or promote products derived from
|
| 16 |
+
this software without specific prior written permission.
|
| 17 |
+
|
| 18 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 19 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 20 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
| 21 |
+
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 22 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 23 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
| 24 |
+
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
| 25 |
+
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
| 26 |
+
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
| 27 |
+
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
| 28 |
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/METADATA
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: click
|
| 3 |
+
Version: 8.1.7
|
| 4 |
+
Summary: Composable command line interface toolkit
|
| 5 |
+
Home-page: https://palletsprojects.com/p/click/
|
| 6 |
+
Maintainer: Pallets
|
| 7 |
+
Maintainer-email: contact@palletsprojects.com
|
| 8 |
+
License: BSD-3-Clause
|
| 9 |
+
Project-URL: Donate, https://palletsprojects.com/donate
|
| 10 |
+
Project-URL: Documentation, https://click.palletsprojects.com/
|
| 11 |
+
Project-URL: Changes, https://click.palletsprojects.com/changes/
|
| 12 |
+
Project-URL: Source Code, https://github.com/pallets/click/
|
| 13 |
+
Project-URL: Issue Tracker, https://github.com/pallets/click/issues/
|
| 14 |
+
Project-URL: Chat, https://discord.gg/pallets
|
| 15 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 16 |
+
Classifier: Intended Audience :: Developers
|
| 17 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 18 |
+
Classifier: Operating System :: OS Independent
|
| 19 |
+
Classifier: Programming Language :: Python
|
| 20 |
+
Requires-Python: >=3.7
|
| 21 |
+
Description-Content-Type: text/x-rst
|
| 22 |
+
License-File: LICENSE.rst
|
| 23 |
+
Requires-Dist: colorama ; platform_system == "Windows"
|
| 24 |
+
Requires-Dist: importlib-metadata ; python_version < "3.8"
|
| 25 |
+
|
| 26 |
+
\$ click\_
|
| 27 |
+
==========
|
| 28 |
+
|
| 29 |
+
Click is a Python package for creating beautiful command line interfaces
|
| 30 |
+
in a composable way with as little code as necessary. It's the "Command
|
| 31 |
+
Line Interface Creation Kit". It's highly configurable but comes with
|
| 32 |
+
sensible defaults out of the box.
|
| 33 |
+
|
| 34 |
+
It aims to make the process of writing command line tools quick and fun
|
| 35 |
+
while also preventing any frustration caused by the inability to
|
| 36 |
+
implement an intended CLI API.
|
| 37 |
+
|
| 38 |
+
Click in three points:
|
| 39 |
+
|
| 40 |
+
- Arbitrary nesting of commands
|
| 41 |
+
- Automatic help page generation
|
| 42 |
+
- Supports lazy loading of subcommands at runtime
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
Installing
|
| 46 |
+
----------
|
| 47 |
+
|
| 48 |
+
Install and update using `pip`_:
|
| 49 |
+
|
| 50 |
+
.. code-block:: text
|
| 51 |
+
|
| 52 |
+
$ pip install -U click
|
| 53 |
+
|
| 54 |
+
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
A Simple Example
|
| 58 |
+
----------------
|
| 59 |
+
|
| 60 |
+
.. code-block:: python
|
| 61 |
+
|
| 62 |
+
import click
|
| 63 |
+
|
| 64 |
+
@click.command()
|
| 65 |
+
@click.option("--count", default=1, help="Number of greetings.")
|
| 66 |
+
@click.option("--name", prompt="Your name", help="The person to greet.")
|
| 67 |
+
def hello(count, name):
|
| 68 |
+
"""Simple program that greets NAME for a total of COUNT times."""
|
| 69 |
+
for _ in range(count):
|
| 70 |
+
click.echo(f"Hello, {name}!")
|
| 71 |
+
|
| 72 |
+
if __name__ == '__main__':
|
| 73 |
+
hello()
|
| 74 |
+
|
| 75 |
+
.. code-block:: text
|
| 76 |
+
|
| 77 |
+
$ python hello.py --count=3
|
| 78 |
+
Your name: Click
|
| 79 |
+
Hello, Click!
|
| 80 |
+
Hello, Click!
|
| 81 |
+
Hello, Click!
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
Donate
|
| 85 |
+
------
|
| 86 |
+
|
| 87 |
+
The Pallets organization develops and supports Click and other popular
|
| 88 |
+
packages. In order to grow the community of contributors and users, and
|
| 89 |
+
allow the maintainers to devote more time to the projects, `please
|
| 90 |
+
donate today`_.
|
| 91 |
+
|
| 92 |
+
.. _please donate today: https://palletsprojects.com/donate
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
Links
|
| 96 |
+
-----
|
| 97 |
+
|
| 98 |
+
- Documentation: https://click.palletsprojects.com/
|
| 99 |
+
- Changes: https://click.palletsprojects.com/changes/
|
| 100 |
+
- PyPI Releases: https://pypi.org/project/click/
|
| 101 |
+
- Source Code: https://github.com/pallets/click
|
| 102 |
+
- Issue Tracker: https://github.com/pallets/click/issues
|
| 103 |
+
- Chat: https://discord.gg/pallets
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/RECORD
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
click-8.1.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
click-8.1.7.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
|
| 3 |
+
click-8.1.7.dist-info/METADATA,sha256=qIMevCxGA9yEmJOM_4WHuUJCwWpsIEVbCPOhs45YPN4,3014
|
| 4 |
+
click-8.1.7.dist-info/RECORD,,
|
| 5 |
+
click-8.1.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92
|
| 7 |
+
click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6
|
| 8 |
+
click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138
|
| 9 |
+
click/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
click/__pycache__/_compat.cpython-310.pyc,,
|
| 11 |
+
click/__pycache__/_termui_impl.cpython-310.pyc,,
|
| 12 |
+
click/__pycache__/_textwrap.cpython-310.pyc,,
|
| 13 |
+
click/__pycache__/_winconsole.cpython-310.pyc,,
|
| 14 |
+
click/__pycache__/core.cpython-310.pyc,,
|
| 15 |
+
click/__pycache__/decorators.cpython-310.pyc,,
|
| 16 |
+
click/__pycache__/exceptions.cpython-310.pyc,,
|
| 17 |
+
click/__pycache__/formatting.cpython-310.pyc,,
|
| 18 |
+
click/__pycache__/globals.cpython-310.pyc,,
|
| 19 |
+
click/__pycache__/parser.cpython-310.pyc,,
|
| 20 |
+
click/__pycache__/shell_completion.cpython-310.pyc,,
|
| 21 |
+
click/__pycache__/termui.cpython-310.pyc,,
|
| 22 |
+
click/__pycache__/testing.cpython-310.pyc,,
|
| 23 |
+
click/__pycache__/types.cpython-310.pyc,,
|
| 24 |
+
click/__pycache__/utils.cpython-310.pyc,,
|
| 25 |
+
click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744
|
| 26 |
+
click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069
|
| 27 |
+
click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353
|
| 28 |
+
click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860
|
| 29 |
+
click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086
|
| 30 |
+
click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719
|
| 31 |
+
click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273
|
| 32 |
+
click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706
|
| 33 |
+
click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961
|
| 34 |
+
click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067
|
| 35 |
+
click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 36 |
+
click/shell_completion.py,sha256=Ty3VM_ts0sQhj6u7eFTiLwHPoTgcXTGEAUg2OpLqYKw,18460
|
| 37 |
+
click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324
|
| 38 |
+
click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084
|
| 39 |
+
click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391
|
| 40 |
+
click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.41.1)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
evalkit_tf446/lib/python3.10/site-packages/click-8.1.7.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
click
|
evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.3
|
| 2 |
+
Name: filelock
|
| 3 |
+
Version: 3.16.1
|
| 4 |
+
Summary: A platform independent file lock.
|
| 5 |
+
Project-URL: Documentation, https://py-filelock.readthedocs.io
|
| 6 |
+
Project-URL: Homepage, https://github.com/tox-dev/py-filelock
|
| 7 |
+
Project-URL: Source, https://github.com/tox-dev/py-filelock
|
| 8 |
+
Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues
|
| 9 |
+
Maintainer-email: Bernát Gábor <gaborjbernat@gmail.com>
|
| 10 |
+
License-Expression: Unlicense
|
| 11 |
+
License-File: LICENSE
|
| 12 |
+
Keywords: application,cache,directory,log,user
|
| 13 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 14 |
+
Classifier: Intended Audience :: Developers
|
| 15 |
+
Classifier: License :: OSI Approved :: The Unlicense (Unlicense)
|
| 16 |
+
Classifier: Operating System :: OS Independent
|
| 17 |
+
Classifier: Programming Language :: Python
|
| 18 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 25 |
+
Classifier: Topic :: Internet
|
| 26 |
+
Classifier: Topic :: Software Development :: Libraries
|
| 27 |
+
Classifier: Topic :: System
|
| 28 |
+
Requires-Python: >=3.8
|
| 29 |
+
Provides-Extra: docs
|
| 30 |
+
Requires-Dist: furo>=2024.8.6; extra == 'docs'
|
| 31 |
+
Requires-Dist: sphinx-autodoc-typehints>=2.4.1; extra == 'docs'
|
| 32 |
+
Requires-Dist: sphinx>=8.0.2; extra == 'docs'
|
| 33 |
+
Provides-Extra: testing
|
| 34 |
+
Requires-Dist: covdefaults>=2.3; extra == 'testing'
|
| 35 |
+
Requires-Dist: coverage>=7.6.1; extra == 'testing'
|
| 36 |
+
Requires-Dist: diff-cover>=9.2; extra == 'testing'
|
| 37 |
+
Requires-Dist: pytest-asyncio>=0.24; extra == 'testing'
|
| 38 |
+
Requires-Dist: pytest-cov>=5; extra == 'testing'
|
| 39 |
+
Requires-Dist: pytest-mock>=3.14; extra == 'testing'
|
| 40 |
+
Requires-Dist: pytest-timeout>=2.3.1; extra == 'testing'
|
| 41 |
+
Requires-Dist: pytest>=8.3.3; extra == 'testing'
|
| 42 |
+
Requires-Dist: virtualenv>=20.26.4; extra == 'testing'
|
| 43 |
+
Provides-Extra: typing
|
| 44 |
+
Requires-Dist: typing-extensions>=4.12.2; (python_version < '3.11') and extra == 'typing'
|
| 45 |
+
Description-Content-Type: text/markdown
|
| 46 |
+
|
| 47 |
+
# filelock
|
| 48 |
+
|
| 49 |
+
[](https://pypi.org/project/filelock/)
|
| 50 |
+
[](https://pypi.org/project/filelock/)
|
| 52 |
+
[](https://py-filelock.readthedocs.io/en/latest/?badge=latest)
|
| 54 |
+
[](https://github.com/psf/black)
|
| 56 |
+
[](https://pepy.tech/project/filelock)
|
| 57 |
+
[](https://github.com/tox-dev/py-filelock/actions/workflows/check.yml)
|
| 58 |
+
|
| 59 |
+
For more information checkout the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html).
|
evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
filelock-3.16.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
filelock-3.16.1.dist-info/METADATA,sha256=LXL5-XQe_eTKkdNs76A6jSicQ1DBSTXqkDcjsprWvIM,2944
|
| 3 |
+
filelock-3.16.1.dist-info/RECORD,,
|
| 4 |
+
filelock-3.16.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 5 |
+
filelock-3.16.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
| 6 |
+
filelock-3.16.1.dist-info/licenses/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210
|
| 7 |
+
filelock/__init__.py,sha256=_t_-OAGXo_qyPa9lNQ1YnzVYEvSW3I0onPqzpomsVVg,1769
|
| 8 |
+
filelock/__pycache__/__init__.cpython-310.pyc,,
|
| 9 |
+
filelock/__pycache__/_api.cpython-310.pyc,,
|
| 10 |
+
filelock/__pycache__/_error.cpython-310.pyc,,
|
| 11 |
+
filelock/__pycache__/_soft.cpython-310.pyc,,
|
| 12 |
+
filelock/__pycache__/_unix.cpython-310.pyc,,
|
| 13 |
+
filelock/__pycache__/_util.cpython-310.pyc,,
|
| 14 |
+
filelock/__pycache__/_windows.cpython-310.pyc,,
|
| 15 |
+
filelock/__pycache__/asyncio.cpython-310.pyc,,
|
| 16 |
+
filelock/__pycache__/version.cpython-310.pyc,,
|
| 17 |
+
filelock/_api.py,sha256=GVeBEGjpDD8S1bYqG6_u0MZfbYHS6XrHs_n3PVKq-h0,14541
|
| 18 |
+
filelock/_error.py,sha256=-5jMcjTu60YAvAO1UbqDD1GIEjVkwr8xCFwDBtMeYDg,787
|
| 19 |
+
filelock/_soft.py,sha256=haqtc_TB_KJbYv2a8iuEAclKuM4fMG1vTcp28sK919c,1711
|
| 20 |
+
filelock/_unix.py,sha256=-FXP0tjInBHUYygOlMpp4taUmD87QOkrD_4ybg_iT7Q,2259
|
| 21 |
+
filelock/_util.py,sha256=QHBoNFIYfbAThhotH3Q8E2acFc84wpG49-T-uu017ZE,1715
|
| 22 |
+
filelock/_windows.py,sha256=eMKL8dZKrgekf5VYVGR14an29JGEInRtUO8ui9ABywg,2177
|
| 23 |
+
filelock/asyncio.py,sha256=3D4JP4Ms5IXTGib5eOekyr6uH6rZlieV_moVGY36juA,12463
|
| 24 |
+
filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 25 |
+
filelock/version.py,sha256=KSOBzuLwiqiVWDPGfMj1ntr25YrY6JBDr8RvinQX_FM,413
|
evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.25.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
evalkit_tf446/lib/python3.10/site-packages/filelock-3.16.1.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This is free and unencumbered software released into the public domain.
|
| 2 |
+
|
| 3 |
+
Anyone is free to copy, modify, publish, use, compile, sell, or
|
| 4 |
+
distribute this software, either in source code form or as a compiled
|
| 5 |
+
binary, for any purpose, commercial or non-commercial, and by any
|
| 6 |
+
means.
|
| 7 |
+
|
| 8 |
+
In jurisdictions that recognize copyright laws, the author or authors
|
| 9 |
+
of this software dedicate any and all copyright interest in the
|
| 10 |
+
software to the public domain. We make this dedication for the benefit
|
| 11 |
+
of the public at large and to the detriment of our heirs and
|
| 12 |
+
successors. We intend this dedication to be an overt act of
|
| 13 |
+
relinquishment in perpetuity of all present and future rights to this
|
| 14 |
+
software under copyright law.
|
| 15 |
+
|
| 16 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 17 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 18 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 19 |
+
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 20 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 21 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 22 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 23 |
+
|
| 24 |
+
For more information, please refer to <http://unlicense.org>
|
evalkit_tf446/lib/python3.10/site-packages/hjson/__init__.py
ADDED
|
@@ -0,0 +1,642 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Hjson, the Human JSON. A configuration file format that caters to
|
| 2 |
+
humans and helps reduce the errors they make.
|
| 3 |
+
|
| 4 |
+
For details and syntax see <https://hjson.github.io>.
|
| 5 |
+
|
| 6 |
+
Decoding Hjson::
|
| 7 |
+
|
| 8 |
+
>>> import hjson
|
| 9 |
+
>>> text = "{\n foo: a\n bar: 1\n}"
|
| 10 |
+
>>> hjson.loads(text)
|
| 11 |
+
OrderedDict([('foo', 'a'), ('bar', 1)])
|
| 12 |
+
|
| 13 |
+
Encoding Python object hierarchies::
|
| 14 |
+
|
| 15 |
+
>>> import hjson
|
| 16 |
+
>>> # hjson.dumps({'foo': 'text', 'bar': (1, 2)})
|
| 17 |
+
>>> hjson.dumps(OrderedDict([('foo', 'text'), ('bar', (1, 2))]))
|
| 18 |
+
'{\n foo: text\n bar:\n [\n 1\n 2\n ]\n}'
|
| 19 |
+
|
| 20 |
+
Encoding as JSON::
|
| 21 |
+
|
| 22 |
+
Note that this is probably not as performant as the simplejson version.
|
| 23 |
+
|
| 24 |
+
>>> import hjson
|
| 25 |
+
>>> hjson.dumpsJSON(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
| 26 |
+
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
| 27 |
+
|
| 28 |
+
Using hjson.tool from the shell to validate and pretty-print::
|
| 29 |
+
|
| 30 |
+
$ echo '{"json":"obj"}' | python -m hjson.tool
|
| 31 |
+
{
|
| 32 |
+
json: obj
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
Other formats are -c for compact or -j for formatted JSON.
|
| 36 |
+
|
| 37 |
+
"""
|
| 38 |
+
from __future__ import absolute_import
|
| 39 |
+
__version__ = '3.1.0'
|
| 40 |
+
__all__ = [
|
| 41 |
+
'dump', 'dumps', 'load', 'loads',
|
| 42 |
+
'dumpJSON', 'dumpsJSON',
|
| 43 |
+
'HjsonDecoder', 'HjsonDecodeError', 'HjsonEncoder', 'JSONEncoder',
|
| 44 |
+
'OrderedDict', 'simple_first',
|
| 45 |
+
]
|
| 46 |
+
|
| 47 |
+
# based on simplejson by
|
| 48 |
+
# __author__ = 'Bob Ippolito <bob@redivi.com>'
|
| 49 |
+
__author__ = 'Christian Zangl <coralllama@gmail.com>'
|
| 50 |
+
|
| 51 |
+
from decimal import Decimal
|
| 52 |
+
|
| 53 |
+
from .scanner import HjsonDecodeError
|
| 54 |
+
from .decoder import HjsonDecoder
|
| 55 |
+
from .encoderH import HjsonEncoder
|
| 56 |
+
from .encoder import JSONEncoder
|
| 57 |
+
def _import_OrderedDict():
|
| 58 |
+
import collections
|
| 59 |
+
try:
|
| 60 |
+
return collections.OrderedDict
|
| 61 |
+
except AttributeError:
|
| 62 |
+
from . import ordered_dict
|
| 63 |
+
return ordered_dict.OrderedDict
|
| 64 |
+
OrderedDict = _import_OrderedDict()
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
_default_decoder = HjsonDecoder(encoding=None, object_hook=None,
|
| 68 |
+
object_pairs_hook=OrderedDict)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
|
| 72 |
+
parse_int=None, object_pairs_hook=OrderedDict,
|
| 73 |
+
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
|
| 74 |
+
**kw):
|
| 75 |
+
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
|
| 76 |
+
a JSON document) to a Python object.
|
| 77 |
+
|
| 78 |
+
*encoding* determines the encoding used to interpret any
|
| 79 |
+
:class:`str` objects decoded by this instance (``'utf-8'`` by
|
| 80 |
+
default). It has no effect when decoding :class:`unicode` objects.
|
| 81 |
+
|
| 82 |
+
Note that currently only encodings that are a superset of ASCII work,
|
| 83 |
+
strings of other encodings should be passed in as :class:`unicode`.
|
| 84 |
+
|
| 85 |
+
*object_hook*, if specified, will be called with the result of every
|
| 86 |
+
JSON object decoded and its return value will be used in place of the
|
| 87 |
+
given :class:`dict`. This can be used to provide custom
|
| 88 |
+
deserializations (e.g. to support JSON-RPC class hinting).
|
| 89 |
+
|
| 90 |
+
*object_pairs_hook* is an optional function that will be called with
|
| 91 |
+
the result of any object literal decode with an ordered list of pairs.
|
| 92 |
+
The return value of *object_pairs_hook* will be used instead of the
|
| 93 |
+
:class:`dict`. This feature can be used to implement custom decoders
|
| 94 |
+
that rely on the order that the key and value pairs are decoded (for
|
| 95 |
+
example, :func:`collections.OrderedDict` will remember the order of
|
| 96 |
+
insertion). If *object_hook* is also defined, the *object_pairs_hook*
|
| 97 |
+
takes priority.
|
| 98 |
+
|
| 99 |
+
*parse_float*, if specified, will be called with the string of every
|
| 100 |
+
JSON float to be decoded. By default, this is equivalent to
|
| 101 |
+
``float(num_str)``. This can be used to use another datatype or parser
|
| 102 |
+
for JSON floats (e.g. :class:`decimal.Decimal`).
|
| 103 |
+
|
| 104 |
+
*parse_int*, if specified, will be called with the string of every
|
| 105 |
+
JSON int to be decoded. By default, this is equivalent to
|
| 106 |
+
``int(num_str)``. This can be used to use another datatype or parser
|
| 107 |
+
for JSON integers (e.g. :class:`float`).
|
| 108 |
+
|
| 109 |
+
If *use_decimal* is true (default: ``False``) then it implies
|
| 110 |
+
parse_float=decimal.Decimal for parity with ``dump``.
|
| 111 |
+
|
| 112 |
+
To use a custom ``HjsonDecoder`` subclass, specify it with the ``cls``
|
| 113 |
+
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
|
| 114 |
+
of subclassing whenever possible.
|
| 115 |
+
|
| 116 |
+
"""
|
| 117 |
+
return loads(fp.read(),
|
| 118 |
+
encoding=encoding, cls=cls, object_hook=object_hook,
|
| 119 |
+
parse_float=parse_float, parse_int=parse_int,
|
| 120 |
+
object_pairs_hook=object_pairs_hook,
|
| 121 |
+
use_decimal=use_decimal, **kw)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
|
| 125 |
+
parse_int=None, object_pairs_hook=None,
|
| 126 |
+
use_decimal=False, **kw):
|
| 127 |
+
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
|
| 128 |
+
document) to a Python object.
|
| 129 |
+
|
| 130 |
+
*encoding* determines the encoding used to interpret any
|
| 131 |
+
:class:`str` objects decoded by this instance (``'utf-8'`` by
|
| 132 |
+
default). It has no effect when decoding :class:`unicode` objects.
|
| 133 |
+
|
| 134 |
+
Note that currently only encodings that are a superset of ASCII work,
|
| 135 |
+
strings of other encodings should be passed in as :class:`unicode`.
|
| 136 |
+
|
| 137 |
+
*object_hook*, if specified, will be called with the result of every
|
| 138 |
+
JSON object decoded and its return value will be used in place of the
|
| 139 |
+
given :class:`dict`. This can be used to provide custom
|
| 140 |
+
deserializations (e.g. to support JSON-RPC class hinting).
|
| 141 |
+
|
| 142 |
+
*object_pairs_hook* is an optional function that will be called with
|
| 143 |
+
the result of any object literal decode with an ordered list of pairs.
|
| 144 |
+
The return value of *object_pairs_hook* will be used instead of the
|
| 145 |
+
:class:`dict`. This feature can be used to implement custom decoders
|
| 146 |
+
that rely on the order that the key and value pairs are decoded (for
|
| 147 |
+
example, :func:`collections.OrderedDict` will remember the order of
|
| 148 |
+
insertion). If *object_hook* is also defined, the *object_pairs_hook*
|
| 149 |
+
takes priority.
|
| 150 |
+
|
| 151 |
+
*parse_float*, if specified, will be called with the string of every
|
| 152 |
+
JSON float to be decoded. By default, this is equivalent to
|
| 153 |
+
``float(num_str)``. This can be used to use another datatype or parser
|
| 154 |
+
for JSON floats (e.g. :class:`decimal.Decimal`).
|
| 155 |
+
|
| 156 |
+
*parse_int*, if specified, will be called with the string of every
|
| 157 |
+
JSON int to be decoded. By default, this is equivalent to
|
| 158 |
+
``int(num_str)``. This can be used to use another datatype or parser
|
| 159 |
+
for JSON integers (e.g. :class:`float`).
|
| 160 |
+
|
| 161 |
+
If *use_decimal* is true (default: ``False``) then it implies
|
| 162 |
+
parse_float=decimal.Decimal for parity with ``dump``.
|
| 163 |
+
|
| 164 |
+
To use a custom ``HjsonDecoder`` subclass, specify it with the ``cls``
|
| 165 |
+
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
|
| 166 |
+
of subclassing whenever possible.
|
| 167 |
+
|
| 168 |
+
"""
|
| 169 |
+
if (cls is None and encoding is None and object_hook is None and
|
| 170 |
+
parse_int is None and parse_float is None and
|
| 171 |
+
object_pairs_hook is None
|
| 172 |
+
and not use_decimal and not kw):
|
| 173 |
+
return _default_decoder.decode(s)
|
| 174 |
+
if cls is None:
|
| 175 |
+
cls = HjsonDecoder
|
| 176 |
+
if object_hook is not None:
|
| 177 |
+
kw['object_hook'] = object_hook
|
| 178 |
+
if object_pairs_hook is not None:
|
| 179 |
+
kw['object_pairs_hook'] = object_pairs_hook
|
| 180 |
+
if parse_float is not None:
|
| 181 |
+
kw['parse_float'] = parse_float
|
| 182 |
+
if parse_int is not None:
|
| 183 |
+
kw['parse_int'] = parse_int
|
| 184 |
+
if use_decimal:
|
| 185 |
+
if parse_float is not None:
|
| 186 |
+
raise TypeError("use_decimal=True implies parse_float=Decimal")
|
| 187 |
+
kw['parse_float'] = Decimal
|
| 188 |
+
return cls(encoding=encoding, **kw).decode(s)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
_default_hjson_encoder = HjsonEncoder(
|
| 192 |
+
skipkeys=False,
|
| 193 |
+
ensure_ascii=True,
|
| 194 |
+
check_circular=True,
|
| 195 |
+
indent=None,
|
| 196 |
+
encoding='utf-8',
|
| 197 |
+
default=None,
|
| 198 |
+
use_decimal=True,
|
| 199 |
+
namedtuple_as_object=True,
|
| 200 |
+
tuple_as_array=True,
|
| 201 |
+
bigint_as_string=False,
|
| 202 |
+
item_sort_key=None,
|
| 203 |
+
for_json=False,
|
| 204 |
+
int_as_string_bitcount=None,
|
| 205 |
+
)
|
| 206 |
+
|
| 207 |
+
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
|
| 208 |
+
cls=None, indent=None,
|
| 209 |
+
encoding='utf-8', default=None, use_decimal=True,
|
| 210 |
+
namedtuple_as_object=True, tuple_as_array=True,
|
| 211 |
+
bigint_as_string=False, sort_keys=False, item_sort_key=None,
|
| 212 |
+
for_json=False, int_as_string_bitcount=None, **kw):
|
| 213 |
+
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
| 214 |
+
``.write()``-supporting file-like object).
|
| 215 |
+
|
| 216 |
+
If *skipkeys* is true then ``dict`` keys that are not basic types
|
| 217 |
+
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
| 218 |
+
will be skipped instead of raising a ``TypeError``.
|
| 219 |
+
|
| 220 |
+
If *ensure_ascii* is false, then the some chunks written to ``fp``
|
| 221 |
+
may be ``unicode`` instances, subject to normal Python ``str`` to
|
| 222 |
+
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
|
| 223 |
+
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
|
| 224 |
+
to cause an error.
|
| 225 |
+
|
| 226 |
+
If *check_circular* is false, then the circular reference check
|
| 227 |
+
for container types will be skipped and a circular reference will
|
| 228 |
+
result in an ``OverflowError`` (or worse).
|
| 229 |
+
|
| 230 |
+
*indent* defines the amount of whitespace that the JSON array elements
|
| 231 |
+
and object members will be indented for each level of nesting.
|
| 232 |
+
The default is two spaces.
|
| 233 |
+
|
| 234 |
+
*encoding* is the character encoding for str instances, default is UTF-8.
|
| 235 |
+
|
| 236 |
+
*default(obj)* is a function that should return a serializable version
|
| 237 |
+
of obj or raise ``TypeError``. The default simply raises ``TypeError``.
|
| 238 |
+
|
| 239 |
+
If *use_decimal* is true (default: ``True``) then decimal.Decimal
|
| 240 |
+
will be natively serialized to JSON with full precision.
|
| 241 |
+
|
| 242 |
+
If *namedtuple_as_object* is true (default: ``True``),
|
| 243 |
+
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
|
| 244 |
+
as JSON objects.
|
| 245 |
+
|
| 246 |
+
If *tuple_as_array* is true (default: ``True``),
|
| 247 |
+
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
|
| 248 |
+
|
| 249 |
+
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
|
| 250 |
+
or lower than -2**53 will be encoded as strings. This is to avoid the
|
| 251 |
+
rounding that happens in Javascript otherwise. Note that this is still a
|
| 252 |
+
lossy operation that will not round-trip correctly and should be used
|
| 253 |
+
sparingly.
|
| 254 |
+
|
| 255 |
+
If *int_as_string_bitcount* is a positive number (n), then int of size
|
| 256 |
+
greater than or equal to 2**n or lower than or equal to -2**n will be
|
| 257 |
+
encoded as strings.
|
| 258 |
+
|
| 259 |
+
If specified, *item_sort_key* is a callable used to sort the items in
|
| 260 |
+
each dictionary. This is useful if you want to sort items other than
|
| 261 |
+
in alphabetical order by key. This option takes precedence over
|
| 262 |
+
*sort_keys*.
|
| 263 |
+
|
| 264 |
+
If *sort_keys* is true (default: ``False``), the output of dictionaries
|
| 265 |
+
will be sorted by item.
|
| 266 |
+
|
| 267 |
+
If *for_json* is true (default: ``False``), objects with a ``for_json()``
|
| 268 |
+
method will use the return value of that method for encoding as JSON
|
| 269 |
+
instead of the object.
|
| 270 |
+
|
| 271 |
+
To use a custom ``HjsonEncoder`` subclass (e.g. one that overrides the
|
| 272 |
+
``.default()`` method to serialize additional types), specify it with
|
| 273 |
+
the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
|
| 274 |
+
of subclassing whenever possible.
|
| 275 |
+
|
| 276 |
+
"""
|
| 277 |
+
# cached encoder
|
| 278 |
+
if (not skipkeys and ensure_ascii and
|
| 279 |
+
check_circular and
|
| 280 |
+
cls is None and indent is None and
|
| 281 |
+
encoding == 'utf-8' and default is None and use_decimal
|
| 282 |
+
and namedtuple_as_object and tuple_as_array
|
| 283 |
+
and not bigint_as_string and not sort_keys
|
| 284 |
+
and not item_sort_key and not for_json
|
| 285 |
+
and int_as_string_bitcount is None
|
| 286 |
+
and not kw
|
| 287 |
+
):
|
| 288 |
+
iterable = _default_hjson_encoder.iterencode(obj)
|
| 289 |
+
else:
|
| 290 |
+
if cls is None:
|
| 291 |
+
cls = HjsonEncoder
|
| 292 |
+
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
| 293 |
+
check_circular=check_circular, indent=indent,
|
| 294 |
+
encoding=encoding,
|
| 295 |
+
default=default, use_decimal=use_decimal,
|
| 296 |
+
namedtuple_as_object=namedtuple_as_object,
|
| 297 |
+
tuple_as_array=tuple_as_array,
|
| 298 |
+
bigint_as_string=bigint_as_string,
|
| 299 |
+
sort_keys=sort_keys,
|
| 300 |
+
item_sort_key=item_sort_key,
|
| 301 |
+
for_json=for_json,
|
| 302 |
+
int_as_string_bitcount=int_as_string_bitcount,
|
| 303 |
+
**kw).iterencode(obj)
|
| 304 |
+
# could accelerate with writelines in some versions of Python, at
|
| 305 |
+
# a debuggability cost
|
| 306 |
+
for chunk in iterable:
|
| 307 |
+
fp.write(chunk)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
|
| 311 |
+
cls=None, indent=None,
|
| 312 |
+
encoding='utf-8', default=None, use_decimal=True,
|
| 313 |
+
namedtuple_as_object=True, tuple_as_array=True,
|
| 314 |
+
bigint_as_string=False, sort_keys=False, item_sort_key=None,
|
| 315 |
+
for_json=False, int_as_string_bitcount=None, **kw):
|
| 316 |
+
"""Serialize ``obj`` to a JSON formatted ``str``.
|
| 317 |
+
|
| 318 |
+
If ``skipkeys`` is false then ``dict`` keys that are not basic types
|
| 319 |
+
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
| 320 |
+
will be skipped instead of raising a ``TypeError``.
|
| 321 |
+
|
| 322 |
+
If ``ensure_ascii`` is false, then the return value will be a
|
| 323 |
+
``unicode`` instance subject to normal Python ``str`` to ``unicode``
|
| 324 |
+
coercion rules instead of being escaped to an ASCII ``str``.
|
| 325 |
+
|
| 326 |
+
If ``check_circular`` is false, then the circular reference check
|
| 327 |
+
for container types will be skipped and a circular reference will
|
| 328 |
+
result in an ``OverflowError`` (or worse).
|
| 329 |
+
|
| 330 |
+
*indent* defines the amount of whitespace that the JSON array elements
|
| 331 |
+
and object members will be indented for each level of nesting.
|
| 332 |
+
The default is two spaces.
|
| 333 |
+
|
| 334 |
+
``encoding`` is the character encoding for str instances, default is UTF-8.
|
| 335 |
+
|
| 336 |
+
``default(obj)`` is a function that should return a serializable version
|
| 337 |
+
of obj or raise TypeError. The default simply raises TypeError.
|
| 338 |
+
|
| 339 |
+
If *use_decimal* is true (default: ``True``) then decimal.Decimal
|
| 340 |
+
will be natively serialized to JSON with full precision.
|
| 341 |
+
|
| 342 |
+
If *namedtuple_as_object* is true (default: ``True``),
|
| 343 |
+
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
|
| 344 |
+
as JSON objects.
|
| 345 |
+
|
| 346 |
+
If *tuple_as_array* is true (default: ``True``),
|
| 347 |
+
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
|
| 348 |
+
|
| 349 |
+
If *bigint_as_string* is true (not the default), ints 2**53 and higher
|
| 350 |
+
or lower than -2**53 will be encoded as strings. This is to avoid the
|
| 351 |
+
rounding that happens in Javascript otherwise.
|
| 352 |
+
|
| 353 |
+
If *int_as_string_bitcount* is a positive number (n), then int of size
|
| 354 |
+
greater than or equal to 2**n or lower than or equal to -2**n will be
|
| 355 |
+
encoded as strings.
|
| 356 |
+
|
| 357 |
+
If specified, *item_sort_key* is a callable used to sort the items in
|
| 358 |
+
each dictionary. This is useful if you want to sort items other than
|
| 359 |
+
in alphabetical order by key. This option takes precendence over
|
| 360 |
+
*sort_keys*.
|
| 361 |
+
|
| 362 |
+
If *sort_keys* is true (default: ``False``), the output of dictionaries
|
| 363 |
+
will be sorted by item.
|
| 364 |
+
|
| 365 |
+
If *for_json* is true (default: ``False``), objects with a ``for_json()``
|
| 366 |
+
method will use the return value of that method for encoding as JSON
|
| 367 |
+
instead of the object.
|
| 368 |
+
|
| 369 |
+
To use a custom ``HjsonEncoder`` subclass (e.g. one that overrides the
|
| 370 |
+
``.default()`` method to serialize additional types), specify it with
|
| 371 |
+
the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing
|
| 372 |
+
whenever possible.
|
| 373 |
+
|
| 374 |
+
"""
|
| 375 |
+
# cached encoder
|
| 376 |
+
if (
|
| 377 |
+
not skipkeys and ensure_ascii and
|
| 378 |
+
check_circular and
|
| 379 |
+
cls is None and indent is None and
|
| 380 |
+
encoding == 'utf-8' and default is None and use_decimal
|
| 381 |
+
and namedtuple_as_object and tuple_as_array
|
| 382 |
+
and not bigint_as_string and not sort_keys
|
| 383 |
+
and not item_sort_key and not for_json
|
| 384 |
+
and int_as_string_bitcount is None
|
| 385 |
+
and not kw
|
| 386 |
+
):
|
| 387 |
+
return _default_hjson_encoder.encode(obj)
|
| 388 |
+
if cls is None:
|
| 389 |
+
cls = HjsonEncoder
|
| 390 |
+
return cls(
|
| 391 |
+
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
| 392 |
+
check_circular=check_circular, indent=indent,
|
| 393 |
+
encoding=encoding, default=default,
|
| 394 |
+
use_decimal=use_decimal,
|
| 395 |
+
namedtuple_as_object=namedtuple_as_object,
|
| 396 |
+
tuple_as_array=tuple_as_array,
|
| 397 |
+
bigint_as_string=bigint_as_string,
|
| 398 |
+
sort_keys=sort_keys,
|
| 399 |
+
item_sort_key=item_sort_key,
|
| 400 |
+
for_json=for_json,
|
| 401 |
+
int_as_string_bitcount=int_as_string_bitcount,
|
| 402 |
+
**kw).encode(obj)
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
_default_json_encoder = JSONEncoder(
|
| 407 |
+
skipkeys=False,
|
| 408 |
+
ensure_ascii=True,
|
| 409 |
+
check_circular=True,
|
| 410 |
+
indent=None,
|
| 411 |
+
separators=None,
|
| 412 |
+
encoding='utf-8',
|
| 413 |
+
default=None,
|
| 414 |
+
use_decimal=True,
|
| 415 |
+
namedtuple_as_object=True,
|
| 416 |
+
tuple_as_array=True,
|
| 417 |
+
bigint_as_string=False,
|
| 418 |
+
item_sort_key=None,
|
| 419 |
+
for_json=False,
|
| 420 |
+
int_as_string_bitcount=None,
|
| 421 |
+
)
|
| 422 |
+
|
| 423 |
+
def dumpJSON(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
|
| 424 |
+
cls=None, indent=None, separators=None,
|
| 425 |
+
encoding='utf-8', default=None, use_decimal=True,
|
| 426 |
+
namedtuple_as_object=True, tuple_as_array=True,
|
| 427 |
+
bigint_as_string=False, sort_keys=False, item_sort_key=None,
|
| 428 |
+
for_json=False, int_as_string_bitcount=None, **kw):
|
| 429 |
+
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
| 430 |
+
``.write()``-supporting file-like object).
|
| 431 |
+
|
| 432 |
+
If *skipkeys* is true then ``dict`` keys that are not basic types
|
| 433 |
+
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
| 434 |
+
will be skipped instead of raising a ``TypeError``.
|
| 435 |
+
|
| 436 |
+
If *ensure_ascii* is false, then the some chunks written to ``fp``
|
| 437 |
+
may be ``unicode`` instances, subject to normal Python ``str`` to
|
| 438 |
+
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
|
| 439 |
+
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
|
| 440 |
+
to cause an error.
|
| 441 |
+
|
| 442 |
+
If *check_circular* is false, then the circular reference check
|
| 443 |
+
for container types will be skipped and a circular reference will
|
| 444 |
+
result in an ``OverflowError`` (or worse).
|
| 445 |
+
|
| 446 |
+
If *indent* is a string, then JSON array elements and object members
|
| 447 |
+
will be pretty-printed with a newline followed by that string repeated
|
| 448 |
+
for each level of nesting. ``None`` (the default) selects the most compact
|
| 449 |
+
representation without any newlines. An integer is also accepted
|
| 450 |
+
and is converted to a string with that many spaces.
|
| 451 |
+
|
| 452 |
+
If specified, *separators* should be an
|
| 453 |
+
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
|
| 454 |
+
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
|
| 455 |
+
compact JSON representation, you should specify ``(',', ':')`` to eliminate
|
| 456 |
+
whitespace.
|
| 457 |
+
|
| 458 |
+
*encoding* is the character encoding for str instances, default is UTF-8.
|
| 459 |
+
|
| 460 |
+
*default(obj)* is a function that should return a serializable version
|
| 461 |
+
of obj or raise ``TypeError``. The default simply raises ``TypeError``.
|
| 462 |
+
|
| 463 |
+
If *use_decimal* is true (default: ``True``) then decimal.Decimal
|
| 464 |
+
will be natively serialized to JSON with full precision.
|
| 465 |
+
|
| 466 |
+
If *namedtuple_as_object* is true (default: ``True``),
|
| 467 |
+
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
|
| 468 |
+
as JSON objects.
|
| 469 |
+
|
| 470 |
+
If *tuple_as_array* is true (default: ``True``),
|
| 471 |
+
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
|
| 472 |
+
|
| 473 |
+
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
|
| 474 |
+
or lower than -2**53 will be encoded as strings. This is to avoid the
|
| 475 |
+
rounding that happens in Javascript otherwise. Note that this is still a
|
| 476 |
+
lossy operation that will not round-trip correctly and should be used
|
| 477 |
+
sparingly.
|
| 478 |
+
|
| 479 |
+
If *int_as_string_bitcount* is a positive number (n), then int of size
|
| 480 |
+
greater than or equal to 2**n or lower than or equal to -2**n will be
|
| 481 |
+
encoded as strings.
|
| 482 |
+
|
| 483 |
+
If specified, *item_sort_key* is a callable used to sort the items in
|
| 484 |
+
each dictionary. This is useful if you want to sort items other than
|
| 485 |
+
in alphabetical order by key. This option takes precedence over
|
| 486 |
+
*sort_keys*.
|
| 487 |
+
|
| 488 |
+
If *sort_keys* is true (default: ``False``), the output of dictionaries
|
| 489 |
+
will be sorted by item.
|
| 490 |
+
|
| 491 |
+
If *for_json* is true (default: ``False``), objects with a ``for_json()``
|
| 492 |
+
method will use the return value of that method for encoding as JSON
|
| 493 |
+
instead of the object.
|
| 494 |
+
|
| 495 |
+
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
| 496 |
+
``.default()`` method to serialize additional types), specify it with
|
| 497 |
+
the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
|
| 498 |
+
of subclassing whenever possible.
|
| 499 |
+
|
| 500 |
+
"""
|
| 501 |
+
# cached encoder
|
| 502 |
+
if (not skipkeys and ensure_ascii and
|
| 503 |
+
check_circular and
|
| 504 |
+
cls is None and indent is None and separators is None and
|
| 505 |
+
encoding == 'utf-8' and default is None and use_decimal
|
| 506 |
+
and namedtuple_as_object and tuple_as_array
|
| 507 |
+
and not bigint_as_string and not sort_keys
|
| 508 |
+
and not item_sort_key and not for_json
|
| 509 |
+
and int_as_string_bitcount is None
|
| 510 |
+
and not kw
|
| 511 |
+
):
|
| 512 |
+
iterable = _default_json_encoder.iterencode(obj)
|
| 513 |
+
else:
|
| 514 |
+
if cls is None:
|
| 515 |
+
cls = JSONEncoder
|
| 516 |
+
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
| 517 |
+
check_circular=check_circular, indent=indent,
|
| 518 |
+
separators=separators, encoding=encoding,
|
| 519 |
+
default=default, use_decimal=use_decimal,
|
| 520 |
+
namedtuple_as_object=namedtuple_as_object,
|
| 521 |
+
tuple_as_array=tuple_as_array,
|
| 522 |
+
bigint_as_string=bigint_as_string,
|
| 523 |
+
sort_keys=sort_keys,
|
| 524 |
+
item_sort_key=item_sort_key,
|
| 525 |
+
for_json=for_json,
|
| 526 |
+
int_as_string_bitcount=int_as_string_bitcount,
|
| 527 |
+
**kw).iterencode(obj)
|
| 528 |
+
# could accelerate with writelines in some versions of Python, at
|
| 529 |
+
# a debuggability cost
|
| 530 |
+
for chunk in iterable:
|
| 531 |
+
fp.write(chunk)
|
| 532 |
+
|
| 533 |
+
|
| 534 |
+
def dumpsJSON(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
|
| 535 |
+
cls=None, indent=None, separators=None,
|
| 536 |
+
encoding='utf-8', default=None, use_decimal=True,
|
| 537 |
+
namedtuple_as_object=True, tuple_as_array=True,
|
| 538 |
+
bigint_as_string=False, sort_keys=False, item_sort_key=None,
|
| 539 |
+
for_json=False, int_as_string_bitcount=None, **kw):
|
| 540 |
+
"""Serialize ``obj`` to a JSON formatted ``str``.
|
| 541 |
+
|
| 542 |
+
If ``skipkeys`` is false then ``dict`` keys that are not basic types
|
| 543 |
+
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
| 544 |
+
will be skipped instead of raising a ``TypeError``.
|
| 545 |
+
|
| 546 |
+
If ``ensure_ascii`` is false, then the return value will be a
|
| 547 |
+
``unicode`` instance subject to normal Python ``str`` to ``unicode``
|
| 548 |
+
coercion rules instead of being escaped to an ASCII ``str``.
|
| 549 |
+
|
| 550 |
+
If ``check_circular`` is false, then the circular reference check
|
| 551 |
+
for container types will be skipped and a circular reference will
|
| 552 |
+
result in an ``OverflowError`` (or worse).
|
| 553 |
+
|
| 554 |
+
If ``indent`` is a string, then JSON array elements and object members
|
| 555 |
+
will be pretty-printed with a newline followed by that string repeated
|
| 556 |
+
for each level of nesting. ``None`` (the default) selects the most compact
|
| 557 |
+
representation without any newlines. An integer is also accepted
|
| 558 |
+
and is converted to a string with that many spaces.
|
| 559 |
+
|
| 560 |
+
If specified, ``separators`` should be an
|
| 561 |
+
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
|
| 562 |
+
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
|
| 563 |
+
compact JSON representation, you should specify ``(',', ':')`` to eliminate
|
| 564 |
+
whitespace.
|
| 565 |
+
|
| 566 |
+
``encoding`` is the character encoding for str instances, default is UTF-8.
|
| 567 |
+
|
| 568 |
+
``default(obj)`` is a function that should return a serializable version
|
| 569 |
+
of obj or raise TypeError. The default simply raises TypeError.
|
| 570 |
+
|
| 571 |
+
If *use_decimal* is true (default: ``True``) then decimal.Decimal
|
| 572 |
+
will be natively serialized to JSON with full precision.
|
| 573 |
+
|
| 574 |
+
If *namedtuple_as_object* is true (default: ``True``),
|
| 575 |
+
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
|
| 576 |
+
as JSON objects.
|
| 577 |
+
|
| 578 |
+
If *tuple_as_array* is true (default: ``True``),
|
| 579 |
+
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
|
| 580 |
+
|
| 581 |
+
If *bigint_as_string* is true (not the default), ints 2**53 and higher
|
| 582 |
+
or lower than -2**53 will be encoded as strings. This is to avoid the
|
| 583 |
+
rounding that happens in Javascript otherwise.
|
| 584 |
+
|
| 585 |
+
If *int_as_string_bitcount* is a positive number (n), then int of size
|
| 586 |
+
greater than or equal to 2**n or lower than or equal to -2**n will be
|
| 587 |
+
encoded as strings.
|
| 588 |
+
|
| 589 |
+
If specified, *item_sort_key* is a callable used to sort the items in
|
| 590 |
+
each dictionary. This is useful if you want to sort items other than
|
| 591 |
+
in alphabetical order by key. This option takes precendence over
|
| 592 |
+
*sort_keys*.
|
| 593 |
+
|
| 594 |
+
If *sort_keys* is true (default: ``False``), the output of dictionaries
|
| 595 |
+
will be sorted by item.
|
| 596 |
+
|
| 597 |
+
If *for_json* is true (default: ``False``), objects with a ``for_json()``
|
| 598 |
+
method will use the return value of that method for encoding as JSON
|
| 599 |
+
instead of the object.
|
| 600 |
+
|
| 601 |
+
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
| 602 |
+
``.default()`` method to serialize additional types), specify it with
|
| 603 |
+
the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing
|
| 604 |
+
whenever possible.
|
| 605 |
+
|
| 606 |
+
"""
|
| 607 |
+
# cached encoder
|
| 608 |
+
if (
|
| 609 |
+
not skipkeys and ensure_ascii and
|
| 610 |
+
check_circular and
|
| 611 |
+
cls is None and indent is None and separators is None and
|
| 612 |
+
encoding == 'utf-8' and default is None and use_decimal
|
| 613 |
+
and namedtuple_as_object and tuple_as_array
|
| 614 |
+
and not bigint_as_string and not sort_keys
|
| 615 |
+
and not item_sort_key and not for_json
|
| 616 |
+
and int_as_string_bitcount is None
|
| 617 |
+
and not kw
|
| 618 |
+
):
|
| 619 |
+
return _default_json_encoder.encode(obj)
|
| 620 |
+
if cls is None:
|
| 621 |
+
cls = JSONEncoder
|
| 622 |
+
return cls(
|
| 623 |
+
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
| 624 |
+
check_circular=check_circular, indent=indent,
|
| 625 |
+
separators=separators, encoding=encoding, default=default,
|
| 626 |
+
use_decimal=use_decimal,
|
| 627 |
+
namedtuple_as_object=namedtuple_as_object,
|
| 628 |
+
tuple_as_array=tuple_as_array,
|
| 629 |
+
bigint_as_string=bigint_as_string,
|
| 630 |
+
sort_keys=sort_keys,
|
| 631 |
+
item_sort_key=item_sort_key,
|
| 632 |
+
for_json=for_json,
|
| 633 |
+
int_as_string_bitcount=int_as_string_bitcount,
|
| 634 |
+
**kw).encode(obj)
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
def simple_first(kv):
|
| 639 |
+
"""Helper function to pass to item_sort_key to sort simple
|
| 640 |
+
elements to the top, then container elements.
|
| 641 |
+
"""
|
| 642 |
+
return (isinstance(kv[1], (list, dict, tuple)), kv[0])
|
evalkit_tf446/lib/python3.10/site-packages/hjson/compat.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Python 3 compatibility shims
|
| 2 |
+
"""
|
| 3 |
+
import sys
|
| 4 |
+
if sys.version_info[0] < 3:
|
| 5 |
+
PY3 = False
|
| 6 |
+
def b(s):
|
| 7 |
+
return s
|
| 8 |
+
def u(s):
|
| 9 |
+
return unicode(s, 'unicode_escape')
|
| 10 |
+
import cStringIO as StringIO
|
| 11 |
+
StringIO = BytesIO = StringIO.StringIO
|
| 12 |
+
text_type = unicode
|
| 13 |
+
binary_type = str
|
| 14 |
+
string_types = (basestring,)
|
| 15 |
+
integer_types = (int, long)
|
| 16 |
+
unichr = unichr
|
| 17 |
+
reload_module = reload
|
| 18 |
+
def fromhex(s):
|
| 19 |
+
return s.decode('hex')
|
| 20 |
+
|
| 21 |
+
else:
|
| 22 |
+
PY3 = True
|
| 23 |
+
if sys.version_info[:2] >= (3, 4):
|
| 24 |
+
from importlib import reload as reload_module
|
| 25 |
+
else:
|
| 26 |
+
from imp import reload as reload_module
|
| 27 |
+
import codecs
|
| 28 |
+
def b(s):
|
| 29 |
+
return codecs.latin_1_encode(s)[0]
|
| 30 |
+
def u(s):
|
| 31 |
+
return s
|
| 32 |
+
import io
|
| 33 |
+
StringIO = io.StringIO
|
| 34 |
+
BytesIO = io.BytesIO
|
| 35 |
+
text_type = str
|
| 36 |
+
binary_type = bytes
|
| 37 |
+
string_types = (str,)
|
| 38 |
+
integer_types = (int,)
|
| 39 |
+
|
| 40 |
+
def unichr(s):
|
| 41 |
+
return u(chr(s))
|
| 42 |
+
|
| 43 |
+
def fromhex(s):
|
| 44 |
+
return bytes.fromhex(s)
|
| 45 |
+
|
| 46 |
+
long_type = integer_types[-1]
|
evalkit_tf446/lib/python3.10/site-packages/hjson/decoder.py
ADDED
|
@@ -0,0 +1,569 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of HjsonDecoder
|
| 2 |
+
"""
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
import re
|
| 5 |
+
import sys
|
| 6 |
+
import struct
|
| 7 |
+
from .compat import fromhex, b, u, text_type, binary_type, PY3, unichr
|
| 8 |
+
from .scanner import HjsonDecodeError
|
| 9 |
+
|
| 10 |
+
# NOTE (3.1.0): HjsonDecodeError may still be imported from this module for
|
| 11 |
+
# compatibility, but it was never in the __all__
|
| 12 |
+
__all__ = ['HjsonDecoder']
|
| 13 |
+
|
| 14 |
+
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
| 15 |
+
|
| 16 |
+
def _floatconstants():
|
| 17 |
+
_BYTES = fromhex('7FF80000000000007FF0000000000000')
|
| 18 |
+
# The struct module in Python 2.4 would get frexp() out of range here
|
| 19 |
+
# when an endian is specified in the format string. Fixed in Python 2.5+
|
| 20 |
+
if sys.byteorder != 'big':
|
| 21 |
+
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
|
| 22 |
+
nan, inf = struct.unpack('dd', _BYTES)
|
| 23 |
+
return nan, inf, -inf
|
| 24 |
+
|
| 25 |
+
NaN, PosInf, NegInf = _floatconstants()
|
| 26 |
+
|
| 27 |
+
WHITESPACE = ' \t\n\r'
|
| 28 |
+
PUNCTUATOR = '{}[],:'
|
| 29 |
+
|
| 30 |
+
NUMBER_RE = re.compile(r'[\t ]*(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?[\t ]*')
|
| 31 |
+
STRINGCHUNK = re.compile(r'(.*?)([\'"\\\x00-\x1f])', FLAGS)
|
| 32 |
+
BACKSLASH = {
|
| 33 |
+
'"': u('"'), '\'': u('\''), '\\': u('\u005c'), '/': u('/'),
|
| 34 |
+
'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'),
|
| 35 |
+
}
|
| 36 |
+
|
| 37 |
+
DEFAULT_ENCODING = "utf-8"
|
| 38 |
+
|
| 39 |
+
def getNext(s, end):
|
| 40 |
+
while 1:
|
| 41 |
+
# Use a slice to prevent IndexError from being raised
|
| 42 |
+
ch = s[end:end + 1]
|
| 43 |
+
# Skip whitespace.
|
| 44 |
+
while ch in WHITESPACE:
|
| 45 |
+
if ch == '': return ch, end
|
| 46 |
+
end += 1
|
| 47 |
+
ch = s[end:end + 1]
|
| 48 |
+
|
| 49 |
+
# Hjson allows comments
|
| 50 |
+
ch2 = s[end + 1:end + 2]
|
| 51 |
+
if ch == '#' or ch == '/' and ch2 == '/':
|
| 52 |
+
end = getEol(s, end)
|
| 53 |
+
elif ch == '/' and ch2 == '*':
|
| 54 |
+
end += 2
|
| 55 |
+
ch = s[end]
|
| 56 |
+
while ch != '' and not (ch == '*' and s[end + 1] == '/'):
|
| 57 |
+
end += 1
|
| 58 |
+
ch = s[end]
|
| 59 |
+
if ch != '':
|
| 60 |
+
end += 2
|
| 61 |
+
else:
|
| 62 |
+
break
|
| 63 |
+
|
| 64 |
+
return ch, end
|
| 65 |
+
|
| 66 |
+
def getEol(s, end):
|
| 67 |
+
# skip until eol
|
| 68 |
+
|
| 69 |
+
while 1:
|
| 70 |
+
ch = s[end:end + 1]
|
| 71 |
+
if ch == '\r' or ch == '\n' or ch == '':
|
| 72 |
+
return end
|
| 73 |
+
end += 1
|
| 74 |
+
|
| 75 |
+
def skipIndent(s, end, n):
|
| 76 |
+
ch = s[end:end + 1]
|
| 77 |
+
while ch != '' and ch in " \t\r" and (n > 0 or n < 0):
|
| 78 |
+
end += 1
|
| 79 |
+
n -= 1
|
| 80 |
+
ch = s[end:end + 1]
|
| 81 |
+
return end
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def scanstring(s, end, encoding=None, strict=True,
|
| 85 |
+
_b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join,
|
| 86 |
+
_PY3=PY3, _maxunicode=sys.maxunicode):
|
| 87 |
+
"""Scan the string s for a JSON string. End is the index of the
|
| 88 |
+
character in s after the quote that started the JSON string.
|
| 89 |
+
Unescapes all valid JSON string escape sequences and raises ValueError
|
| 90 |
+
on attempt to decode an invalid string. If strict is False then literal
|
| 91 |
+
control characters are allowed in the string.
|
| 92 |
+
|
| 93 |
+
Returns a tuple of the decoded string and the index of the character in s
|
| 94 |
+
after the end quote."""
|
| 95 |
+
if encoding is None:
|
| 96 |
+
encoding = DEFAULT_ENCODING
|
| 97 |
+
chunks = []
|
| 98 |
+
_append = chunks.append
|
| 99 |
+
begin = end - 1
|
| 100 |
+
# callers make sure that string starts with " or '
|
| 101 |
+
exitCh = s[begin]
|
| 102 |
+
while 1:
|
| 103 |
+
chunk = _m(s, end)
|
| 104 |
+
if chunk is None:
|
| 105 |
+
raise HjsonDecodeError(
|
| 106 |
+
"Unterminated string starting at", s, begin)
|
| 107 |
+
end = chunk.end()
|
| 108 |
+
content, terminator = chunk.groups()
|
| 109 |
+
# Content is contains zero or more unescaped string characters
|
| 110 |
+
if content:
|
| 111 |
+
if not _PY3 and not isinstance(content, text_type):
|
| 112 |
+
content = text_type(content, encoding)
|
| 113 |
+
_append(content)
|
| 114 |
+
# Terminator is the end of string, a literal control character,
|
| 115 |
+
# or a backslash denoting that an escape sequence follows
|
| 116 |
+
if terminator == exitCh:
|
| 117 |
+
break
|
| 118 |
+
elif terminator == '"' or terminator == '\'':
|
| 119 |
+
_append(terminator)
|
| 120 |
+
continue
|
| 121 |
+
elif terminator != '\\':
|
| 122 |
+
if strict:
|
| 123 |
+
msg = "Invalid control character %r at"
|
| 124 |
+
raise HjsonDecodeError(msg, s, end)
|
| 125 |
+
else:
|
| 126 |
+
_append(terminator)
|
| 127 |
+
continue
|
| 128 |
+
try:
|
| 129 |
+
esc = s[end]
|
| 130 |
+
except IndexError:
|
| 131 |
+
raise HjsonDecodeError(
|
| 132 |
+
"Unterminated string starting at", s, begin)
|
| 133 |
+
# If not a unicode escape sequence, must be in the lookup table
|
| 134 |
+
if esc != 'u':
|
| 135 |
+
try:
|
| 136 |
+
char = _b[esc]
|
| 137 |
+
except KeyError:
|
| 138 |
+
msg = "Invalid \\X escape sequence %r"
|
| 139 |
+
raise HjsonDecodeError(msg, s, end)
|
| 140 |
+
end += 1
|
| 141 |
+
else:
|
| 142 |
+
# Unicode escape sequence
|
| 143 |
+
msg = "Invalid \\uXXXX escape sequence"
|
| 144 |
+
esc = s[end + 1:end + 5]
|
| 145 |
+
escX = esc[1:2]
|
| 146 |
+
if len(esc) != 4 or escX == 'x' or escX == 'X':
|
| 147 |
+
raise HjsonDecodeError(msg, s, end - 1)
|
| 148 |
+
try:
|
| 149 |
+
uni = int(esc, 16)
|
| 150 |
+
except ValueError:
|
| 151 |
+
raise HjsonDecodeError(msg, s, end - 1)
|
| 152 |
+
end += 5
|
| 153 |
+
# Check for surrogate pair on UCS-4 systems
|
| 154 |
+
# Note that this will join high/low surrogate pairs
|
| 155 |
+
# but will also pass unpaired surrogates through
|
| 156 |
+
if (_maxunicode > 65535 and
|
| 157 |
+
uni & 0xfc00 == 0xd800 and
|
| 158 |
+
s[end:end + 2] == '\\u'):
|
| 159 |
+
esc2 = s[end + 2:end + 6]
|
| 160 |
+
escX = esc2[1:2]
|
| 161 |
+
if len(esc2) == 4 and not (escX == 'x' or escX == 'X'):
|
| 162 |
+
try:
|
| 163 |
+
uni2 = int(esc2, 16)
|
| 164 |
+
except ValueError:
|
| 165 |
+
raise HjsonDecodeError(msg, s, end)
|
| 166 |
+
if uni2 & 0xfc00 == 0xdc00:
|
| 167 |
+
uni = 0x10000 + (((uni - 0xd800) << 10) |
|
| 168 |
+
(uni2 - 0xdc00))
|
| 169 |
+
end += 6
|
| 170 |
+
char = unichr(uni)
|
| 171 |
+
# Append the unescaped character
|
| 172 |
+
_append(char)
|
| 173 |
+
return _join(chunks), end
|
| 174 |
+
|
| 175 |
+
def mlscanstring(s, end):
|
| 176 |
+
"""Scan a multiline string"""
|
| 177 |
+
|
| 178 |
+
string = ""
|
| 179 |
+
triple = 0
|
| 180 |
+
|
| 181 |
+
# we are at ''' - get indent
|
| 182 |
+
indent = 0
|
| 183 |
+
while 1:
|
| 184 |
+
ch = s[end-indent-1]
|
| 185 |
+
if ch == '\n': break
|
| 186 |
+
indent += 1
|
| 187 |
+
|
| 188 |
+
# skip white/to (newline)
|
| 189 |
+
end = skipIndent(s, end + 3, -1)
|
| 190 |
+
|
| 191 |
+
ch = s[end]
|
| 192 |
+
if ch == '\n': end = skipIndent(s, end + 1, indent)
|
| 193 |
+
|
| 194 |
+
# When parsing multiline string values, we must look for ' characters
|
| 195 |
+
while 1:
|
| 196 |
+
ch = s[end:end + 1]
|
| 197 |
+
if ch == '':
|
| 198 |
+
raise HjsonDecodeError("Bad multiline string", s, end);
|
| 199 |
+
elif ch == '\'':
|
| 200 |
+
triple += 1
|
| 201 |
+
end += 1
|
| 202 |
+
if triple == 3:
|
| 203 |
+
if string and string[-1] == '\n':
|
| 204 |
+
string = string[:-1] # remove last EOL
|
| 205 |
+
return string, end
|
| 206 |
+
else:
|
| 207 |
+
continue
|
| 208 |
+
else:
|
| 209 |
+
while triple > 0:
|
| 210 |
+
string += '\''
|
| 211 |
+
triple -= 1
|
| 212 |
+
|
| 213 |
+
if ch == '\n':
|
| 214 |
+
string += ch
|
| 215 |
+
end = skipIndent(s, end + 1, indent)
|
| 216 |
+
else:
|
| 217 |
+
if ch != '\r':
|
| 218 |
+
string += ch
|
| 219 |
+
end += 1
|
| 220 |
+
|
| 221 |
+
def scantfnns(context, s, end):
|
| 222 |
+
"""Scan s until eol. return string, True, False or None"""
|
| 223 |
+
|
| 224 |
+
chf, begin = getNext(s, end)
|
| 225 |
+
end = begin
|
| 226 |
+
|
| 227 |
+
if chf in PUNCTUATOR:
|
| 228 |
+
raise HjsonDecodeError("Found a punctuator character when expecting a quoteless string (check your syntax)", s, end);
|
| 229 |
+
|
| 230 |
+
while 1:
|
| 231 |
+
ch = s[end:end + 1]
|
| 232 |
+
|
| 233 |
+
isEol = ch == '\r' or ch == '\n' or ch == ''
|
| 234 |
+
if isEol or ch == ',' or \
|
| 235 |
+
ch == '}' or ch == ']' or \
|
| 236 |
+
ch == '#' or \
|
| 237 |
+
ch == '/' and (s[end + 1:end + 2] == '/' or s[end + 1:end + 2] == '*'):
|
| 238 |
+
|
| 239 |
+
m = None
|
| 240 |
+
mend = end
|
| 241 |
+
if next: mend -= 1
|
| 242 |
+
|
| 243 |
+
if chf == 'n' and s[begin:end].strip() == 'null':
|
| 244 |
+
return None, end
|
| 245 |
+
elif chf == 't' and s[begin:end].strip() == 'true':
|
| 246 |
+
return True, end
|
| 247 |
+
elif chf == 'f' and s[begin:end].strip() == 'false':
|
| 248 |
+
return False, end
|
| 249 |
+
elif chf == '-' or chf >= '0' and chf <= '9':
|
| 250 |
+
m = NUMBER_RE.match(s, begin)
|
| 251 |
+
|
| 252 |
+
if m is not None and m.end() == end:
|
| 253 |
+
integer, frac, exp = m.groups()
|
| 254 |
+
if frac or exp:
|
| 255 |
+
res = context.parse_float(integer + (frac or '') + (exp or ''))
|
| 256 |
+
if int(res) == res and abs(res)<1e10: res = int(res)
|
| 257 |
+
else:
|
| 258 |
+
res = context.parse_int(integer)
|
| 259 |
+
return res, end
|
| 260 |
+
|
| 261 |
+
if isEol:
|
| 262 |
+
return s[begin:end].strip(), end
|
| 263 |
+
|
| 264 |
+
end += 1
|
| 265 |
+
|
| 266 |
+
def scanKeyName(s, end, encoding=None, strict=True):
|
| 267 |
+
"""Scan the string s for a JSON/Hjson key. see scanstring"""
|
| 268 |
+
|
| 269 |
+
ch, end = getNext(s, end)
|
| 270 |
+
|
| 271 |
+
if ch == '"' or ch == '\'':
|
| 272 |
+
return scanstring(s, end + 1, encoding, strict)
|
| 273 |
+
|
| 274 |
+
begin = end
|
| 275 |
+
space = -1
|
| 276 |
+
while 1:
|
| 277 |
+
ch = s[end:end + 1]
|
| 278 |
+
|
| 279 |
+
if ch == '':
|
| 280 |
+
raise HjsonDecodeError("Bad key name (eof)", s, end);
|
| 281 |
+
elif ch == ':':
|
| 282 |
+
if begin == end:
|
| 283 |
+
raise HjsonDecodeError("Found ':' but no key name (for an empty key name use quotes)", s, begin)
|
| 284 |
+
elif space >= 0:
|
| 285 |
+
if space != end - 1: raise HjsonDecodeError("Found whitespace in your key name (use quotes to include)", s, space)
|
| 286 |
+
return s[begin:end].rstrip(), end
|
| 287 |
+
else:
|
| 288 |
+
return s[begin:end], end
|
| 289 |
+
elif ch in WHITESPACE:
|
| 290 |
+
if space < 0 or space == end - 1: space = end
|
| 291 |
+
elif ch == '{' or ch == '}' or ch == '[' or ch == ']' or ch == ',':
|
| 292 |
+
raise HjsonDecodeError("Found '" + ch + "' where a key name was expected (check your syntax or use quotes if the key name includes {}[],: or whitespace)", s, begin)
|
| 293 |
+
end += 1
|
| 294 |
+
|
| 295 |
+
def make_scanner(context):
|
| 296 |
+
parse_object = context.parse_object
|
| 297 |
+
parse_array = context.parse_array
|
| 298 |
+
parse_string = context.parse_string
|
| 299 |
+
parse_mlstring = context.parse_mlstring
|
| 300 |
+
parse_tfnns = context.parse_tfnns
|
| 301 |
+
encoding = context.encoding
|
| 302 |
+
strict = context.strict
|
| 303 |
+
object_hook = context.object_hook
|
| 304 |
+
object_pairs_hook = context.object_pairs_hook
|
| 305 |
+
memo = context.memo
|
| 306 |
+
|
| 307 |
+
def _scan_once(string, idx):
|
| 308 |
+
try:
|
| 309 |
+
ch = string[idx]
|
| 310 |
+
except IndexError:
|
| 311 |
+
raise HjsonDecodeError('Expecting value', string, idx)
|
| 312 |
+
|
| 313 |
+
if ch == '"' or ch == '\'':
|
| 314 |
+
if string[idx:idx + 3] == '\'\'\'':
|
| 315 |
+
return parse_mlstring(string, idx)
|
| 316 |
+
else:
|
| 317 |
+
return parse_string(string, idx + 1, encoding, strict)
|
| 318 |
+
elif ch == '{':
|
| 319 |
+
return parse_object((string, idx + 1), encoding, strict,
|
| 320 |
+
_scan_once, object_hook, object_pairs_hook, memo)
|
| 321 |
+
elif ch == '[':
|
| 322 |
+
return parse_array((string, idx + 1), _scan_once)
|
| 323 |
+
|
| 324 |
+
return parse_tfnns(context, string, idx)
|
| 325 |
+
|
| 326 |
+
def scan_once(string, idx):
|
| 327 |
+
if idx < 0: raise HjsonDecodeError('Expecting value', string, idx)
|
| 328 |
+
try:
|
| 329 |
+
return _scan_once(string, idx)
|
| 330 |
+
finally:
|
| 331 |
+
memo.clear()
|
| 332 |
+
|
| 333 |
+
def scan_object_once(string, idx):
|
| 334 |
+
if idx < 0: raise HjsonDecodeError('Expecting value', string, idx)
|
| 335 |
+
try:
|
| 336 |
+
return parse_object((string, idx), encoding, strict,
|
| 337 |
+
_scan_once, object_hook, object_pairs_hook, memo, True)
|
| 338 |
+
finally:
|
| 339 |
+
memo.clear()
|
| 340 |
+
|
| 341 |
+
return scan_once, scan_object_once
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def JSONObject(state, encoding, strict, scan_once, object_hook,
|
| 345 |
+
object_pairs_hook, memo=None, objectWithoutBraces=False):
|
| 346 |
+
(s, end) = state
|
| 347 |
+
# Backwards compatibility
|
| 348 |
+
if memo is None:
|
| 349 |
+
memo = {}
|
| 350 |
+
memo_get = memo.setdefault
|
| 351 |
+
pairs = []
|
| 352 |
+
|
| 353 |
+
ch, end = getNext(s, end)
|
| 354 |
+
|
| 355 |
+
# Trivial empty object
|
| 356 |
+
if not objectWithoutBraces and ch == '}':
|
| 357 |
+
if object_pairs_hook is not None:
|
| 358 |
+
result = object_pairs_hook(pairs)
|
| 359 |
+
return result, end + 1
|
| 360 |
+
pairs = {}
|
| 361 |
+
if object_hook is not None:
|
| 362 |
+
pairs = object_hook(pairs)
|
| 363 |
+
return pairs, end + 1
|
| 364 |
+
|
| 365 |
+
while True:
|
| 366 |
+
key, end = scanKeyName(s, end, encoding, strict)
|
| 367 |
+
key = memo_get(key, key)
|
| 368 |
+
|
| 369 |
+
ch, end = getNext(s, end)
|
| 370 |
+
if ch != ':':
|
| 371 |
+
raise HjsonDecodeError("Expecting ':' delimiter", s, end)
|
| 372 |
+
|
| 373 |
+
ch, end = getNext(s, end + 1)
|
| 374 |
+
|
| 375 |
+
value, end = scan_once(s, end)
|
| 376 |
+
pairs.append((key, value))
|
| 377 |
+
|
| 378 |
+
ch, end = getNext(s, end)
|
| 379 |
+
|
| 380 |
+
if ch == ',':
|
| 381 |
+
ch, end = getNext(s, end + 1)
|
| 382 |
+
|
| 383 |
+
if objectWithoutBraces:
|
| 384 |
+
if ch == '': break;
|
| 385 |
+
else:
|
| 386 |
+
if ch == '}':
|
| 387 |
+
end += 1
|
| 388 |
+
break
|
| 389 |
+
|
| 390 |
+
ch, end = getNext(s, end)
|
| 391 |
+
|
| 392 |
+
if object_pairs_hook is not None:
|
| 393 |
+
result = object_pairs_hook(pairs)
|
| 394 |
+
return result, end
|
| 395 |
+
pairs = dict(pairs)
|
| 396 |
+
if object_hook is not None:
|
| 397 |
+
pairs = object_hook(pairs)
|
| 398 |
+
return pairs, end
|
| 399 |
+
|
| 400 |
+
def JSONArray(state, scan_once):
|
| 401 |
+
(s, end) = state
|
| 402 |
+
values = []
|
| 403 |
+
|
| 404 |
+
ch, end = getNext(s, end)
|
| 405 |
+
|
| 406 |
+
# Look-ahead for trivial empty array
|
| 407 |
+
if ch == ']':
|
| 408 |
+
return values, end + 1
|
| 409 |
+
elif ch == '':
|
| 410 |
+
raise HjsonDecodeError("End of input while parsing an array (did you forget a closing ']'?)", s, end)
|
| 411 |
+
_append = values.append
|
| 412 |
+
while True:
|
| 413 |
+
value, end = scan_once(s, end)
|
| 414 |
+
_append(value)
|
| 415 |
+
|
| 416 |
+
ch, end = getNext(s, end)
|
| 417 |
+
if ch == ',':
|
| 418 |
+
ch, end = getNext(s, end + 1)
|
| 419 |
+
|
| 420 |
+
if ch == ']':
|
| 421 |
+
end += 1
|
| 422 |
+
break
|
| 423 |
+
|
| 424 |
+
ch, end = getNext(s, end)
|
| 425 |
+
|
| 426 |
+
return values, end
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
class HjsonDecoder(object):
|
| 430 |
+
"""Hjson decoder
|
| 431 |
+
|
| 432 |
+
Performs the following translations in decoding by default:
|
| 433 |
+
|
| 434 |
+
+---------------+-------------------+
|
| 435 |
+
| JSON | Python |
|
| 436 |
+
+===============+===================+
|
| 437 |
+
| object | dict |
|
| 438 |
+
+---------------+-------------------+
|
| 439 |
+
| array | list |
|
| 440 |
+
+---------------+-------------------+
|
| 441 |
+
| string | str, unicode |
|
| 442 |
+
+---------------+-------------------+
|
| 443 |
+
| number (int) | int, long |
|
| 444 |
+
+---------------+-------------------+
|
| 445 |
+
| number (real) | float |
|
| 446 |
+
+---------------+-------------------+
|
| 447 |
+
| true | True |
|
| 448 |
+
+---------------+-------------------+
|
| 449 |
+
| false | False |
|
| 450 |
+
+---------------+-------------------+
|
| 451 |
+
| null | None |
|
| 452 |
+
+---------------+-------------------+
|
| 453 |
+
|
| 454 |
+
"""
|
| 455 |
+
|
| 456 |
+
def __init__(self, encoding=None, object_hook=None, parse_float=None,
|
| 457 |
+
parse_int=None, strict=True,
|
| 458 |
+
object_pairs_hook=None):
|
| 459 |
+
"""
|
| 460 |
+
*encoding* determines the encoding used to interpret any
|
| 461 |
+
:class:`str` objects decoded by this instance (``'utf-8'`` by
|
| 462 |
+
default). It has no effect when decoding :class:`unicode` objects.
|
| 463 |
+
|
| 464 |
+
Note that currently only encodings that are a superset of ASCII work,
|
| 465 |
+
strings of other encodings should be passed in as :class:`unicode`.
|
| 466 |
+
|
| 467 |
+
*object_hook*, if specified, will be called with the result of every
|
| 468 |
+
JSON object decoded and its return value will be used in place of the
|
| 469 |
+
given :class:`dict`. This can be used to provide custom
|
| 470 |
+
deserializations (e.g. to support JSON-RPC class hinting).
|
| 471 |
+
|
| 472 |
+
*object_pairs_hook* is an optional function that will be called with
|
| 473 |
+
the result of any object literal decode with an ordered list of pairs.
|
| 474 |
+
The return value of *object_pairs_hook* will be used instead of the
|
| 475 |
+
:class:`dict`. This feature can be used to implement custom decoders
|
| 476 |
+
that rely on the order that the key and value pairs are decoded (for
|
| 477 |
+
example, :func:`collections.OrderedDict` will remember the order of
|
| 478 |
+
insertion). If *object_hook* is also defined, the *object_pairs_hook*
|
| 479 |
+
takes priority.
|
| 480 |
+
|
| 481 |
+
*parse_float*, if specified, will be called with the string of every
|
| 482 |
+
JSON float to be decoded. By default, this is equivalent to
|
| 483 |
+
``float(num_str)``. This can be used to use another datatype or parser
|
| 484 |
+
for JSON floats (e.g. :class:`decimal.Decimal`).
|
| 485 |
+
|
| 486 |
+
*parse_int*, if specified, will be called with the string of every
|
| 487 |
+
JSON int to be decoded. By default, this is equivalent to
|
| 488 |
+
``int(num_str)``. This can be used to use another datatype or parser
|
| 489 |
+
for JSON integers (e.g. :class:`float`).
|
| 490 |
+
|
| 491 |
+
*strict* controls the parser's behavior when it encounters an
|
| 492 |
+
invalid control character in a string. The default setting of
|
| 493 |
+
``True`` means that unescaped control characters are parse errors, if
|
| 494 |
+
``False`` then control characters will be allowed in strings.
|
| 495 |
+
|
| 496 |
+
"""
|
| 497 |
+
if encoding is None:
|
| 498 |
+
encoding = DEFAULT_ENCODING
|
| 499 |
+
self.encoding = encoding
|
| 500 |
+
self.object_hook = object_hook
|
| 501 |
+
self.object_pairs_hook = object_pairs_hook
|
| 502 |
+
self.parse_float = parse_float or float
|
| 503 |
+
self.parse_int = parse_int or int
|
| 504 |
+
self.strict = strict
|
| 505 |
+
self.parse_object = JSONObject
|
| 506 |
+
self.parse_array = JSONArray
|
| 507 |
+
self.parse_string = scanstring
|
| 508 |
+
self.parse_mlstring = mlscanstring
|
| 509 |
+
self.parse_tfnns = scantfnns
|
| 510 |
+
self.memo = {}
|
| 511 |
+
(self.scan_once, self.scan_object_once) = make_scanner(self)
|
| 512 |
+
|
| 513 |
+
def decode(self, s, _PY3=PY3):
|
| 514 |
+
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
|
| 515 |
+
instance containing a JSON document)
|
| 516 |
+
|
| 517 |
+
"""
|
| 518 |
+
if _PY3 and isinstance(s, binary_type):
|
| 519 |
+
s = s.decode(self.encoding)
|
| 520 |
+
obj, end = self.raw_decode(s)
|
| 521 |
+
ch, end = getNext(s, end)
|
| 522 |
+
if end != len(s):
|
| 523 |
+
raise HjsonDecodeError("Extra data", s, end, len(s))
|
| 524 |
+
return obj
|
| 525 |
+
|
| 526 |
+
def raw_decode(self, s, idx=0, _PY3=PY3):
|
| 527 |
+
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode``
|
| 528 |
+
beginning with a JSON document) and return a 2-tuple of the Python
|
| 529 |
+
representation and the index in ``s`` where the document ended.
|
| 530 |
+
Optionally, ``idx`` can be used to specify an offset in ``s`` where
|
| 531 |
+
the JSON document begins.
|
| 532 |
+
|
| 533 |
+
This can be used to decode a JSON document from a string that may
|
| 534 |
+
have extraneous data at the end.
|
| 535 |
+
|
| 536 |
+
"""
|
| 537 |
+
if idx < 0:
|
| 538 |
+
# Ensure that raw_decode bails on negative indexes, the regex
|
| 539 |
+
# would otherwise mask this behavior. #98
|
| 540 |
+
raise HjsonDecodeError('Expecting value', s, idx)
|
| 541 |
+
if _PY3 and not isinstance(s, text_type):
|
| 542 |
+
raise TypeError("Input string must be text")
|
| 543 |
+
# strip UTF-8 bom
|
| 544 |
+
if len(s) > idx:
|
| 545 |
+
ord0 = ord(s[idx])
|
| 546 |
+
if ord0 == 0xfeff:
|
| 547 |
+
idx += 1
|
| 548 |
+
elif ord0 == 0xef and s[idx:idx + 3] == '\xef\xbb\xbf':
|
| 549 |
+
idx += 3
|
| 550 |
+
|
| 551 |
+
start_index = idx
|
| 552 |
+
ch, idx = getNext(s, idx)
|
| 553 |
+
|
| 554 |
+
# If blank or comment only file, return dict
|
| 555 |
+
if start_index == 0 and ch == '':
|
| 556 |
+
return {}, 0
|
| 557 |
+
|
| 558 |
+
if ch == '{' or ch == '[':
|
| 559 |
+
return self.scan_once(s, idx)
|
| 560 |
+
else:
|
| 561 |
+
# assume we have a root object without braces
|
| 562 |
+
try:
|
| 563 |
+
return self.scan_object_once(s, idx)
|
| 564 |
+
except HjsonDecodeError as e:
|
| 565 |
+
# test if we are dealing with a single JSON value instead (true/false/null/num/"")
|
| 566 |
+
try:
|
| 567 |
+
return self.scan_once(s, idx)
|
| 568 |
+
except:
|
| 569 |
+
raise e
|
evalkit_tf446/lib/python3.10/site-packages/hjson/encoder.py
ADDED
|
@@ -0,0 +1,521 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of JSONEncoder
|
| 2 |
+
"""
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
import re
|
| 5 |
+
from operator import itemgetter
|
| 6 |
+
from decimal import Decimal
|
| 7 |
+
from .compat import u, unichr, binary_type, string_types, integer_types, PY3
|
| 8 |
+
from .decoder import PosInf
|
| 9 |
+
|
| 10 |
+
#ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
|
| 11 |
+
# This is required because u() will mangle the string and ur'' isn't valid
|
| 12 |
+
# python3 syntax
|
| 13 |
+
ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]')
|
| 14 |
+
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
|
| 15 |
+
HAS_UTF8 = re.compile(r'[\x80-\xff]')
|
| 16 |
+
ESCAPE_DCT = {
|
| 17 |
+
'\\': '\\\\',
|
| 18 |
+
'"': '\\"',
|
| 19 |
+
'\b': '\\b',
|
| 20 |
+
'\f': '\\f',
|
| 21 |
+
'\n': '\\n',
|
| 22 |
+
'\r': '\\r',
|
| 23 |
+
'\t': '\\t',
|
| 24 |
+
}
|
| 25 |
+
for i in range(0x20):
|
| 26 |
+
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
|
| 27 |
+
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
|
| 28 |
+
for i in [0x2028, 0x2029]:
|
| 29 |
+
ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))
|
| 30 |
+
|
| 31 |
+
FLOAT_REPR = repr
|
| 32 |
+
|
| 33 |
+
def encode_basestring(s, _PY3=PY3, _q=u('"')):
|
| 34 |
+
"""Return a JSON representation of a Python string
|
| 35 |
+
|
| 36 |
+
"""
|
| 37 |
+
if _PY3:
|
| 38 |
+
if isinstance(s, binary_type):
|
| 39 |
+
s = s.decode('utf-8')
|
| 40 |
+
else:
|
| 41 |
+
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
|
| 42 |
+
s = s.decode('utf-8')
|
| 43 |
+
def replace(match):
|
| 44 |
+
return ESCAPE_DCT[match.group(0)]
|
| 45 |
+
return _q + ESCAPE.sub(replace, s) + _q
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def py_encode_basestring_ascii(s, _PY3=PY3):
|
| 49 |
+
"""Return an ASCII-only JSON representation of a Python string
|
| 50 |
+
|
| 51 |
+
"""
|
| 52 |
+
if _PY3:
|
| 53 |
+
if isinstance(s, binary_type):
|
| 54 |
+
s = s.decode('utf-8')
|
| 55 |
+
else:
|
| 56 |
+
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
|
| 57 |
+
s = s.decode('utf-8')
|
| 58 |
+
def replace(match):
|
| 59 |
+
s = match.group(0)
|
| 60 |
+
try:
|
| 61 |
+
return ESCAPE_DCT[s]
|
| 62 |
+
except KeyError:
|
| 63 |
+
n = ord(s)
|
| 64 |
+
if n < 0x10000:
|
| 65 |
+
#return '\\u{0:04x}'.format(n)
|
| 66 |
+
return '\\u%04x' % (n,)
|
| 67 |
+
else:
|
| 68 |
+
# surrogate pair
|
| 69 |
+
n -= 0x10000
|
| 70 |
+
s1 = 0xd800 | ((n >> 10) & 0x3ff)
|
| 71 |
+
s2 = 0xdc00 | (n & 0x3ff)
|
| 72 |
+
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
|
| 73 |
+
return '\\u%04x\\u%04x' % (s1, s2)
|
| 74 |
+
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
encode_basestring_ascii = (
|
| 78 |
+
py_encode_basestring_ascii)
|
| 79 |
+
|
| 80 |
+
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict, namedtuple  | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    """
    # Class-level defaults; __init__ may override them per instance when
    # explicit separators or an indent are given.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=True, namedtuple_as_object=True,
            tuple_as_array=True, bigint_as_string=False,
            item_sort_key=None, for_json=False,
            int_as_string_bitcount=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of hjson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be an (item_separator, key_separator)
        tuple. The default is (', ', ': ') if *indent* is ``None`` and
        (',', ': ') otherwise. To get the most compact JSON representation,
        you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (not the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        If namedtuple_as_object is true (the default), objects with
        ``_asdict()`` methods will be encoded as JSON objects.

        If tuple_as_array is true (the default), tuple (and subclasses) will
        be encoded as JSON arrays.

        If bigint_as_string is true (not the default), ints 2**53 and higher
        or lower than -2**53 will be encoded as strings. This is to avoid the
        rounding that happens in Javascript otherwise.

        If int_as_string_bitcount is a positive number (n), then int of size
        greater than or equal to 2**n or lower than or equal to -2**n will be
        encoded as strings.

        If specified, item_sort_key is a callable used to sort the items in
        each dictionary. This is useful if you want to sort items other than
        in alphabetical order by key.

        If for_json is true (not the default), objects with a ``for_json()``
        method will use the return value of that method for encoding as JSON
        instead of the object.

        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        self.namedtuple_as_object = namedtuple_as_object
        self.tuple_as_array = tuple_as_array
        self.bigint_as_string = bigint_as_string
        self.item_sort_key = item_sort_key
        self.for_json = for_json
        self.int_as_string_bitcount = int_as_string_bitcount
        # Backwards compatibility: an integer indent means "this many spaces".
        if indent is not None and not isinstance(indent, string_types):
            indent = indent * ' '
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # Pretty-printing supplies the newline, so drop the space
            # after the comma.
            self.item_separator = ','
        if default is not None:
            # Shadow the default() method on this instance only.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from hjson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, binary_type):
            _encoding = self.encoding
            if (_encoding is not None and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
        if isinstance(o, string_types):
            # Fast path: a bare string needs no recursive machinery.
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings in a non-UTF-8 encoding are
            # decoded before escaping.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, binary_type):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.

            # NaN and the infinities have no JSON representation;
            # emit null for all three.
            if o != o:
                text = 'null'
            elif o == _inf:
                text = 'null'
            elif o == _neginf:
                text = 'null'
            else:
                return _repr(o)

            return text

        key_memo = {}
        # bigint_as_string is shorthand for a 53-bit cutoff (the width of
        # JavaScript's exactly-representable integer range).
        int_as_string_bitcount = (
            53 if self.bigint_as_string else self.int_as_string_bitcount)
        _iterencode = _make_iterencode(
            markers, self.default, _encoder, self.indent, floatstr,
            self.key_separator, self.item_separator, self.sort_keys,
            self.skipkeys, _one_shot, self.use_decimal,
            self.namedtuple_as_object, self.tuple_as_array,
            int_as_string_bitcount,
            self.item_sort_key, self.encoding, self.for_json,
            Decimal=Decimal)
        try:
            return _iterencode(o, 0)
        finally:
            key_memo.clear()
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _int_as_string_bitcount, _item_sort_key,
        _encoding,_for_json,
        ## HACK: hand-optimized bytecode; turn globals into locals
        _PY3=PY3,
        ValueError=ValueError,
        string_types=string_types,
        Decimal=Decimal,
        dict=dict,
        float=float,
        id=id,
        integer_types=integer_types,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
        ):
    """Build and return the recursive ``_iterencode(o, indent_level)``
    generator used by ``JSONEncoder.iterencode``.

    All configuration is captured in the closure (and the builtins are
    rebound as keyword defaults) so the inner generators run on fast
    local-variable lookups.
    """
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")
    elif _sort_keys and not _item_sort_key:
        # Plain key sort is implemented as an item_sort_key on the key.
        _item_sort_key = itemgetter(0)

    if (_int_as_string_bitcount is not None and
            (_int_as_string_bitcount <= 0 or
             not isinstance(_int_as_string_bitcount, integer_types))):
        raise TypeError("int_as_string_bitcount must be a positive integer")

    def _encode_int(value):
        # Integers outside +/- 2**bitcount are quoted so JavaScript
        # consumers do not silently round them.
        skip_quoting = (
            _int_as_string_bitcount is None
            or
            _int_as_string_bitcount < 1
        )
        if (
            skip_quoting or
            (-1 << _int_as_string_bitcount)
            < value <
            (1 << _int_as_string_bitcount)
        ):
            return str(value)
        return '"' + str(value) + '"'

    def _iterencode_list(lst, _current_indent_level):
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Track this container by id() to detect cycles.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                # First element: emit the pending '[' (plus any indent).
                first = False
            else:
                buf = separator
            yield buf

            for chunk in _iterencode(value, _current_indent_level):
                yield chunk

        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _stringify_key(key):
        # Coerce a non-string dict key to its JSON string form, or return
        # None (meaning "skip this item") when _skipkeys is set.
        if isinstance(key, string_types): # pragma: no cover
            pass
        elif isinstance(key, binary_type):
            key = key.decode(_encoding)
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            # bool checks must precede the integer check: True == 1.
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, integer_types):
            key = str(key)
        elif _use_decimal and isinstance(key, Decimal):
            key = str(key)
        elif _skipkeys:
            key = None
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        return key

    def _iterencode_dict(dct, _current_indent_level):
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _PY3:
            iteritems = dct.items()
        else:
            iteritems = dct.iteritems()
        if _item_sort_key:
            # Sorting requires string keys up front, so stringify (and
            # possibly drop) keys before ordering the items.
            items = []
            for k, v in dct.items():
                if not isinstance(k, string_types):
                    k = _stringify_key(k)
                    if k is None:
                        continue
                items.append((k, v))
            items.sort(key=_item_sort_key)
        else:
            items = iteritems
        for key, value in items:
            if not (_item_sort_key or isinstance(key, string_types)):
                key = _stringify_key(key)
                if key is None:
                    # _skipkeys must be True
                    continue
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator

            for chunk in _iterencode(value, _current_indent_level):
                yield chunk

        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Dispatch on type; order matters (bool before int, list before
        # the generic container handling below).
        if (isinstance(o, string_types) or
            (_PY3 and isinstance(o, binary_type))):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, integer_types):
            yield _encode_int(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        else:
            for_json = _for_json and getattr(o, 'for_json', None)
            if for_json and callable(for_json):
                for chunk in _iterencode(for_json(), _current_indent_level):
                    yield chunk
            elif isinstance(o, list):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            else:
                # namedtuples expose _asdict(); encode them as objects.
                _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
                if _asdict and callable(_asdict):
                    for chunk in _iterencode_dict(_asdict(), _current_indent_level):
                        yield chunk
                elif (_tuple_as_array and isinstance(o, tuple)):
                    for chunk in _iterencode_list(o, _current_indent_level):
                        yield chunk
                elif isinstance(o, dict):
                    for chunk in _iterencode_dict(o, _current_indent_level):
                        yield chunk
                elif _use_decimal and isinstance(o, Decimal):
                    yield str(o)
                else:
                    # Unknown type: mark it (cycle detection through
                    # default()), convert via _default, and recurse.
                    if markers is not None:
                        markerid = id(o)
                        if markerid in markers:
                            raise ValueError("Circular reference detected")
                        markers[markerid] = o
                    o = _default(o)
                    for chunk in _iterencode(o, _current_indent_level):
                        yield chunk
                    if markers is not None:
                        del markers[markerid]

    return _iterencode
|
evalkit_tf446/lib/python3.10/site-packages/hjson/encoderH.py
ADDED
|
@@ -0,0 +1,552 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of HjsonEncoder
|
| 2 |
+
"""
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
import re
|
| 5 |
+
from operator import itemgetter
|
| 6 |
+
from decimal import Decimal
|
| 7 |
+
from .compat import u, unichr, binary_type, string_types, integer_types, PY3
|
| 8 |
+
from .decoder import PosInf
|
| 9 |
+
|
| 10 |
+
# This is required because u() will mangle the string and ur'' isn't valid
|
| 11 |
+
# python3 syntax
|
| 12 |
+
# This is required because u() will mangle the string and ur'' isn't valid
# python3 syntax
ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029\uffff]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Characters with a dedicated short escape sequence.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Remaining control characters fall back to the generic \uXXXX form.
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
for i in [0x2028, 0x2029, 0xffff]:
    ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))

# Character ranges that Hjson treats as "needs special handling"
# (format controls, bidi marks, separators, specials, ...).
COMMONRANGE = u'\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff'

# NEEDSESCAPE tests if the string can be written without escapes
NEEDSESCAPE = re.compile(u'[\\\"\x00-\x1f' + COMMONRANGE + ']')
# NEEDSQUOTES tests if the string can be written as a quoteless string (like needsEscape but without \\ and \")
NEEDSQUOTES = re.compile(u'^\\s|^"|^\'|^#|^\\/\\*|^\\/\\/|^\\{|^\\}|^\\[|^\\]|^:|^,|\\s$|[\x00-\x1f' + COMMONRANGE + u']')
# NEEDSESCAPEML tests if the string can be written as a multiline string (like needsEscape but without \n, \r, \\, \", \t)
NEEDSESCAPEML = re.compile(u'\'\'\'|^[\\s]+$|[\x00-\x08\x0b\x0c\x0e-\x1f' + COMMONRANGE + u']')

WHITESPACE = ' \t\n\r'
# Quoteless strings that would parse as a number or keyword must be quoted;
# these patterns detect such lookalikes (optionally followed by a comment
# or container punctuation). Trailing semicolons removed: not Python.
STARTSWITHNUMBER = re.compile(r'^[\t ]*(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?\s*((,|\]|\}|#|\/\/|\/\*).*)?$')
STARTSWITHKEYWORD = re.compile(r'^(true|false|null)\s*((,|\]|\}|#|\/\/|\/\*).*)?$')
# Object member names containing any of these must be quoted.
NEEDSESCAPENAME = re.compile(r'[,\{\[\}\]\s:#"\']|\/\/|\/\*|' + "'''")

# Callable used to stringify finite floats; repr() yields the shortest
# round-trippable representation.
FLOAT_REPR = repr
|
| 45 |
+
|
| 46 |
+
def encode_basestring(s, _PY3=PY3, _q=u('"')):
    """Return a JSON representation of a Python string.

    Wraps *s* in double quotes, replacing each character matched by
    ``ESCAPE`` with its precomputed escape sequence.
    """
    # Make sure the regex operates on text, not bytes.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    else:
        if isinstance(s, str) and HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')
    return _q + ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s) + _q
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def encode_basestring_ascii(s, _PY3=PY3):
    """Return an ASCII-only JSON representation of a Python string.

    Non-ASCII and control characters become ``\\uXXXX`` escapes; astral
    characters become UTF-16 surrogate pairs.
    """
    # Decode byte input so ord() below sees full code points.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    else:
        if isinstance(s, str) and HAS_UTF8.search(s) is not None:
            s = s.decode('utf-8')

    def _repl(match):
        ch = match.group(0)
        known = ESCAPE_DCT.get(ch)
        if known is not None:
            return known
        point = ord(ch)
        if point < 0x10000:
            #return '\\u{0:04x}'.format(point)
            return '\\u%04x' % (point,)
        # surrogate pair for code points beyond the BMP
        point -= 0x10000
        high = 0xd800 | ((point >> 10) & 0x3ff)
        low = 0xdc00 | (point & 0x3ff)
        #return '\\u{0:04x}\\u{1:04x}'.format(high, low)
        return '\\u%04x\\u%04x' % (high, low)

    return '"' + str(ESCAPE_ASCII.sub(_repl, s)) + '"'
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class HjsonEncoder(object):
|
| 91 |
+
"""Extensible JSON <http://json.org> encoder for Python data structures.
|
| 92 |
+
|
| 93 |
+
Supports the following objects and types by default:
|
| 94 |
+
|
| 95 |
+
+-------------------+---------------+
|
| 96 |
+
| Python | JSON |
|
| 97 |
+
+===================+===============+
|
| 98 |
+
| dict, namedtuple | object |
|
| 99 |
+
+-------------------+---------------+
|
| 100 |
+
| list, tuple | array |
|
| 101 |
+
+-------------------+---------------+
|
| 102 |
+
| str, unicode | string |
|
| 103 |
+
+-------------------+---------------+
|
| 104 |
+
| int, long, float | number |
|
| 105 |
+
+-------------------+---------------+
|
| 106 |
+
| True | true |
|
| 107 |
+
+-------------------+---------------+
|
| 108 |
+
| False | false |
|
| 109 |
+
+-------------------+---------------+
|
| 110 |
+
| None | null |
|
| 111 |
+
+-------------------+---------------+
|
| 112 |
+
|
| 113 |
+
To extend this to recognize other objects, subclass and implement a
|
| 114 |
+
``.default()`` method with another method that returns a serializable
|
| 115 |
+
object for ``o`` if possible, otherwise it should call the superclass
|
| 116 |
+
implementation (to raise ``TypeError``).
|
| 117 |
+
|
| 118 |
+
"""
|
| 119 |
+
|
| 120 |
+
def __init__(self, skipkeys=False, ensure_ascii=True,
|
| 121 |
+
check_circular=True, sort_keys=False,
|
| 122 |
+
indent=' ', encoding='utf-8', default=None,
|
| 123 |
+
use_decimal=True, namedtuple_as_object=True,
|
| 124 |
+
tuple_as_array=True, bigint_as_string=False,
|
| 125 |
+
item_sort_key=None, for_json=False,
|
| 126 |
+
int_as_string_bitcount=None):
|
| 127 |
+
"""Constructor for HjsonEncoder, with sensible defaults.
|
| 128 |
+
|
| 129 |
+
If skipkeys is false, then it is a TypeError to attempt
|
| 130 |
+
encoding of keys that are not str, int, long, float or None. If
|
| 131 |
+
skipkeys is True, such items are simply skipped.
|
| 132 |
+
|
| 133 |
+
If ensure_ascii is true, the output is guaranteed to be str
|
| 134 |
+
objects with all incoming unicode characters escaped. If
|
| 135 |
+
ensure_ascii is false, the output will be unicode object.
|
| 136 |
+
|
| 137 |
+
If check_circular is true, then lists, dicts, and custom encoded
|
| 138 |
+
objects will be checked for circular references during encoding to
|
| 139 |
+
prevent an infinite recursion (which would cause an OverflowError).
|
| 140 |
+
Otherwise, no such check takes place.
|
| 141 |
+
|
| 142 |
+
If sort_keys is true, then the output of dictionaries will be
|
| 143 |
+
sorted by key; this is useful for regression tests to ensure
|
| 144 |
+
that JSON serializations can be compared on a day-to-day basis.
|
| 145 |
+
|
| 146 |
+
If indent is a string, then JSON array elements and object members
|
| 147 |
+
will be pretty-printed with a newline followed by that string repeated
|
| 148 |
+
for each level of nesting.
|
| 149 |
+
|
| 150 |
+
If specified, default is a function that gets called for objects
|
| 151 |
+
that can't otherwise be serialized. It should return a JSON encodable
|
| 152 |
+
version of the object or raise a ``TypeError``.
|
| 153 |
+
|
| 154 |
+
If encoding is not None, then all input strings will be
|
| 155 |
+
transformed into unicode using that encoding prior to JSON-encoding.
|
| 156 |
+
The default is UTF-8.
|
| 157 |
+
|
| 158 |
+
If use_decimal is true (not the default), ``decimal.Decimal`` will
|
| 159 |
+
be supported directly by the encoder. For the inverse, decode JSON
|
| 160 |
+
with ``parse_float=decimal.Decimal``.
|
| 161 |
+
|
| 162 |
+
If namedtuple_as_object is true (the default), objects with
|
| 163 |
+
``_asdict()`` methods will be encoded as JSON objects.
|
| 164 |
+
|
| 165 |
+
If tuple_as_array is true (the default), tuple (and subclasses) will
|
| 166 |
+
be encoded as JSON arrays.
|
| 167 |
+
|
| 168 |
+
If bigint_as_string is true (not the default), ints 2**53 and higher
|
| 169 |
+
or lower than -2**53 will be encoded as strings. This is to avoid the
|
| 170 |
+
rounding that happens in Javascript otherwise.
|
| 171 |
+
|
| 172 |
+
If int_as_string_bitcount is a positive number (n), then int of size
|
| 173 |
+
greater than or equal to 2**n or lower than or equal to -2**n will be
|
| 174 |
+
encoded as strings.
|
| 175 |
+
|
| 176 |
+
If specified, item_sort_key is a callable used to sort the items in
|
| 177 |
+
each dictionary. This is useful if you want to sort items other than
|
| 178 |
+
in alphabetical order by key.
|
| 179 |
+
|
| 180 |
+
If for_json is true (not the default), objects with a ``for_json()``
|
| 181 |
+
method will use the return value of that method for encoding as JSON
|
| 182 |
+
instead of the object.
|
| 183 |
+
|
| 184 |
+
"""
|
| 185 |
+
|
| 186 |
+
self.skipkeys = skipkeys
|
| 187 |
+
self.ensure_ascii = ensure_ascii
|
| 188 |
+
self.check_circular = check_circular
|
| 189 |
+
self.sort_keys = sort_keys
|
| 190 |
+
self.use_decimal = use_decimal
|
| 191 |
+
self.namedtuple_as_object = namedtuple_as_object
|
| 192 |
+
self.tuple_as_array = tuple_as_array
|
| 193 |
+
self.bigint_as_string = bigint_as_string
|
| 194 |
+
self.item_sort_key = item_sort_key
|
| 195 |
+
self.for_json = for_json
|
| 196 |
+
self.int_as_string_bitcount = int_as_string_bitcount
|
| 197 |
+
if indent is not None and not isinstance(indent, string_types):
|
| 198 |
+
indent = indent * ' '
|
| 199 |
+
elif indent is None:
|
| 200 |
+
indent = ' '
|
| 201 |
+
self.indent = indent
|
| 202 |
+
if default is not None:
|
| 203 |
+
self.default = default
|
| 204 |
+
self.encoding = encoding
|
| 205 |
+
|
| 206 |
+
def default(self, o):
|
| 207 |
+
"""Implement this method in a subclass such that it returns
|
| 208 |
+
a serializable object for ``o``, or calls the base implementation
|
| 209 |
+
(to raise a ``TypeError``).
|
| 210 |
+
|
| 211 |
+
For example, to support arbitrary iterators, you could
|
| 212 |
+
implement default like this::
|
| 213 |
+
|
| 214 |
+
def default(self, o):
|
| 215 |
+
try:
|
| 216 |
+
iterable = iter(o)
|
| 217 |
+
except TypeError:
|
| 218 |
+
pass
|
| 219 |
+
else:
|
| 220 |
+
return list(iterable)
|
| 221 |
+
return HjsonEncoder.default(self, o)
|
| 222 |
+
|
| 223 |
+
"""
|
| 224 |
+
raise TypeError(repr(o) + " is not JSON serializable")
|
| 225 |
+
|
| 226 |
+
def encode(self, o):
|
| 227 |
+
"""Return a JSON string representation of a Python data structure.
|
| 228 |
+
|
| 229 |
+
>>> from hjson import HjsonEncoder
|
| 230 |
+
>>> HjsonEncoder().encode({"foo": ["bar", "baz"]})
|
| 231 |
+
'{"foo": ["bar", "baz"]}'
|
| 232 |
+
|
| 233 |
+
"""
|
| 234 |
+
# This is for extremely simple cases and benchmarks.
|
| 235 |
+
if isinstance(o, binary_type):
|
| 236 |
+
_encoding = self.encoding
|
| 237 |
+
if (_encoding is not None and not (_encoding == 'utf-8')):
|
| 238 |
+
o = o.decode(_encoding)
|
| 239 |
+
|
| 240 |
+
# This doesn't pass the iterator directly to ''.join() because the
|
| 241 |
+
# exceptions aren't as detailed. The list call should be roughly
|
| 242 |
+
# equivalent to the PySequence_Fast that ''.join() would do.
|
| 243 |
+
chunks = self.iterencode(o, _one_shot=True)
|
| 244 |
+
if not isinstance(chunks, (list, tuple)):
|
| 245 |
+
chunks = list(chunks)
|
| 246 |
+
if self.ensure_ascii:
|
| 247 |
+
return ''.join(chunks)
|
| 248 |
+
else:
|
| 249 |
+
return u''.join(chunks)
|
| 250 |
+
|
| 251 |
+
def iterencode(self, o, _one_shot=False):
    """Encode the given object and yield each string
    representation as available.

    For example::

        for chunk in HjsonEncoder().iterencode(bigobject):
            mysocket.write(chunk)

    Returns a generator of string chunks; ``_one_shot`` is forwarded to
    the iterencode machinery as an optimization hint.
    """
    # Circular-reference bookkeeping: id() -> object, or None to disable.
    if self.check_circular:
        markers = {}
    else:
        markers = None
    if self.ensure_ascii:
        _encoder = encode_basestring_ascii
    else:
        _encoder = encode_basestring
    if self.encoding != 'utf-8':
        # Wrap the escaper so byte strings are decoded with the
        # configured (non-UTF-8) encoding before being escaped.
        def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
            if isinstance(o, binary_type):
                o = o.decode(_encoding)
            return _orig_encoder(o)

    def floatstr(o, _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
        # Check for specials. Note that this type of test is processor
        # and/or platform-specific, so do tests which don't depend on
        # the internals: NaN (o != o) and the infinities have no JSON
        # representation, so they are emitted as null.
        if o != o or o == _inf or o == _neginf:
            return 'null'
        else:
            return _repr(o)

    # bigint_as_string is shorthand for a 53-bit cutoff (the IEEE-754
    # double integer-precision limit).
    int_as_string_bitcount = (
        53 if self.bigint_as_string else self.int_as_string_bitcount)
    _iterencode = _make_iterencode(
        markers, self.default, _encoder, self.indent, floatstr,
        self.sort_keys, self.skipkeys, _one_shot, self.use_decimal,
        self.namedtuple_as_object, self.tuple_as_array,
        int_as_string_bitcount,
        self.item_sort_key, self.encoding, self.for_json,
        Decimal=Decimal)
    # NOTE: a dead ``key_memo`` dict (a simplejson leftover that was
    # created and cleared but never passed anywhere) was removed here,
    # along with the try/finally that existed only to clear it.
    return _iterencode(o, 0, True)
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _int_as_string_bitcount, _item_sort_key,
        _encoding, _for_json,
        ## HACK: hand-optimized bytecode; turn globals into locals so the
        ## hot encoding loops use LOAD_FAST instead of global lookups.
        _PY3=PY3,
        ValueError=ValueError,
        string_types=string_types,
        Decimal=Decimal,
        dict=dict,
        float=float,
        id=id,
        integer_types=integer_types,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
    ):
    """Build and return the recursive ``_iterencode(o, indent_level, isRoot)``
    generator used by ``iterencode``.

    All encoder configuration is captured in this closure once, so the
    per-chunk work carries no attribute lookups.  ``markers`` is the
    shared circular-reference dict (or None when the check is disabled).
    """
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")
    elif _sort_keys and not _item_sort_key:
        # sort_keys without an explicit key function means: sort by key.
        _item_sort_key = itemgetter(0)

    if (_int_as_string_bitcount is not None and
        (_int_as_string_bitcount <= 0 or
            not isinstance(_int_as_string_bitcount, integer_types))):
        raise TypeError("int_as_string_bitcount must be a positive integer")

    def _encode_int(value):
        # NOTE(review): _int_as_string_bitcount is validated above but never
        # applied here -- integers are always emitted verbatim.  Confirm this
        # is intended for Hjson output (the JSON encoder quotes big ints).
        return str(value)

    def _stringify_key(key):
        # Coerce a non-string dict key to its JSON string form, or return
        # None (meaning "skip this entry") when _skipkeys is enabled.
        if isinstance(key, string_types): # pragma: no cover
            pass
        elif isinstance(key, binary_type):
            key = key.decode(_encoding)
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, integer_types):
            key = str(key)
        elif _use_decimal and isinstance(key, Decimal):
            key = str(key)
        elif _skipkeys:
            key = None
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        return key

    def _encoder_key(name):
        # Emit a dict key, quoting it only when Hjson requires quotes.
        if not name: return '""'

        # Check if we can insert this name without quotes
        if NEEDSESCAPENAME.search(name):
            return _encoder(name)
        else:
            # return without quotes
            return name

    # Note: the parameter deliberately shadows the builtin ``str`` inside
    # these two helpers; the builtin is still reachable via the ``str=str``
    # keyword default in the enclosing signature.
    def _encoder_str(str, _current_indent_level):
        if not str: return '""'

        # Check if we can insert this string without quotes
        # see hjson syntax (must not parse as true, false, null or number)

        first = str[0]
        isNumber = False
        if first == '-' or first >= '0' and first <= '9':
            isNumber = STARTSWITHNUMBER.match(str) is not None

        if (NEEDSQUOTES.search(str) or
            isNumber or
            STARTSWITHKEYWORD.match(str) is not None):

            # If the string contains no control characters, no quote characters, and no
            # backslash characters, then we can safely slap some quotes around it.
            # Otherwise we first check if the string can be expressed in multiline
            # format or we must replace the offending characters with safe escape
            # sequences.

            if not NEEDSESCAPE.search(str):
                return '"' + str + '"'
            elif not NEEDSESCAPEML.search(str):
                return _encoder_str_ml(str, _current_indent_level + 1)
            else:
                return _encoder(str)
        else:
            # return without quotes
            return str

    def _encoder_str_ml(str, _current_indent_level):
        # Emit the string in Hjson multiline ('''...''') form.
        a = str.replace('\r', '').split('\n')
        # gap += indent;

        if len(a) == 1:
            # The string contains only a single line. We still use the multiline
            # format as it avoids escaping the \ character (e.g. when used in a
            # regex).
            return "'''" + a[0] + "'''"
        else:
            gap = _indent * _current_indent_level
            res = '\n' + gap + "'''"
            for line in a:
                res += '\n'
                # Blank lines are emitted without trailing indentation.
                if line: res += gap + line
            return res + '\n' + gap + "'''"

    def _iterencode_dict(dct, _current_indent_level, _isRoot=False):
        if not dct:
            yield '{}'
            return
        if markers is not None:
            # Track this container so a self-reference is detected below.
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct

        # Non-root objects start on their own line (Hjson layout).
        if not _isRoot:
            yield '\n' + (_indent * _current_indent_level)

        _current_indent_level += 1
        newline_indent = '\n' + (_indent * _current_indent_level)

        yield '{'

        if _PY3:
            iteritems = dct.items()
        else:
            iteritems = dct.iteritems()
        if _item_sort_key:
            # Materialize (stringified-key, value) pairs so they can be sorted.
            items = []
            for k, v in dct.items():
                if not isinstance(k, string_types):
                    k = _stringify_key(k)
                    if k is None:
                        continue
                items.append((k, v))
            items.sort(key=_item_sort_key)
        else:
            items = iteritems
        for key, value in items:
            if not (_item_sort_key or isinstance(key, string_types)):
                key = _stringify_key(key)
                if key is None:
                    # _skipkeys must be True
                    continue

            yield newline_indent
            yield _encoder_key(key)

            # Peek at the first value chunk: nested containers begin with a
            # newline, in which case the ':' gets no trailing space.
            first = True
            for chunk in _iterencode(value, _current_indent_level):
                if first:
                    first = False
                    if chunk[0 : 1] == '\n': yield ':'
                    else: yield ': '
                yield chunk

        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode_list(lst, _current_indent_level, _isRoot=False):
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst

        if not _isRoot:
            yield '\n' + (_indent * _current_indent_level)

        _current_indent_level += 1
        newline_indent = '\n' + (_indent * _current_indent_level)
        yield '['

        for value in lst:
            yield newline_indent

            # List elements are encoded as "roots" so containers do not emit
            # an extra leading newline of their own.
            for chunk in _iterencode(value, _current_indent_level, True):
                yield chunk

        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level, _isRoot=False):
        # Dispatch on type, cheapest/most common checks first.
        if (isinstance(o, string_types) or
            (_PY3 and isinstance(o, binary_type))):
            yield _encoder_str(o, _current_indent_level)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, integer_types):
            yield _encode_int(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        else:
            # for_json() hook takes precedence over structural handling.
            for_json = _for_json and getattr(o, 'for_json', None)
            if for_json and callable(for_json):
                for chunk in _iterencode(for_json(), _current_indent_level, _isRoot):
                    yield chunk
            elif isinstance(o, list):
                for chunk in _iterencode_list(o, _current_indent_level, _isRoot):
                    yield chunk
            else:
                # namedtuples encode as objects (via _asdict) before the
                # generic tuple-as-array fallback is considered.
                _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
                if _asdict and callable(_asdict):
                    for chunk in _iterencode_dict(_asdict(), _current_indent_level, _isRoot):
                        yield chunk
                elif (_tuple_as_array and isinstance(o, tuple)):
                    for chunk in _iterencode_list(o, _current_indent_level, _isRoot):
                        yield chunk
                elif isinstance(o, dict):
                    for chunk in _iterencode_dict(o, _current_indent_level, _isRoot):
                        yield chunk
                elif _use_decimal and isinstance(o, Decimal):
                    yield str(o)
                else:
                    # Unknown type: mark it (the object returned by _default
                    # could reference the original), convert, and recurse.
                    if markers is not None:
                        markerid = id(o)
                        if markerid in markers:
                            raise ValueError("Circular reference detected")
                        markers[markerid] = o
                    o = _default(o)
                    for chunk in _iterencode(o, _current_indent_level, _isRoot):
                        yield chunk
                    if markers is not None:
                        del markers[markerid]

    return _iterencode
|
evalkit_tf446/lib/python3.10/site-packages/hjson/ordered_dict.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
|
| 2 |
+
|
| 3 |
+
http://code.activestate.com/recipes/576693/
|
| 4 |
+
|
| 5 |
+
"""
|
| 6 |
+
from UserDict import DictMixin
|
| 7 |
+
|
| 8 |
+
# Modified from original to support Python 2.4, see
# http://code.google.com/p/simplejson/issues/detail?id=53
# The builtin all() only exists from Python 2.5 on; provide a pure-Python
# fallback when it is missing.
try:
    all
except NameError:
    def all(seq):
        # True iff every element of seq is truthy (vacuously True if empty).
        for elem in seq:
            if not elem:
                return False
        return True
|
| 18 |
+
|
| 19 |
+
class OrderedDict(dict, DictMixin):
    """Dict that remembers insertion order (backport for pre-2.7 Pythons).

    Order is kept in a doubly linked list of ``[key, prev, next]`` cells:
    ``self.__end`` is the sentinel node, ``self.__map`` maps each key to
    its cell.  The inherited dict stores the actual key -> value mapping.
    """

    def __init__(self, *args, **kwds):
        # Mirror dict(): at most one positional mapping/iterable argument.
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Only initialize the linked list on first construction; a
            # re-__init__ on an existing instance keeps current contents.
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        # New keys are appended at the tail of the linked list; updating an
        # existing key leaves its position unchanged.
        if key not in self:
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the cell from the doubly linked list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        # Walk the linked list forward, yielding keys in insertion order.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        # Walk the linked list backward.
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        """Remove and return a ``(key, value)`` pair; LIFO when *last*."""
        if not self:
            raise KeyError('dictionary is empty')
        # Modified from original to support Python 2.4, see
        # http://code.google.com/p/simplejson/issues/detail?id=53
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        # Pickle as (class, (items,)), temporarily hiding the linked-list
        # attributes so they do not leak into the instance dict.
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        return list(self)

    # Derive the remaining mapping API from DictMixin in terms of the
    # ordered primitives defined above.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        """Alternate constructor: every key in *iterable* maps to *value*."""
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Comparison against another OrderedDict is order-sensitive;
        # against a plain dict it is order-insensitive.
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                   all(p==q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
|
evalkit_tf446/lib/python3.10/site-packages/hjson/scanner.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""JSON token scanner
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
|
| 5 |
+
__all__ = ['HjsonDecodeError']
|
| 6 |
+
|
| 7 |
+
class HjsonDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)

    """
    # Note that this exception is used from _speedups
    def __init__(self, msg, doc, pos, end=None):
        # Build the human-readable message first, then record the raw
        # pieces so callers can inspect them programmatically.
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is None:
            self.endlineno = self.endcolno = None
        else:
            self.endlineno, self.endcolno = linecol(doc, end)

    def __reduce__(self):
        # Pickle support: reconstruct from the original constructor args.
        return self.__class__, (self.msg, self.doc, self.pos, self.end)
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def linecol(doc, pos):
    """Return the 1-based ``(line, column)`` of offset *pos* within *doc*."""
    lineno = doc.count('\n', 0, pos) + 1
    if lineno == 1:
        # Still on the first line: the column is just the offset itself.
        return lineno, pos + 1
    # Otherwise measure the column from the last newline before *pos*.
    return lineno, pos - doc.rindex('\n', 0, pos)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def errmsg(msg, doc, pos, end=None):
    """Format *msg* with line/column context for a parse error in *doc*.

    A literal ``%r`` in *msg* is replaced by the repr of the character at
    *pos*; when *end* is given, the message describes a span.
    """
    lineno, colno = linecol(doc, pos)
    msg = msg.replace('%r', repr(doc[pos:pos + 1]))
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
|
| 55 |
+
|
| 56 |
+
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__init__.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
import unittest
|
| 3 |
+
import doctest
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def additional_tests(suite=None):
    """Add the doctest suites of the core hjson modules to *suite*.

    A fresh TestSuite is created when *suite* is None; the (possibly new)
    suite is returned.
    """
    import hjson
    import hjson.encoder
    import hjson.decoder
    target = suite if suite is not None else unittest.TestSuite()
    for module in (hjson, hjson.encoder, hjson.decoder):
        target.addTest(doctest.DocTestSuite(module))
    return target
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def all_tests_suite():
    """Return the full hjson test suite: all named test modules plus the
    module doctests collected by :func:`additional_tests`.
    """
    # The original defined a nested get_suite() wrapper only to call it
    # once immediately; flattened to a direct return.
    return additional_tests(
        unittest.TestLoader().loadTestsFromNames([
            'hjson.tests.test_hjson',
            'hjson.tests.test_bitsize_int_as_string',
            'hjson.tests.test_bigint_as_string',
            'hjson.tests.test_check_circular',
            'hjson.tests.test_decode',
            'hjson.tests.test_default',
            'hjson.tests.test_dump',
            'hjson.tests.test_encode_basestring_ascii',
            'hjson.tests.test_errors',
            'hjson.tests.test_fail',
            'hjson.tests.test_float',
            'hjson.tests.test_indent',
            'hjson.tests.test_pass1',
            'hjson.tests.test_pass2',
            'hjson.tests.test_pass3',
            'hjson.tests.test_recursion',
            'hjson.tests.test_scanstring',
            'hjson.tests.test_separators',
            'hjson.tests.test_unicode',
            'hjson.tests.test_decimal',
            'hjson.tests.test_tuple',
            'hjson.tests.test_namedtuple',
            #'hjson.tests.test_tool', # fails on windows
            'hjson.tests.test_for_json',
        ]))
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def main():
    """Run the full suite; exit with a non-zero status on failure."""
    verbosity = 1 + sys.argv.count('-v')
    runner = unittest.TextTestRunner(verbosity=verbosity)
    result = runner.run(all_tests_suite())
    raise SystemExit(not result.wasSuccessful())
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
if __name__ == '__main__':
    import os
    import sys
    # Make the package root (three directories up from this file)
    # importable when the module is executed directly from a checkout.
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    main()
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_bigint_as_string.cpython-310.pyc
ADDED
|
Binary file (1.83 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_bitsize_int_as_string.cpython-310.pyc
ADDED
|
Binary file (2.21 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_check_circular.cpython-310.pyc
ADDED
|
Binary file (1.66 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_decode.cpython-310.pyc
ADDED
|
Binary file (4.78 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_default.cpython-310.pyc
ADDED
|
Binary file (616 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_dump.cpython-310.pyc
ADDED
|
Binary file (5.26 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_encode_basestring_ascii.cpython-310.pyc
ADDED
|
Binary file (2.04 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_errors.cpython-310.pyc
ADDED
|
Binary file (1.94 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_fail.cpython-310.pyc
ADDED
|
Binary file (2.94 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_float.cpython-310.pyc
ADDED
|
Binary file (1.43 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_hjson.cpython-310.pyc
ADDED
|
Binary file (2.28 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_indent.cpython-310.pyc
ADDED
|
Binary file (2.3 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc
ADDED
|
Binary file (1.68 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_namedtuple.cpython-310.pyc
ADDED
|
Binary file (4.94 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass1.cpython-310.pyc
ADDED
|
Binary file (2.04 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass3.cpython-310.pyc
ADDED
|
Binary file (772 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_recursion.cpython-310.pyc
ADDED
|
Binary file (2.06 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_separators.cpython-310.pyc
ADDED
|
Binary file (1.27 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_tool.cpython-310.pyc
ADDED
|
Binary file (2.88 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_tuple.cpython-310.pyc
ADDED
|
Binary file (1.7 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_unicode.cpython-310.pyc
ADDED
|
Binary file (6.58 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_bigint_as_string.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestBigintAsString(TestCase):
    """Ints beyond +/-2**53 must be emitted as strings when either
    bigint_as_string or an equivalent int_as_string_bitcount is set."""

    # Python 2.5, at least the one that ships on Mac OS X, calculates
    # 2 ** 53 as 0! It manages to calculate 1 << 53 correctly.
    # (input value, expected decoded result under the options below)
    values = [(200, 200),
              ((1 << 53) - 1, 9007199254740991),
              ((1 << 53), '9007199254740992'),
              ((1 << 53) + 1, '9007199254740993'),
              (-100, -100),
              ((-1 << 53), '-9007199254740992'),
              ((-1 << 53) - 1, '-9007199254740993'),
              ((-1 << 53) + 1, -9007199254740991)]

    # Both spellings of the 53-bit cutoff must behave identically.
    options = (
        {"bigint_as_string": True},
        {"int_as_string_bitcount": 53}
    )

    def test_ints(self):
        # Bare ints round-trip unchanged without options, and stringify
        # past the cutoff with them.
        for opts in self.options:
            for val, expect in self.values:
                self.assertEqual(
                    val,
                    json.loads(json.dumpsJSON(val)))
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(val, **opts)))

    def test_lists(self):
        # Same behavior for ints nested inside lists.
        for opts in self.options:
            for val, expect in self.values:
                val = [val, val]
                expect = [expect, expect]
                self.assertEqual(
                    val,
                    json.loads(json.dumpsJSON(val)))
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(val, **opts)))

    def test_dicts(self):
        # Same behavior for ints as dict values.
        for opts in self.options:
            for val, expect in self.values:
                val = {'k': val}
                expect = {'k': expect}
                self.assertEqual(
                    val,
                    json.loads(json.dumpsJSON(val)))
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(val, **opts)))

    def test_dict_keys(self):
        # Int dict keys are always stringified, with or without options.
        for opts in self.options:
            for val, _ in self.values:
                expect = {str(val): 'value'}
                val = {val: 'value'}
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(val)))
                self.assertEqual(
                    expect,
                    json.loads(json.dumpsJSON(val, **opts)))
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_bitsize_int_as_string.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestBitSizeIntAsString(TestCase):
    """int_as_string_bitcount with an arbitrary cutoff (31 bits here)."""

    # Python 2.5, at least the one that ships on Mac OS X, calculates
    # 2 ** 31 as 0! It manages to calculate 1 << 31 correctly.
    # (input value, expected decoded result with int_as_string_bitcount=31)
    values = [
        (200, 200),
        ((1 << 31) - 1, (1 << 31) - 1),
        ((1 << 31), str(1 << 31)),
        ((1 << 31) + 1, str((1 << 31) + 1)),
        (-100, -100),
        ((-1 << 31), str(-1 << 31)),
        ((-1 << 31) - 1, str((-1 << 31) - 1)),
        ((-1 << 31) + 1, (-1 << 31) + 1),
    ]

    def test_invalid_counts(self):
        # Non-integers and non-positive counts must be rejected.
        for n in ['foo', -1, 0, 1.0]:
            self.assertRaises(
                TypeError,
                json.dumpsJSON, 0, int_as_string_bitcount=n)

    def test_ints_outside_range_fails(self):
        # A value inside the 16-bit cutoff stays an int, not a string.
        self.assertNotEqual(
            str(1 << 15),
            json.loads(json.dumpsJSON(1 << 15, int_as_string_bitcount=16)),
        )

    def test_ints(self):
        for val, expect in self.values:
            self.assertEqual(
                val,
                json.loads(json.dumpsJSON(val)))
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)),
            )

    def test_lists(self):
        # Same behavior for ints nested inside lists.
        for val, expect in self.values:
            val = [val, val]
            expect = [expect, expect]
            self.assertEqual(
                val,
                json.loads(json.dumpsJSON(val)))
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)))

    def test_dicts(self):
        # Same behavior for ints as dict values.
        for val, expect in self.values:
            val = {'k': val}
            expect = {'k': expect}
            self.assertEqual(
                val,
                json.loads(json.dumpsJSON(val)))
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)))

    def test_dict_keys(self):
        # Int dict keys are always stringified regardless of the option.
        for val, _ in self.values:
            expect = {str(val): 'value'}
            val = {val: 'value'}
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(val)))
            self.assertEqual(
                expect,
                json.loads(json.dumpsJSON(val, int_as_string_bitcount=31)))
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_check_circular.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
import hjson as json
|
| 3 |
+
|
| 4 |
+
def default_iterable(obj):
    """Fallback serializer: materialize any iterable into a list."""
    return [item for item in obj]
|
| 6 |
+
|
| 7 |
+
class TestCheckCircular(TestCase):
    """Circular structures must raise unless check_circular is disabled."""

    def test_circular_dict(self):
        # A dict that contains itself is rejected.
        dct = {}
        dct['a'] = dct
        self.assertRaises(ValueError, json.dumpsJSON, dct)

    def test_circular_list(self):
        # A list that contains itself is rejected.
        lst = []
        lst.append(lst)
        self.assertRaises(ValueError, json.dumpsJSON, lst)

    def test_circular_composite(self):
        # A cycle through nested containers is also detected.
        dct2 = {}
        dct2['a'] = []
        dct2['a'].append(dct2)
        self.assertRaises(ValueError, json.dumpsJSON, dct2)

    def test_circular_default(self):
        # A default= hook makes sets serializable; without it they fail.
        json.dumpsJSON([set()], default=default_iterable)
        self.assertRaises(TypeError, json.dumpsJSON, [set()])

    def test_circular_off_default(self):
        # Disabling the circular check must not change either outcome.
        json.dumpsJSON([set()], default=default_iterable, check_circular=False)
        self.assertRaises(TypeError, json.dumpsJSON, [set()], check_circular=False)
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_decimal.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import decimal
|
| 2 |
+
from decimal import Decimal
|
| 3 |
+
from unittest import TestCase
|
| 4 |
+
from hjson.compat import StringIO, reload_module
|
| 5 |
+
|
| 6 |
+
import hjson as json
|
| 7 |
+
|
| 8 |
+
class TestDecimal(TestCase):
|
| 9 |
+
NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500"
|
| 10 |
+
def dumps(self, obj, **kw):
|
| 11 |
+
sio = StringIO()
|
| 12 |
+
json.dumpJSON(obj, sio, **kw)
|
| 13 |
+
res = json.dumpsJSON(obj, **kw)
|
| 14 |
+
self.assertEqual(res, sio.getvalue())
|
| 15 |
+
return res
|
| 16 |
+
|
| 17 |
+
def loads(self, s, **kw):
|
| 18 |
+
sio = StringIO(s)
|
| 19 |
+
res = json.loads(s, **kw)
|
| 20 |
+
self.assertEqual(res, json.load(sio, **kw))
|
| 21 |
+
return res
|
| 22 |
+
|
| 23 |
+
def test_decimal_encode(self):
|
| 24 |
+
for d in map(Decimal, self.NUMS):
|
| 25 |
+
self.assertEqual(self.dumps(d, use_decimal=True), str(d))
|
| 26 |
+
|
| 27 |
+
def test_decimal_decode(self):
|
| 28 |
+
for s in self.NUMS:
|
| 29 |
+
self.assertEqual(self.loads(s, parse_float=Decimal), Decimal(s))
|
| 30 |
+
|
| 31 |
+
def test_stringify_key(self):
|
| 32 |
+
for d in map(Decimal, self.NUMS):
|
| 33 |
+
v = {d: d}
|
| 34 |
+
self.assertEqual(
|
| 35 |
+
self.loads(
|
| 36 |
+
self.dumps(v, use_decimal=True), parse_float=Decimal),
|
| 37 |
+
{str(d): d})
|
| 38 |
+
|
| 39 |
+
def test_decimal_roundtrip(self):
|
| 40 |
+
for d in map(Decimal, self.NUMS):
|
| 41 |
+
# The type might not be the same (int and Decimal) but they
|
| 42 |
+
# should still compare equal.
|
| 43 |
+
for v in [d, [d], {'': d}]:
|
| 44 |
+
self.assertEqual(
|
| 45 |
+
self.loads(
|
| 46 |
+
self.dumps(v, use_decimal=True), parse_float=Decimal),
|
| 47 |
+
v)
|
| 48 |
+
|
| 49 |
+
def test_decimal_defaults(self):
|
| 50 |
+
d = Decimal('1.1')
|
| 51 |
+
# use_decimal=True is the default
|
| 52 |
+
self.assertRaises(TypeError, json.dumpsJSON, d, use_decimal=False)
|
| 53 |
+
self.assertEqual('1.1', json.dumpsJSON(d))
|
| 54 |
+
self.assertEqual('1.1', json.dumpsJSON(d, use_decimal=True))
|
| 55 |
+
self.assertRaises(TypeError, json.dumpJSON, d, StringIO(),
|
| 56 |
+
use_decimal=False)
|
| 57 |
+
sio = StringIO()
|
| 58 |
+
json.dumpJSON(d, sio)
|
| 59 |
+
self.assertEqual('1.1', sio.getvalue())
|
| 60 |
+
sio = StringIO()
|
| 61 |
+
json.dumpJSON(d, sio, use_decimal=True)
|
| 62 |
+
self.assertEqual('1.1', sio.getvalue())
|
| 63 |
+
|
| 64 |
+
def test_decimal_reload(self):
|
| 65 |
+
# Simulate a subinterpreter that reloads the Python modules but not
|
| 66 |
+
# the C code https://github.com/simplejson/simplejson/issues/34
|
| 67 |
+
global Decimal
|
| 68 |
+
Decimal = reload_module(decimal).Decimal
|
| 69 |
+
import hjson.encoder
|
| 70 |
+
hjson.encoder.Decimal = Decimal
|
| 71 |
+
self.test_decimal_roundtrip()
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_decode.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import decimal
|
| 4 |
+
from unittest import TestCase
|
| 5 |
+
|
| 6 |
+
import hjson as json
|
| 7 |
+
from hjson import OrderedDict
|
| 8 |
+
from hjson.compat import StringIO
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestDecode(TestCase):
|
| 12 |
+
if not hasattr(TestCase, "assertIs"):
|
| 13 |
+
|
| 14 |
+
def assertIs(self, a, b):
|
| 15 |
+
self.assertTrue(a is b, "%r is %r" % (a, b))
|
| 16 |
+
|
| 17 |
+
def test_decimal(self):
|
| 18 |
+
rval = json.loads("1.1", parse_float=decimal.Decimal)
|
| 19 |
+
self.assertTrue(isinstance(rval, decimal.Decimal))
|
| 20 |
+
self.assertEqual(rval, decimal.Decimal("1.1"))
|
| 21 |
+
|
| 22 |
+
def test_float(self):
|
| 23 |
+
rval = json.loads("1", parse_int=float)
|
| 24 |
+
self.assertTrue(isinstance(rval, float))
|
| 25 |
+
self.assertEqual(rval, 1.0)
|
| 26 |
+
|
| 27 |
+
def test_decoder_optimizations(self):
|
| 28 |
+
# Several optimizations were made that skip over calls to
|
| 29 |
+
# the whitespace regex, so this test is designed to try and
|
| 30 |
+
# exercise the uncommon cases. The array cases are already covered.
|
| 31 |
+
rval = json.loads('{ "key" : "value" , "k":"v" }')
|
| 32 |
+
self.assertEqual(rval, {"key": "value", "k": "v"})
|
| 33 |
+
|
| 34 |
+
def test_empty_objects(self):
|
| 35 |
+
s = "{}"
|
| 36 |
+
self.assertEqual(json.loads(s), eval(s))
|
| 37 |
+
s = "[]"
|
| 38 |
+
self.assertEqual(json.loads(s), eval(s))
|
| 39 |
+
s = '""'
|
| 40 |
+
self.assertEqual(json.loads(s), eval(s))
|
| 41 |
+
|
| 42 |
+
def test_object_pairs_hook(self):
|
| 43 |
+
s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
|
| 44 |
+
p = [
|
| 45 |
+
("xkd", 1),
|
| 46 |
+
("kcw", 2),
|
| 47 |
+
("art", 3),
|
| 48 |
+
("hxm", 4),
|
| 49 |
+
("qrt", 5),
|
| 50 |
+
("pad", 6),
|
| 51 |
+
("hoy", 7),
|
| 52 |
+
]
|
| 53 |
+
self.assertEqual(json.loads(s), eval(s))
|
| 54 |
+
self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
|
| 55 |
+
self.assertEqual(json.load(StringIO(s), object_pairs_hook=lambda x: x), p)
|
| 56 |
+
od = json.loads(s, object_pairs_hook=OrderedDict)
|
| 57 |
+
self.assertEqual(od, OrderedDict(p))
|
| 58 |
+
self.assertEqual(type(od), OrderedDict)
|
| 59 |
+
# the object_pairs_hook takes priority over the object_hook
|
| 60 |
+
self.assertEqual(
|
| 61 |
+
json.loads(s, object_pairs_hook=OrderedDict, object_hook=lambda x: None),
|
| 62 |
+
OrderedDict(p),
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
def check_keys_reuse(self, source, loads):
|
| 66 |
+
rval = loads(source)
|
| 67 |
+
(a, b), (c, d) = sorted(rval[0]), sorted(rval[1])
|
| 68 |
+
self.assertIs(a, c)
|
| 69 |
+
self.assertIs(b, d)
|
| 70 |
+
|
| 71 |
+
def test_keys_reuse_str(self):
|
| 72 |
+
s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'.encode("utf8")
|
| 73 |
+
self.check_keys_reuse(s, json.loads)
|
| 74 |
+
|
| 75 |
+
def test_keys_reuse_unicode(self):
|
| 76 |
+
s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'
|
| 77 |
+
self.check_keys_reuse(s, json.loads)
|
| 78 |
+
|
| 79 |
+
def test_empty_strings(self):
|
| 80 |
+
self.assertEqual(json.loads('""'), "")
|
| 81 |
+
self.assertEqual(json.loads(u'""'), u"")
|
| 82 |
+
self.assertEqual(json.loads('[""]'), [""])
|
| 83 |
+
self.assertEqual(json.loads(u'[""]'), [u""])
|
| 84 |
+
|
| 85 |
+
def test_multiline_string(self):
|
| 86 |
+
s1 = """
|
| 87 |
+
|
| 88 |
+
hello: '''
|
| 89 |
+
|
| 90 |
+
'''
|
| 91 |
+
|
| 92 |
+
"""
|
| 93 |
+
s2 = """
|
| 94 |
+
|
| 95 |
+
hello: '''
|
| 96 |
+
'''
|
| 97 |
+
|
| 98 |
+
"""
|
| 99 |
+
s3 = """
|
| 100 |
+
|
| 101 |
+
hello: ''''''
|
| 102 |
+
|
| 103 |
+
"""
|
| 104 |
+
s4 = """
|
| 105 |
+
|
| 106 |
+
hello: ''
|
| 107 |
+
|
| 108 |
+
"""
|
| 109 |
+
s5 = """
|
| 110 |
+
|
| 111 |
+
hello: ""
|
| 112 |
+
|
| 113 |
+
"""
|
| 114 |
+
self.assertEqual(json.loads(s1), {"hello": ""})
|
| 115 |
+
self.assertEqual(json.loads(s2), {"hello": ""})
|
| 116 |
+
self.assertEqual(json.loads(s3), {"hello": ""})
|
| 117 |
+
self.assertEqual(json.loads(s4), {"hello": ""})
|
| 118 |
+
self.assertEqual(json.loads(s5), {"hello": ""})
|
| 119 |
+
|
| 120 |
+
def test_raw_decode(self):
|
| 121 |
+
cls = json.decoder.HjsonDecoder
|
| 122 |
+
self.assertEqual(({"a": {}}, 9), cls().raw_decode('{"a": {}}'))
|
| 123 |
+
# http://code.google.com/p/simplejson/issues/detail?id=85
|
| 124 |
+
self.assertEqual(
|
| 125 |
+
({"a": {}}, 9), cls(object_pairs_hook=dict).raw_decode('{"a": {}}')
|
| 126 |
+
)
|
| 127 |
+
# https://github.com/simplejson/simplejson/pull/38
|
| 128 |
+
self.assertEqual(({"a": {}}, 11), cls().raw_decode(' \n{"a": {}}'))
|
| 129 |
+
|
| 130 |
+
def test_bounds_checking(self):
|
| 131 |
+
# https://github.com/simplejson/simplejson/issues/98
|
| 132 |
+
j = json.decoder.HjsonDecoder()
|
| 133 |
+
for i in [4, 5, 6, -1, -2, -3, -4, -5, -6]:
|
| 134 |
+
self.assertRaises(ValueError, j.scan_once, "1234", i)
|
| 135 |
+
self.assertRaises(ValueError, j.raw_decode, "1234", i)
|
| 136 |
+
x, y = sorted(["128931233", "472389423"], key=id)
|
| 137 |
+
diff = id(x) - id(y)
|
| 138 |
+
self.assertRaises(ValueError, j.scan_once, y, diff)
|
| 139 |
+
self.assertRaises(ValueError, j.raw_decode, y, i)
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_default.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
class TestDefault(TestCase):
|
| 6 |
+
def test_default(self):
|
| 7 |
+
self.assertEqual(
|
| 8 |
+
json.dumpsJSON(type, default=repr),
|
| 9 |
+
json.dumpsJSON(repr(type)))
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_dump.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
from hjson.compat import StringIO, long_type, b, binary_type, PY3
|
| 3 |
+
import hjson as json
|
| 4 |
+
|
| 5 |
+
def as_text_type(s):
|
| 6 |
+
if PY3 and isinstance(s, binary_type):
|
| 7 |
+
return s.decode('ascii')
|
| 8 |
+
return s
|
| 9 |
+
|
| 10 |
+
class TestDump(TestCase):
|
| 11 |
+
def test_dump(self):
|
| 12 |
+
sio = StringIO()
|
| 13 |
+
json.dumpJSON({}, sio)
|
| 14 |
+
self.assertEqual(sio.getvalue(), '{}')
|
| 15 |
+
|
| 16 |
+
def test_constants(self):
|
| 17 |
+
for c in [None, True, False]:
|
| 18 |
+
self.assertTrue(json.loads(json.dumpsJSON(c)) is c)
|
| 19 |
+
self.assertTrue(json.loads(json.dumpsJSON([c]))[0] is c)
|
| 20 |
+
self.assertTrue(json.loads(json.dumpsJSON({'a': c}))['a'] is c)
|
| 21 |
+
|
| 22 |
+
def test_stringify_key(self):
|
| 23 |
+
items = [(b('bytes'), 'bytes'),
|
| 24 |
+
(1.0, '1.0'),
|
| 25 |
+
(10, '10'),
|
| 26 |
+
(True, 'true'),
|
| 27 |
+
(False, 'false'),
|
| 28 |
+
(None, 'null'),
|
| 29 |
+
(long_type(100), '100')]
|
| 30 |
+
for k, expect in items:
|
| 31 |
+
self.assertEqual(
|
| 32 |
+
json.loads(json.dumpsJSON({k: expect})),
|
| 33 |
+
{expect: expect})
|
| 34 |
+
self.assertEqual(
|
| 35 |
+
json.loads(json.dumpsJSON({k: expect}, sort_keys=True)),
|
| 36 |
+
{expect: expect})
|
| 37 |
+
self.assertRaises(TypeError, json.dumpsJSON, {json: 1})
|
| 38 |
+
for v in [{}, {'other': 1}, {b('derp'): 1, 'herp': 2}]:
|
| 39 |
+
for sort_keys in [False, True]:
|
| 40 |
+
v0 = dict(v)
|
| 41 |
+
v0[json] = 1
|
| 42 |
+
v1 = dict((as_text_type(key), val) for (key, val) in v.items())
|
| 43 |
+
self.assertEqual(
|
| 44 |
+
json.loads(json.dumpsJSON(v0, skipkeys=True, sort_keys=sort_keys)),
|
| 45 |
+
v1)
|
| 46 |
+
self.assertEqual(
|
| 47 |
+
json.loads(json.dumpsJSON({'': v0}, skipkeys=True, sort_keys=sort_keys)),
|
| 48 |
+
{'': v1})
|
| 49 |
+
self.assertEqual(
|
| 50 |
+
json.loads(json.dumpsJSON([v0], skipkeys=True, sort_keys=sort_keys)),
|
| 51 |
+
[v1])
|
| 52 |
+
|
| 53 |
+
def test_dumps(self):
|
| 54 |
+
self.assertEqual(json.dumpsJSON({}), '{}')
|
| 55 |
+
|
| 56 |
+
def test_encode_truefalse(self):
|
| 57 |
+
self.assertEqual(json.dumpsJSON(
|
| 58 |
+
{True: False, False: True}, sort_keys=True),
|
| 59 |
+
'{"false": true, "true": false}')
|
| 60 |
+
self.assertEqual(
|
| 61 |
+
json.dumpsJSON(
|
| 62 |
+
{2: 3.0,
|
| 63 |
+
4.0: long_type(5),
|
| 64 |
+
False: 1,
|
| 65 |
+
long_type(6): True,
|
| 66 |
+
"7": 0},
|
| 67 |
+
sort_keys=True),
|
| 68 |
+
'{"2": 3.0, "4.0": 5, "6": true, "7": 0, "false": 1}')
|
| 69 |
+
|
| 70 |
+
def test_ordered_dict(self):
|
| 71 |
+
# http://bugs.python.org/issue6105
|
| 72 |
+
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
|
| 73 |
+
s = json.dumpsJSON(json.OrderedDict(items))
|
| 74 |
+
self.assertEqual(
|
| 75 |
+
s,
|
| 76 |
+
'{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
|
| 77 |
+
|
| 78 |
+
def test_indent_unknown_type_acceptance(self):
|
| 79 |
+
"""
|
| 80 |
+
A test against the regression mentioned at `github issue 29`_.
|
| 81 |
+
|
| 82 |
+
The indent parameter should accept any type which pretends to be
|
| 83 |
+
an instance of int or long when it comes to being multiplied by
|
| 84 |
+
strings, even if it is not actually an int or long, for
|
| 85 |
+
backwards compatibility.
|
| 86 |
+
|
| 87 |
+
.. _github issue 29:
|
| 88 |
+
http://github.com/simplejson/simplejson/issue/29
|
| 89 |
+
"""
|
| 90 |
+
|
| 91 |
+
class AwesomeInt(object):
|
| 92 |
+
"""An awesome reimplementation of integers"""
|
| 93 |
+
|
| 94 |
+
def __init__(self, *args, **kwargs):
|
| 95 |
+
if len(args) > 0:
|
| 96 |
+
# [construct from literals, objects, etc.]
|
| 97 |
+
# ...
|
| 98 |
+
|
| 99 |
+
# Finally, if args[0] is an integer, store it
|
| 100 |
+
if isinstance(args[0], int):
|
| 101 |
+
self._int = args[0]
|
| 102 |
+
|
| 103 |
+
# [various methods]
|
| 104 |
+
|
| 105 |
+
def __mul__(self, other):
|
| 106 |
+
# [various ways to multiply AwesomeInt objects]
|
| 107 |
+
# ... finally, if the right-hand operand is not awesome enough,
|
| 108 |
+
# try to do a normal integer multiplication
|
| 109 |
+
if hasattr(self, '_int'):
|
| 110 |
+
return self._int * other
|
| 111 |
+
else:
|
| 112 |
+
raise NotImplementedError("To do non-awesome things with"
|
| 113 |
+
" this object, please construct it from an integer!")
|
| 114 |
+
|
| 115 |
+
s = json.dumpsJSON([0, 1, 2], indent=AwesomeInt(3))
|
| 116 |
+
self.assertEqual(s, '[\n 0,\n 1,\n 2\n]')
|
| 117 |
+
|
| 118 |
+
def test_accumulator(self):
|
| 119 |
+
# the C API uses an accumulator that collects after 100,000 appends
|
| 120 |
+
lst = [0] * 100000
|
| 121 |
+
self.assertEqual(json.loads(json.dumpsJSON(lst)), lst)
|
| 122 |
+
|
| 123 |
+
def test_sort_keys(self):
|
| 124 |
+
# https://github.com/simplejson/simplejson/issues/106
|
| 125 |
+
for num_keys in range(2, 32):
|
| 126 |
+
p = dict((str(x), x) for x in range(num_keys))
|
| 127 |
+
sio = StringIO()
|
| 128 |
+
json.dumpJSON(p, sio, sort_keys=True)
|
| 129 |
+
self.assertEqual(sio.getvalue(), json.dumpsJSON(p, sort_keys=True))
|
| 130 |
+
self.assertEqual(json.loads(sio.getvalue()), p)
|
evalkit_tf446/lib/python3.10/site-packages/hjson/tests/test_encode_basestring_ascii.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import TestCase
|
| 2 |
+
|
| 3 |
+
import hjson.encoder
|
| 4 |
+
from hjson.compat import b
|
| 5 |
+
|
| 6 |
+
CASES = [
|
| 7 |
+
(u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
|
| 8 |
+
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
|
| 9 |
+
(u'controls', '"controls"'),
|
| 10 |
+
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
|
| 11 |
+
(u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
|
| 12 |
+
(u' s p a c e d ', '" s p a c e d "'),
|
| 13 |
+
(u'\U0001d120', '"\\ud834\\udd20"'),
|
| 14 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 15 |
+
(b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
|
| 16 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 17 |
+
(b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
|
| 18 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 19 |
+
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
|
| 20 |
+
(u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
|
| 21 |
+
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
|
| 22 |
+
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
|
| 23 |
+
]
|
| 24 |
+
|
| 25 |
+
class TestEncodeBaseStringAscii(TestCase):
|
| 26 |
+
def test_py_encode_basestring_ascii(self):
|
| 27 |
+
self._test_encode_basestring_ascii(hjson.encoder.encode_basestring_ascii)
|
| 28 |
+
|
| 29 |
+
def _test_encode_basestring_ascii(self, encode_basestring_ascii):
|
| 30 |
+
fname = encode_basestring_ascii.__name__
|
| 31 |
+
for input_string, expect in CASES:
|
| 32 |
+
result = encode_basestring_ascii(input_string)
|
| 33 |
+
#self.assertEqual(result, expect,
|
| 34 |
+
# '{0!r} != {1!r} for {2}({3!r})'.format(
|
| 35 |
+
# result, expect, fname, input_string))
|
| 36 |
+
self.assertEqual(result, expect,
|
| 37 |
+
'%r != %r for %s(%r)' % (result, expect, fname, input_string))
|
| 38 |
+
|
| 39 |
+
def test_sorted_dict(self):
|
| 40 |
+
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
|
| 41 |
+
s = hjson.dumpsJSON(dict(items), sort_keys=True)
|
| 42 |
+
self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}')
|