Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
- parrot/lib/python3.10/collections/__init__.py +1556 -0
- parrot/lib/python3.10/collections/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/collections/abc.py +3 -0
- parrot/lib/python3.10/distutils/command/__pycache__/bdist_dumb.cpython-310.pyc +0 -0
- parrot/lib/python3.10/distutils/command/__pycache__/bdist_rpm.cpython-310.pyc +0 -0
- parrot/lib/python3.10/distutils/command/__pycache__/build_ext.cpython-310.pyc +0 -0
- parrot/lib/python3.10/distutils/command/__pycache__/build_py.cpython-310.pyc +0 -0
- parrot/lib/python3.10/distutils/command/__pycache__/clean.cpython-310.pyc +0 -0
- parrot/lib/python3.10/distutils/command/__pycache__/config.cpython-310.pyc +0 -0
- parrot/lib/python3.10/distutils/command/bdist_msi.py +747 -0
- parrot/lib/python3.10/ensurepip/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/ensurepip/__pycache__/__main__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/ensurepip/_bundled/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/ensurepip/_uninstall.py +31 -0
- parrot/lib/python3.10/json/__init__.py +359 -0
- parrot/lib/python3.10/json/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/json/__pycache__/tool.cpython-310.pyc +0 -0
- parrot/lib/python3.10/json/decoder.py +356 -0
- parrot/lib/python3.10/lib2to3/__init__.py +8 -0
- parrot/lib/python3.10/lib2to3/__main__.py +4 -0
- parrot/lib/python3.10/lib2to3/btm_matcher.py +163 -0
- parrot/lib/python3.10/lib2to3/btm_utils.py +281 -0
- parrot/lib/python3.10/lib2to3/fixes/__init__.py +1 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_print.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_standarderror.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_tuple_params.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_asserts.py +34 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_except.py +93 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_execfile.py +53 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_filter.py +94 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_getcwdu.py +19 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_imports2.py +16 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_intern.py +39 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_itertools.py +43 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_long.py +19 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_nonzero.py +21 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_raise.py +90 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_standarderror.py +18 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_unicode.py +42 -0
- parrot/lib/python3.10/lib2to3/main.py +273 -0
- parrot/lib/python3.10/lib2to3/tests/__init__.py +8 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/test_pytree.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/test_util.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/data/fixers/no_fixer_cls.py +1 -0
- parrot/lib/python3.10/lib2to3/tests/pytree_idempotency.py +94 -0
- parrot/lib/python3.10/lib2to3/tests/test_main.py +139 -0
- parrot/lib/python3.10/lib2to3/tests/test_pytree.py +472 -0
- parrot/lib/python3.10/multiprocessing/__init__.py +37 -0
- parrot/lib/python3.10/multiprocessing/__pycache__/context.cpython-310.pyc +0 -0
- parrot/lib/python3.10/multiprocessing/__pycache__/forkserver.cpython-310.pyc +0 -0
parrot/lib/python3.10/collections/__init__.py
ADDED
|
@@ -0,0 +1,1556 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''This module implements specialized container datatypes providing
|
| 2 |
+
alternatives to Python's general purpose built-in containers, dict,
|
| 3 |
+
list, set, and tuple.
|
| 4 |
+
|
| 5 |
+
* namedtuple factory function for creating tuple subclasses with named fields
|
| 6 |
+
* deque list-like container with fast appends and pops on either end
|
| 7 |
+
* ChainMap dict-like class for creating a single view of multiple mappings
|
| 8 |
+
* Counter dict subclass for counting hashable objects
|
| 9 |
+
* OrderedDict dict subclass that remembers the order entries were added
|
| 10 |
+
* defaultdict dict subclass that calls a factory function to supply missing values
|
| 11 |
+
* UserDict wrapper around dictionary objects for easier dict subclassing
|
| 12 |
+
* UserList wrapper around list objects for easier list subclassing
|
| 13 |
+
* UserString wrapper around string objects for easier string subclassing
|
| 14 |
+
|
| 15 |
+
'''
|
| 16 |
+
|
| 17 |
+
__all__ = [
|
| 18 |
+
'ChainMap',
|
| 19 |
+
'Counter',
|
| 20 |
+
'OrderedDict',
|
| 21 |
+
'UserDict',
|
| 22 |
+
'UserList',
|
| 23 |
+
'UserString',
|
| 24 |
+
'defaultdict',
|
| 25 |
+
'deque',
|
| 26 |
+
'namedtuple',
|
| 27 |
+
]
|
| 28 |
+
|
| 29 |
+
import _collections_abc
|
| 30 |
+
import sys as _sys
|
| 31 |
+
|
| 32 |
+
from itertools import chain as _chain
|
| 33 |
+
from itertools import repeat as _repeat
|
| 34 |
+
from itertools import starmap as _starmap
|
| 35 |
+
from keyword import iskeyword as _iskeyword
|
| 36 |
+
from operator import eq as _eq
|
| 37 |
+
from operator import itemgetter as _itemgetter
|
| 38 |
+
from reprlib import recursive_repr as _recursive_repr
|
| 39 |
+
from _weakref import proxy as _proxy
|
| 40 |
+
|
| 41 |
+
try:
|
| 42 |
+
from _collections import deque
|
| 43 |
+
except ImportError:
|
| 44 |
+
pass
|
| 45 |
+
else:
|
| 46 |
+
_collections_abc.MutableSequence.register(deque)
|
| 47 |
+
|
| 48 |
+
try:
|
| 49 |
+
from _collections import defaultdict
|
| 50 |
+
except ImportError:
|
| 51 |
+
pass
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
################################################################################
|
| 55 |
+
### OrderedDict
|
| 56 |
+
################################################################################
|
| 57 |
+
|
| 58 |
+
class _OrderedDictKeysView(_collections_abc.KeysView):
|
| 59 |
+
|
| 60 |
+
def __reversed__(self):
|
| 61 |
+
yield from reversed(self._mapping)
|
| 62 |
+
|
| 63 |
+
class _OrderedDictItemsView(_collections_abc.ItemsView):
|
| 64 |
+
|
| 65 |
+
def __reversed__(self):
|
| 66 |
+
for key in reversed(self._mapping):
|
| 67 |
+
yield (key, self._mapping[key])
|
| 68 |
+
|
| 69 |
+
class _OrderedDictValuesView(_collections_abc.ValuesView):
|
| 70 |
+
|
| 71 |
+
def __reversed__(self):
|
| 72 |
+
for key in reversed(self._mapping):
|
| 73 |
+
yield self._mapping[key]
|
| 74 |
+
|
| 75 |
+
class _Link(object):
    # Node of OrderedDict's circular doubly linked list.
    # __slots__ keeps instances small (no per-instance __dict__);
    # '__weakref__' must be listed because 'prev' pointers are stored
    # as weakref proxies to avoid reference cycles.
    __slots__ = 'prev', 'next', 'key', '__weakref__'
|
| 77 |
+
|
| 78 |
+
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
    # The prev links are weakref proxies (to prevent circular references).
    # Individual links are kept alive by the hard reference in self.__map.
    # Those hard references disappear when a key is deleted from an OrderedDict.

    def __init__(self, other=(), /, **kwds):
        '''Initialize an ordered dictionary.  The signature is the same as
        regular dictionaries.  Keyword argument order is preserved.
        '''
        try:
            self.__root
        except AttributeError:
            # First initialization: create the sentinel link (held hard in
            # __hardroot, referenced elsewhere through a weakref proxy) and
            # the key -> link map.
            self.__hardroot = _Link()
            self.__root = root = _proxy(self.__hardroot)
            root.prev = root.next = root
            self.__map = {}
        self.__update(other, **kwds)

    def __setitem__(self, key, value,
                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            self.__map[key] = link = Link()
            root = self.__root
            last = root.prev
            link.prev, link.next, link.key = last, root, key
            last.next = link
            # 'prev' pointers are weak proxies, so this new tail link does
            # not create a reference cycle through the sentinel.
            root.prev = proxy(link)
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link = self.__map.pop(key)
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev
        # Drop the unlinked node's pointers so it cannot keep neighbours alive.
        link.prev = None
        link.next = None

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root.next
        while curr is not root:
            yield curr.key
            curr = curr.next

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root.prev
        while curr is not root:
            yield curr.key
            curr = curr.prev

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        root = self.__root
        root.prev = root.next = root
        self.__map.clear()
        dict.clear(self)

    def popitem(self, last=True):
        '''Remove and return a (key, value) pair from the dictionary.

        Pairs are returned in LIFO order if last is true or FIFO order if false.
        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the tail node.
            link = root.prev
            link_prev = link.prev
            link_prev.next = root
            root.prev = link_prev
        else:
            # Unlink the head node.
            link = root.next
            link_next = link.next
            root.next = link_next
            link_next.prev = root
        key = link.key
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last is false).

        Raise KeyError if the element does not exist.
        '''
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        # Capture the existing weak proxy to this link (stored in the
        # successor's 'prev') so it can be reused when relinking; creating
        # a fresh proxy is unnecessary.
        soft_link = link_next.prev
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
        if last:
            last = root.prev
            link.prev = last
            link.next = root
            root.prev = soft_link
            last.next = link
        else:
            first = root.next
            link.prev = root
            link.next = first
            first.prev = soft_link
            root.next = link

    def __sizeof__(self):
        # Estimate total memory: instance dict + both dicts + all link and
        # proxy objects (len(self) real links plus the sentinel).
        sizeof = _sys.getsizeof
        n = len(self) + 1                       # number of links including root
        size = sizeof(self.__dict__)            # instance dictionary
        size += sizeof(self.__map) * 2          # internal dict and inherited dict
        size += sizeof(self.__hardroot) * n     # link objects
        size += sizeof(self.__root) * n         # proxy objects
        return size

    update = __update = _collections_abc.MutableMapping.update

    def keys(self):
        "D.keys() -> a set-like object providing a view on D's keys"
        return _OrderedDictKeysView(self)

    def items(self):
        "D.items() -> a set-like object providing a view on D's items"
        return _OrderedDictItemsView(self)

    def values(self):
        "D.values() -> an object providing a view on D's values"
        return _OrderedDictValuesView(self)

    __ne__ = _collections_abc.MutableMapping.__ne__

    # Sentinel distinguishing "no default supplied" from an explicit None.
    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value.  If key is not found, d is returned if given, otherwise KeyError
        is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        '''Insert key with a value of default if key is not in the dictionary.

        Return the value for key if key is in the dictionary, else default.
        '''
        if key in self:
            return self[key]
        self[key] = default
        return default

    @_recursive_repr()
    def __repr__(self):
        'od.__repr__() <==> repr(od)'
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self.items()))

    def __reduce__(self):
        'Return state information for pickling'
        # Strip attributes that every fresh OrderedDict has (the private
        # linked-list machinery) so only user-added state is pickled.
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        return self.__class__, (), inst_dict or None, None, iter(self.items())

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''Create a new ordered dictionary with keys from iterable and values set to value.
        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            # Same contents AND same key order (pairwise key equality).
            return dict.__eq__(self, other) and all(map(_eq, self, other))
        return dict.__eq__(self, other)

    def __ior__(self, other):
        # od |= other: in-place merge, preserving self's identity.
        self.update(other)
        return self

    def __or__(self, other):
        # od | other: new mapping of self's type, other's values win.
        if not isinstance(other, dict):
            return NotImplemented
        new = self.__class__(self)
        new.update(other)
        return new

    def __ror__(self, other):
        # other | od: new mapping of self's type, self's values win.
        if not isinstance(other, dict):
            return NotImplemented
        new = self.__class__(other)
        new.update(self)
        return new
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
# Prefer the C-accelerated OrderedDict from _collections when available;
# it replaces the pure Python class defined above.
try:
    from _collections import OrderedDict
except ImportError:
    # Leave the pure Python version in place.
    pass
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
################################################################################
|
| 320 |
+
### namedtuple
|
| 321 |
+
################################################################################
|
| 322 |
+
|
| 323 |
+
# Descriptor used for namedtuple field access.  The C version from
# _collections is preferred; the fallback wraps operator.itemgetter in
# a property so attribute access still works, just more slowly.
try:
    from _collections import _tuplegetter
except ImportError:
    _tuplegetter = lambda index, doc: property(_itemgetter(index), doc=doc)
|
| 327 |
+
|
| 328 |
+
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
    """Returns a new subclass of tuple with named fields.

    >>> Point = namedtuple('Point', ['x', 'y'])
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessible by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d)                      # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)

    """

    # Validate the field names.  At the user's option, either generate an error
    # message or automatically replace the field name with a valid name.
    if isinstance(field_names, str):
        # Accept a single comma- and/or space-separated string of names.
        field_names = field_names.replace(',', ' ').split()
    field_names = list(map(str, field_names))
    typename = _sys.intern(str(typename))

    if rename:
        seen = set()
        for index, name in enumerate(field_names):
            # Replace any invalid, keyword, underscore-prefixed, or
            # duplicate name with a positional placeholder like '_3'.
            if (not name.isidentifier()
                or _iskeyword(name)
                or name.startswith('_')
                or name in seen):
                field_names[index] = f'_{index}'
            seen.add(name)

    for name in [typename] + field_names:
        if type(name) is not str:
            raise TypeError('Type names and field names must be strings')
        if not name.isidentifier():
            raise ValueError('Type names and field names must be valid '
                             f'identifiers: {name!r}')
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             f'keyword: {name!r}')

    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             f'{name!r}')
        if name in seen:
            raise ValueError(f'Encountered duplicate field name: {name!r}')
        seen.add(name)

    field_defaults = {}
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')
        # Defaults apply to the rightmost fields, so pair them up from
        # the end of both sequences.
        field_defaults = dict(reversed(list(zip(reversed(field_names),
                                                reversed(defaults)))))

    # Variables used in the methods and docstrings
    field_names = tuple(map(_sys.intern, field_names))
    num_fields = len(field_names)
    arg_list = ', '.join(field_names)
    if num_fields == 1:
        # A one-element tuple display needs a trailing comma.
        arg_list += ','
    repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
    tuple_new = tuple.__new__
    _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip

    # Create all the named tuple methods to be added to the class namespace

    namespace = {
        '_tuple_new': tuple_new,
        # Empty __builtins__ restricts what the eval'ed lambda can reach.
        '__builtins__': {},
        '__name__': f'namedtuple_{typename}',
    }
    # Build __new__ via eval so its signature shows the real field names.
    code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
    __new__ = eval(code, namespace)
    __new__.__name__ = '__new__'
    __new__.__doc__ = f'Create new instance of {typename}({arg_list})'
    if defaults is not None:
        __new__.__defaults__ = defaults

    @classmethod
    def _make(cls, iterable):
        result = tuple_new(cls, iterable)
        if _len(result) != num_fields:
            raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
        return result

    _make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
                              'or iterable')

    def _replace(self, /, **kwds):
        # kwds.pop substitutes replacement values; anything left over in
        # kwds was not a real field name.
        result = self._make(_map(kwds.pop, field_names, self))
        if kwds:
            raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
        return result

    _replace.__doc__ = (f'Return a new {typename} object replacing specified '
                        'fields with new values')

    def __repr__(self):
        'Return a nicely formatted representation string'
        return self.__class__.__name__ + repr_fmt % self

    def _asdict(self):
        'Return a new dict which maps field names to their values.'
        return _dict(_zip(self._fields, self))

    def __getnewargs__(self):
        'Return self as a plain tuple.  Used by copy and pickle.'
        return _tuple(self)

    # Modify function metadata to help with introspection and debugging
    for method in (
        __new__,
        _make.__func__,
        _replace,
        __repr__,
        _asdict,
        __getnewargs__,
    ):
        method.__qualname__ = f'{typename}.{method.__name__}'

    # Build-up the class namespace dictionary
    # and use type() to build the result class
    class_namespace = {
        '__doc__': f'{typename}({arg_list})',
        '__slots__': (),
        '_fields': field_names,
        '_field_defaults': field_defaults,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__getnewargs__': __getnewargs__,
        '__match_args__': field_names,
    }
    for index, name in enumerate(field_names):
        doc = _sys.intern(f'Alias for field number {index}')
        class_namespace[name] = _tuplegetter(index, doc)

    result = type(typename, (tuple,), class_namespace)

    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created.  Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython), or where the user has
    # specified a particular module.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    if module is not None:
        result.__module__ = module

    return result
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
########################################################################
|
| 500 |
+
### Counter
|
| 501 |
+
########################################################################
|
| 502 |
+
|
| 503 |
+
def _count_elements(mapping, iterable):
|
| 504 |
+
'Tally elements from the iterable.'
|
| 505 |
+
mapping_get = mapping.get
|
| 506 |
+
for elem in iterable:
|
| 507 |
+
mapping[elem] = mapping_get(elem, 0) + 1
|
| 508 |
+
|
| 509 |
+
try: # Load C helper function if available
|
| 510 |
+
from _collections import _count_elements
|
| 511 |
+
except ImportError:
|
| 512 |
+
pass
|
| 513 |
+
|
| 514 |
+
class Counter(dict):
    '''Dict subclass for counting hashable items.  Sometimes called a bag
    or multiset.  Elements are stored as dictionary keys and their counts
    are stored as dictionary values.

    >>> c = Counter('abcdeabcdabcaba')  # count elements from a string

    >>> c.most_common(3)                # three most common elements
    [('a', 5), ('b', 4), ('c', 3)]
    >>> sorted(c)                       # list all unique elements
    ['a', 'b', 'c', 'd', 'e']
    >>> ''.join(sorted(c.elements()))   # list elements with repetitions
    'aaaaabbbbcccdde'
    >>> sum(c.values())                 # total of all counts
    15

    >>> c['a']                          # count of letter 'a'
    5
    >>> for elem in 'shazam':           # update counts from an iterable
    ...     c[elem] += 1                # by adding 1 to each element's count
    >>> c['a']                          # now there are seven 'a'
    7
    >>> del c['b']                      # remove all 'b'
    >>> c['b']                          # now there are zero 'b'
    0

    >>> d = Counter('simsalabim')       # make another counter
    >>> c.update(d)                     # add in the second counter
    >>> c['a']                          # now there are nine 'a'
    9

    >>> c.clear()                       # empty the counter
    >>> c
    Counter()

    Note:  If a count is set to zero or reduced to zero, it will remain
    in the counter until the entry is deleted or the counter is cleared:

    >>> c = Counter('aaabbc')
    >>> c['b'] -= 2                     # reduce the count of 'b' by two
    >>> c.most_common()                 # 'b' is still in, but its count is zero
    [('a', 3), ('c', 1), ('b', 0)]

    '''
    # References:
    #   http://en.wikipedia.org/wiki/Multiset
    #   http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
    #   http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
    #   http://code.activestate.com/recipes/259174/
    #   Knuth, TAOCP Vol. II section 4.6.3

    def __init__(self, iterable=None, /, **kwds):
        '''Create a new, empty Counter object.  And if given, count elements
        from an input iterable.  Or, initialize the count from another mapping
        of elements to their counts.

        >>> c = Counter()                           # a new, empty counter
        >>> c = Counter('gallahad')                 # a new counter from an iterable
        >>> c = Counter({'a': 4, 'b': 2})           # a new counter from a mapping
        >>> c = Counter(a=4, b=2)                   # a new counter from keyword args

        '''
        super().__init__()
        self.update(iterable, **kwds)

    def __missing__(self, key):
        'The count of elements not in the Counter is zero.'
        # Needed so that self[missing_item] does not raise KeyError
        return 0

    def total(self):
        'Sum of the counts'
        return sum(self.values())

    def most_common(self, n=None):
        '''List the n most common elements and their counts from the most
        common to the least.  If n is None, then list all element counts.

        >>> Counter('abracadabra').most_common(3)
        [('a', 5), ('b', 2), ('r', 2)]

        '''
        # Emulate Bag.sortedByCount from Smalltalk
        if n is None:
            return sorted(self.items(), key=_itemgetter(1), reverse=True)

        # Lazy import to speedup Python startup time
        import heapq
        # heapq.nlargest is O(len(self) * log n), cheaper than a full sort
        # when only the top n entries are wanted.
        return heapq.nlargest(n, self.items(), key=_itemgetter(1))

    def elements(self):
        '''Iterator over elements repeating each as many times as its count.

        >>> c = Counter('ABCABC')
        >>> sorted(c.elements())
        ['A', 'A', 'B', 'B', 'C', 'C']

        # Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
        >>> product = 1
        >>> for factor in prime_factors.elements():     # loop over factors
        ...     product *= factor                       # and multiply them
        >>> product
        1836

        Note, if an element's count has been set to zero or is a negative
        number, elements() will ignore it.

        '''
        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
        # _repeat(elem, count) yields nothing when count <= 0, which is
        # what silently skips zero and negative counts.
        return _chain.from_iterable(_starmap(_repeat, self.items()))

    # Override dict methods where necessary

    @classmethod
    def fromkeys(cls, iterable, v=None):
        # There is no equivalent method for counters because the semantics
        # would be ambiguous in cases such as Counter.fromkeys('aaabbc', v=2).
        # Initializing counters to zero values isn't necessary because zero
        # is already the default value for counter lookups.  Initializing
        # to one is easily accomplished with Counter(set(iterable)).  For
        # more exotic cases, create a dictionary first using a dictionary
        # comprehension or dict.fromkeys().
        raise NotImplementedError(
            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')

    def update(self, iterable=None, /, **kwds):
        '''Like dict.update() but add counts instead of replacing them.

        Source can be an iterable, a dictionary, or another Counter instance.

        >>> c = Counter('which')
        >>> c.update('witch')           # add elements from another iterable
        >>> d = Counter('watch')
        >>> c.update(d)                 # add elements from another counter
        >>> c['h']                      # four 'h' in which, witch, and watch
        4

        '''
        # The regular dict.update() operation makes no sense here because the
        # replace behavior results in some of the original untouched counts
        # being mixed-in with all of the other counts for a mishmash that
        # doesn't have a straight-forward interpretation in most counting
        # contexts.  Instead, we implement straight-addition.  Both the inputs
        # and outputs are allowed to contain zero and negative counts.

        if iterable is not None:
            if isinstance(iterable, _collections_abc.Mapping):
                if self:
                    self_get = self.get
                    for elem, count in iterable.items():
                        self[elem] = count + self_get(elem, 0)
                else:
                    # fast path when counter is empty
                    super().update(iterable)
            else:
                _count_elements(self, iterable)
        if kwds:
            # Keyword arguments are folded in through a recursive call with
            # the kwds dict taking the Mapping branch above.
            self.update(kwds)

    def subtract(self, iterable=None, /, **kwds):
        '''Like dict.update() but subtracts counts instead of replacing them.
        Counts can be reduced below zero.  Both the inputs and outputs are
        allowed to contain zero and negative counts.

        Source can be an iterable, a dictionary, or another Counter instance.

        >>> c = Counter('which')
        >>> c.subtract('witch')             # subtract elements from another iterable
        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
        0
        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
        -1

        '''
        if iterable is not None:
            self_get = self.get
            if isinstance(iterable, _collections_abc.Mapping):
                for elem, count in iterable.items():
                    self[elem] = self_get(elem, 0) - count
            else:
                for elem in iterable:
                    self[elem] = self_get(elem, 0) - 1
        if kwds:
            self.subtract(kwds)

    def copy(self):
        'Return a shallow copy.'
        # Uses self.__class__ so subclasses copy to their own type.
        return self.__class__(self)

    def __reduce__(self):
        # Pickle as (class, (plain-dict,)) so unpickling rebuilds via __init__.
        return self.__class__, (dict(self),)

    def __delitem__(self, elem):
        'Like dict.__delitem__() but does not raise KeyError for missing values.'
        if elem in self:
            super().__delitem__(elem)

    def __eq__(self, other):
        'True if all counts agree. Missing counts are treated as zero.'
        if not isinstance(other, Counter):
            return NotImplemented
        # Iterate over both counters so an entry present in only one of them
        # (with a nonzero count) still causes inequality.
        return all(self[e] == other[e] for c in (self, other) for e in c)

    def __ne__(self, other):
        'True if any counts disagree. Missing counts are treated as zero.'
        if not isinstance(other, Counter):
            return NotImplemented
        return not self == other

    def __le__(self, other):
        'True if all counts in self are a subset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] <= other[e] for c in (self, other) for e in c)

    def __lt__(self, other):
        'True if all counts in self are a proper subset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return self <= other and self != other

    def __ge__(self, other):
        'True if all counts in self are a superset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] >= other[e] for c in (self, other) for e in c)

    def __gt__(self, other):
        'True if all counts in self are a proper superset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return self >= other and self != other

    def __repr__(self):
        if not self:
            return f'{self.__class__.__name__}()'
        try:
            # dict() preserves the ordering returned by most_common()
            d = dict(self.most_common())
        except TypeError:
            # handle case where values are not orderable
            d = dict(self)
        return f'{self.__class__.__name__}({d!r})'

    # Multiset-style mathematical operations discussed in:
    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
    #       and at http://en.wikipedia.org/wiki/Multiset
    #
    # Outputs guaranteed to only include positive counts.
    #
    # To strip negative and zero counts, add-in an empty counter:
    #       c += Counter()
    #
    # Results are ordered according to when an element is first
    # encountered in the left operand and then by the order
    # encountered in the right operand.
    #
    # When the multiplicities are all zero or one, multiset operations
    # are guaranteed to be equivalent to the corresponding operations
    # for regular sets.
    #     Given counter multisets such as:
    #         cp = Counter(a=1, b=0, c=1)
    #         cq = Counter(c=1, d=0, e=1)
    #     The corresponding regular sets would be:
    #         sp = {'a', 'c'}
    #         sq = {'c', 'e'}
    #     All of the following relations would hold:
    #         set(cp + cq) == sp | sq
    #         set(cp - cq) == sp - sq
    #         set(cp | cq) == sp | sq
    #         set(cp & cq) == sp & sq
    #         (cp == cq) == (sp == sq)
    #         (cp != cq) == (sp != sq)
    #         (cp <= cq) == (sp <= sq)
    #         (cp < cq) == (sp < sq)
    #         (cp >= cq) == (sp >= sq)
    #         (cp > cq) == (sp > sq)

    def __add__(self, other):
        '''Add counts from two counters.

        >>> Counter('abbb') + Counter('bcc')
        Counter({'b': 4, 'c': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count + other[elem]
            if newcount > 0:
                result[elem] = newcount
        # Pick up elements that appear only in the right operand.
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __sub__(self, other):
        ''' Subtract count, but keep only results with positive counts.

        >>> Counter('abbbc') - Counter('bccd')
        Counter({'b': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count - other[elem]
            if newcount > 0:
                result[elem] = newcount
        # A negative count on the right alone yields a positive difference.
        for elem, count in other.items():
            if elem not in self and count < 0:
                result[elem] = 0 - count
        return result

    def __or__(self, other):
        '''Union is the maximum of value in either of the input counters.

        >>> Counter('abbb') | Counter('bcc')
        Counter({'b': 3, 'c': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = other_count if count < other_count else count
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __and__(self, other):
        ''' Intersection is the minimum of corresponding counts.

        >>> Counter('abbb') & Counter('bcc')
        Counter({'b': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        # Only elements of self can appear in the minimum, so one pass suffices.
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result

    def __pos__(self):
        'Adds an empty counter, effectively stripping negative and zero counts'
        result = Counter()
        for elem, count in self.items():
            if count > 0:
                result[elem] = count
        return result

    def __neg__(self):
        '''Subtracts from an empty counter.  Strips positive and zero counts,
        and flips the sign on negative counts.

        '''
        result = Counter()
        for elem, count in self.items():
            if count < 0:
                result[elem] = 0 - count
        return result

    def _keep_positive(self):
        '''Internal method to strip elements with a negative or zero count'''
        # Collect first, then delete, so we never mutate while iterating.
        nonpositive = [elem for elem, count in self.items() if not count > 0]
        for elem in nonpositive:
            del self[elem]
        return self

    def __iadd__(self, other):
        '''Inplace add from another counter, keeping only positive counts.

        >>> c = Counter('abbb')
        >>> c += Counter('bcc')
        >>> c
        Counter({'b': 4, 'c': 2, 'a': 1})

        '''
        for elem, count in other.items():
            self[elem] += count
        return self._keep_positive()

    def __isub__(self, other):
        '''Inplace subtract counter, but keep only results with positive counts.

        >>> c = Counter('abbbc')
        >>> c -= Counter('bccd')
        >>> c
        Counter({'b': 2, 'a': 1})

        '''
        for elem, count in other.items():
            self[elem] -= count
        return self._keep_positive()

    def __ior__(self, other):
        '''Inplace union is the maximum of value from either counter.

        >>> c = Counter('abbb')
        >>> c |= Counter('bcc')
        >>> c
        Counter({'b': 3, 'c': 2, 'a': 1})

        '''
        for elem, other_count in other.items():
            count = self[elem]
            if other_count > count:
                self[elem] = other_count
        return self._keep_positive()

    def __iand__(self, other):
        '''Inplace intersection is the minimum of corresponding counts.

        >>> c = Counter('abbb')
        >>> c &= Counter('bcc')
        >>> c
        Counter({'b': 1})

        '''
        # Safe to assign while iterating: only values of existing keys change,
        # so the dict's size (and iteration order) is untouched.
        for elem, count in self.items():
            other_count = other[elem]
            if other_count < count:
                self[elem] = other_count
        return self._keep_positive()
|
| 950 |
+
|
| 951 |
+
|
| 952 |
+
########################################################################
|
| 953 |
+
### ChainMap
|
| 954 |
+
########################################################################
|
| 955 |
+
|
| 956 |
+
class ChainMap(_collections_abc.MutableMapping):
    ''' A ChainMap groups multiple dicts (or other mappings) together
    to create a single, updateable view.

    The underlying mappings are stored in a list.  That list is public and can
    be accessed or updated using the *maps* attribute.  There is no other
    state.

    Lookups search the underlying mappings successively until a key is found.
    In contrast, writes, updates, and deletions only operate on the first
    mapping.

    '''

    def __init__(self, *maps):
        '''Initialize a ChainMap by setting *maps* to the given mappings.
        If no mappings are provided, a single empty dictionary is used.

        '''
        self.maps = list(maps) or [{}]          # always at least one map

    def __missing__(self, key):
        # Hook for subclasses; called by __getitem__ when no map has the key.
        raise KeyError(key)

    def __getitem__(self, key):
        for mapping in self.maps:
            try:
                return mapping[key]             # can't use 'key in mapping' with defaultdict
            except KeyError:
                pass
        return self.__missing__(key)            # support subclasses that define __missing__

    def get(self, key, default=None):
        return self[key] if key in self else default

    def __len__(self):
        return len(set().union(*self.maps))     # reuses stored hash values if possible

    def __iter__(self):
        # Build from last map to first so the first map's ordering wins,
        # matching the lookup precedence.
        d = {}
        for mapping in reversed(self.maps):
            d.update(dict.fromkeys(mapping))    # reuses stored hash values if possible
        return iter(d)

    def __contains__(self, key):
        return any(key in m for m in self.maps)

    def __bool__(self):
        return any(self.maps)

    @_recursive_repr()
    def __repr__(self):
        return f'{self.__class__.__name__}({", ".join(map(repr, self.maps))})'

    @classmethod
    def fromkeys(cls, iterable, *args):
        'Create a ChainMap with a single dict created from the iterable.'
        return cls(dict.fromkeys(iterable, *args))

    def copy(self):
        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
        return self.__class__(self.maps[0].copy(), *self.maps[1:])

    __copy__ = copy

    def new_child(self, m=None, **kwargs):      # like Django's Context.push()
        '''New ChainMap with a new map followed by all previous maps.
        If no map is provided, an empty dict is used.
        Keyword arguments update the map or new empty dict.
        '''
        if m is None:
            m = kwargs
        elif kwargs:
            m.update(kwargs)
        return self.__class__(m, *self.maps)

    @property
    def parents(self):                          # like Django's Context.pop()
        'New ChainMap from maps[1:].'
        return self.__class__(*self.maps[1:])

    def __setitem__(self, key, value):
        # Writes affect only the first mapping.
        self.maps[0][key] = value

    def __delitem__(self, key):
        try:
            del self.maps[0][key]
        except KeyError:
            raise KeyError(f'Key not found in the first mapping: {key!r}')

    def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
        try:
            return self.maps[0].popitem()
        except KeyError:
            raise KeyError('No keys found in the first mapping.')

    def pop(self, key, *args):
        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
        try:
            return self.maps[0].pop(key, *args)
        except KeyError:
            raise KeyError(f'Key not found in the first mapping: {key!r}')

    def clear(self):
        'Clear maps[0], leaving maps[1:] intact.'
        self.maps[0].clear()

    def __ior__(self, other):
        # In-place |= merges into the first mapping only.
        self.maps[0].update(other)
        return self

    def __or__(self, other):
        if not isinstance(other, _collections_abc.Mapping):
            return NotImplemented
        m = self.copy()
        m.maps[0].update(other)
        return m

    def __ror__(self, other):
        if not isinstance(other, _collections_abc.Mapping):
            return NotImplemented
        # Flatten self on top of *other*, preserving lookup precedence,
        # and return a single-map ChainMap of the result.
        m = dict(other)
        for child in reversed(self.maps):
            m.update(child)
        return self.__class__(m)
|
| 1082 |
+
|
| 1083 |
+
|
| 1084 |
+
################################################################################
|
| 1085 |
+
### UserDict
|
| 1086 |
+
################################################################################
|
| 1087 |
+
|
| 1088 |
+
class UserDict(_collections_abc.MutableMapping):
    """A more or less complete user-defined wrapper around dictionary objects.

    The wrapped dictionary is accessible via the public ``data`` attribute.
    """

    # Start by filling-out the abstract methods
    def __init__(self, dict=None, /, **kwargs):
        self.data = {}
        if dict is not None:
            self.update(dict)
        if kwargs:
            self.update(kwargs)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key):
        if key in self.data:
            return self.data[key]
        # Give subclasses the same __missing__ hook that dict provides.
        if hasattr(self.__class__, "__missing__"):
            return self.__class__.__missing__(self, key)
        raise KeyError(key)

    def __setitem__(self, key, item):
        self.data[key] = item

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    # Modify __contains__ to work correctly when __missing__ is present
    def __contains__(self, key):
        return key in self.data

    # Now, add the methods in dicts but not in MutableMapping
    def __repr__(self):
        return repr(self.data)

    def __or__(self, other):
        if isinstance(other, UserDict):
            return self.__class__(self.data | other.data)
        if isinstance(other, dict):
            return self.__class__(self.data | other)
        return NotImplemented

    def __ror__(self, other):
        if isinstance(other, UserDict):
            return self.__class__(other.data | self.data)
        if isinstance(other, dict):
            return self.__class__(other | self.data)
        return NotImplemented

    def __ior__(self, other):
        if isinstance(other, UserDict):
            self.data |= other.data
        else:
            self.data |= other
        return self

    def __copy__(self):
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors
        inst.__dict__["data"] = self.__dict__["data"].copy()
        return inst

    def copy(self):
        if self.__class__ is UserDict:
            # Fast path for the base class itself.
            return UserDict(self.data.copy())
        import copy
        # For subclasses: temporarily swap out self.data so copy.copy()
        # does not duplicate it, then restore and re-populate the copy
        # through update() so subclass __setitem__ hooks are honored.
        data = self.data
        try:
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d
|
| 1172 |
+
|
| 1173 |
+
|
| 1174 |
+
################################################################################
|
| 1175 |
+
### UserList
|
| 1176 |
+
################################################################################
|
| 1177 |
+
|
| 1178 |
+
class UserList(_collections_abc.MutableSequence):
    """A more or less complete user-defined wrapper around list objects."""

    def __init__(self, initlist=None):
        self.data = []
        if initlist is None:
            return
        # XXX should this accept an arbitrary sequence?
        if type(initlist) == type(self.data):
            self.data[:] = initlist
        elif isinstance(initlist, UserList):
            self.data[:] = initlist.data[:]
        else:
            self.data = list(initlist)

    def __repr__(self):
        return repr(self.data)

    def _unwrap(self, other):
        # Comparisons are delegated to the underlying lists, so peel off
        # the UserList wrapper when the other operand has one.
        if isinstance(other, UserList):
            return other.data
        return other

    def __lt__(self, other):
        return self.data < self._unwrap(other)

    def __le__(self, other):
        return self.data <= self._unwrap(other)

    def __eq__(self, other):
        return self.data == self._unwrap(other)

    def __gt__(self, other):
        return self.data > self._unwrap(other)

    def __ge__(self, other):
        return self.data >= self._unwrap(other)

    def __contains__(self, item):
        return item in self.data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        # Slicing returns a new instance of the same class.
        return self.__class__(self.data[i]) if isinstance(i, slice) else self.data[i]

    def __setitem__(self, i, item):
        self.data[i] = item

    def __delitem__(self, i):
        del self.data[i]

    def __add__(self, other):
        if isinstance(other, UserList):
            other = other.data
        elif not isinstance(other, type(self.data)):
            other = list(other)
        return self.__class__(self.data + other)

    def __radd__(self, other):
        if isinstance(other, UserList):
            other = other.data
        elif not isinstance(other, type(self.data)):
            other = list(other)
        return self.__class__(other + self.data)

    def __iadd__(self, other):
        if isinstance(other, UserList):
            other = other.data
        elif not isinstance(other, type(self.data)):
            other = list(other)
        self.data += other
        return self

    def __mul__(self, n):
        return self.__class__(self.data * n)

    __rmul__ = __mul__

    def __imul__(self, n):
        self.data *= n
        return self

    def __copy__(self):
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors
        inst.__dict__["data"] = self.__dict__["data"][:]
        return inst

    def append(self, item):
        self.data.append(item)

    def insert(self, i, item):
        self.data.insert(i, item)

    def pop(self, i=-1):
        return self.data.pop(i)

    def remove(self, item):
        self.data.remove(item)

    def clear(self):
        self.data.clear()

    def copy(self):
        return self.__class__(self)

    def count(self, item):
        return self.data.count(item)

    def index(self, item, *args):
        return self.data.index(item, *args)

    def reverse(self):
        self.data.reverse()

    def sort(self, /, *args, **kwds):
        self.data.sort(*args, **kwds)

    def extend(self, other):
        self.data.extend(other.data if isinstance(other, UserList) else other)
|
| 1305 |
+
|
| 1306 |
+
|
| 1307 |
+
################################################################################
|
| 1308 |
+
### UserString
|
| 1309 |
+
################################################################################
|
| 1310 |
+
|
| 1311 |
+
class UserString(_collections_abc.Sequence):
|
| 1312 |
+
|
| 1313 |
+
def __init__(self, seq):
|
| 1314 |
+
if isinstance(seq, str):
|
| 1315 |
+
self.data = seq
|
| 1316 |
+
elif isinstance(seq, UserString):
|
| 1317 |
+
self.data = seq.data[:]
|
| 1318 |
+
else:
|
| 1319 |
+
self.data = str(seq)
|
| 1320 |
+
|
| 1321 |
+
def __str__(self):
|
| 1322 |
+
return str(self.data)
|
| 1323 |
+
|
| 1324 |
+
def __repr__(self):
|
| 1325 |
+
return repr(self.data)
|
| 1326 |
+
|
| 1327 |
+
def __int__(self):
|
| 1328 |
+
return int(self.data)
|
| 1329 |
+
|
| 1330 |
+
def __float__(self):
|
| 1331 |
+
return float(self.data)
|
| 1332 |
+
|
| 1333 |
+
def __complex__(self):
|
| 1334 |
+
return complex(self.data)
|
| 1335 |
+
|
| 1336 |
+
def __hash__(self):
|
| 1337 |
+
return hash(self.data)
|
| 1338 |
+
|
| 1339 |
+
def __getnewargs__(self):
|
| 1340 |
+
return (self.data[:],)
|
| 1341 |
+
|
| 1342 |
+
def __eq__(self, string):
|
| 1343 |
+
if isinstance(string, UserString):
|
| 1344 |
+
return self.data == string.data
|
| 1345 |
+
return self.data == string
|
| 1346 |
+
|
| 1347 |
+
def __lt__(self, string):
|
| 1348 |
+
if isinstance(string, UserString):
|
| 1349 |
+
return self.data < string.data
|
| 1350 |
+
return self.data < string
|
| 1351 |
+
|
| 1352 |
+
def __le__(self, string):
|
| 1353 |
+
if isinstance(string, UserString):
|
| 1354 |
+
return self.data <= string.data
|
| 1355 |
+
return self.data <= string
|
| 1356 |
+
|
| 1357 |
+
def __gt__(self, string):
|
| 1358 |
+
if isinstance(string, UserString):
|
| 1359 |
+
return self.data > string.data
|
| 1360 |
+
return self.data > string
|
| 1361 |
+
|
| 1362 |
+
def __ge__(self, string):
|
| 1363 |
+
if isinstance(string, UserString):
|
| 1364 |
+
return self.data >= string.data
|
| 1365 |
+
return self.data >= string
|
| 1366 |
+
|
| 1367 |
+
def __contains__(self, char):
|
| 1368 |
+
if isinstance(char, UserString):
|
| 1369 |
+
char = char.data
|
| 1370 |
+
return char in self.data
|
| 1371 |
+
|
| 1372 |
+
def __len__(self):
|
| 1373 |
+
return len(self.data)
|
| 1374 |
+
|
| 1375 |
+
def __getitem__(self, index):
|
| 1376 |
+
return self.__class__(self.data[index])
|
| 1377 |
+
|
| 1378 |
+
def __add__(self, other):
|
| 1379 |
+
if isinstance(other, UserString):
|
| 1380 |
+
return self.__class__(self.data + other.data)
|
| 1381 |
+
elif isinstance(other, str):
|
| 1382 |
+
return self.__class__(self.data + other)
|
| 1383 |
+
return self.__class__(self.data + str(other))
|
| 1384 |
+
|
| 1385 |
+
def __radd__(self, other):
|
| 1386 |
+
if isinstance(other, str):
|
| 1387 |
+
return self.__class__(other + self.data)
|
| 1388 |
+
return self.__class__(str(other) + self.data)
|
| 1389 |
+
|
| 1390 |
+
def __mul__(self, n):
|
| 1391 |
+
return self.__class__(self.data * n)
|
| 1392 |
+
|
| 1393 |
+
__rmul__ = __mul__
|
| 1394 |
+
|
| 1395 |
+
def __mod__(self, args):
|
| 1396 |
+
return self.__class__(self.data % args)
|
| 1397 |
+
|
| 1398 |
+
def __rmod__(self, template):
|
| 1399 |
+
return self.__class__(str(template) % self)
|
| 1400 |
+
|
| 1401 |
+
# the following methods are defined in alphabetical order:
|
| 1402 |
+
def capitalize(self):
|
| 1403 |
+
return self.__class__(self.data.capitalize())
|
| 1404 |
+
|
| 1405 |
+
def casefold(self):
|
| 1406 |
+
return self.__class__(self.data.casefold())
|
| 1407 |
+
|
| 1408 |
+
def center(self, width, *args):
|
| 1409 |
+
return self.__class__(self.data.center(width, *args))
|
| 1410 |
+
|
| 1411 |
+
def count(self, sub, start=0, end=_sys.maxsize):
|
| 1412 |
+
if isinstance(sub, UserString):
|
| 1413 |
+
sub = sub.data
|
| 1414 |
+
return self.data.count(sub, start, end)
|
| 1415 |
+
|
| 1416 |
+
def removeprefix(self, prefix, /):
|
| 1417 |
+
if isinstance(prefix, UserString):
|
| 1418 |
+
prefix = prefix.data
|
| 1419 |
+
return self.__class__(self.data.removeprefix(prefix))
|
| 1420 |
+
|
| 1421 |
+
def removesuffix(self, suffix, /):
|
| 1422 |
+
if isinstance(suffix, UserString):
|
| 1423 |
+
suffix = suffix.data
|
| 1424 |
+
return self.__class__(self.data.removesuffix(suffix))
|
| 1425 |
+
|
| 1426 |
+
def encode(self, encoding='utf-8', errors='strict'):
|
| 1427 |
+
encoding = 'utf-8' if encoding is None else encoding
|
| 1428 |
+
errors = 'strict' if errors is None else errors
|
| 1429 |
+
return self.data.encode(encoding, errors)
|
| 1430 |
+
|
| 1431 |
+
def endswith(self, suffix, start=0, end=_sys.maxsize):
|
| 1432 |
+
return self.data.endswith(suffix, start, end)
|
| 1433 |
+
|
| 1434 |
+
def expandtabs(self, tabsize=8):
|
| 1435 |
+
return self.__class__(self.data.expandtabs(tabsize))
|
| 1436 |
+
|
| 1437 |
+
def find(self, sub, start=0, end=_sys.maxsize):
|
| 1438 |
+
if isinstance(sub, UserString):
|
| 1439 |
+
sub = sub.data
|
| 1440 |
+
return self.data.find(sub, start, end)
|
| 1441 |
+
|
| 1442 |
+
def format(self, /, *args, **kwds):
|
| 1443 |
+
return self.data.format(*args, **kwds)
|
| 1444 |
+
|
| 1445 |
+
def format_map(self, mapping):
|
| 1446 |
+
return self.data.format_map(mapping)
|
| 1447 |
+
|
| 1448 |
+
def index(self, sub, start=0, end=_sys.maxsize):
|
| 1449 |
+
return self.data.index(sub, start, end)
|
| 1450 |
+
|
| 1451 |
+
def isalpha(self):
|
| 1452 |
+
return self.data.isalpha()
|
| 1453 |
+
|
| 1454 |
+
def isalnum(self):
|
| 1455 |
+
return self.data.isalnum()
|
| 1456 |
+
|
| 1457 |
+
def isascii(self):
|
| 1458 |
+
return self.data.isascii()
|
| 1459 |
+
|
| 1460 |
+
def isdecimal(self):
|
| 1461 |
+
return self.data.isdecimal()
|
| 1462 |
+
|
| 1463 |
+
def isdigit(self):
|
| 1464 |
+
return self.data.isdigit()
|
| 1465 |
+
|
| 1466 |
+
def isidentifier(self):
|
| 1467 |
+
return self.data.isidentifier()
|
| 1468 |
+
|
| 1469 |
+
def islower(self):
|
| 1470 |
+
return self.data.islower()
|
| 1471 |
+
|
| 1472 |
+
def isnumeric(self):
|
| 1473 |
+
return self.data.isnumeric()
|
| 1474 |
+
|
| 1475 |
+
def isprintable(self):
|
| 1476 |
+
return self.data.isprintable()
|
| 1477 |
+
|
| 1478 |
+
def isspace(self):
|
| 1479 |
+
return self.data.isspace()
|
| 1480 |
+
|
| 1481 |
+
def istitle(self):
|
| 1482 |
+
return self.data.istitle()
|
| 1483 |
+
|
| 1484 |
+
def isupper(self):
|
| 1485 |
+
return self.data.isupper()
|
| 1486 |
+
|
| 1487 |
+
def join(self, seq):
|
| 1488 |
+
return self.data.join(seq)
|
| 1489 |
+
|
| 1490 |
+
def ljust(self, width, *args):
|
| 1491 |
+
return self.__class__(self.data.ljust(width, *args))
|
| 1492 |
+
|
| 1493 |
+
def lower(self):
|
| 1494 |
+
return self.__class__(self.data.lower())
|
| 1495 |
+
|
| 1496 |
+
def lstrip(self, chars=None):
|
| 1497 |
+
return self.__class__(self.data.lstrip(chars))
|
| 1498 |
+
|
| 1499 |
+
maketrans = str.maketrans
|
| 1500 |
+
|
| 1501 |
+
def partition(self, sep):
|
| 1502 |
+
return self.data.partition(sep)
|
| 1503 |
+
|
| 1504 |
+
def replace(self, old, new, maxsplit=-1):
|
| 1505 |
+
if isinstance(old, UserString):
|
| 1506 |
+
old = old.data
|
| 1507 |
+
if isinstance(new, UserString):
|
| 1508 |
+
new = new.data
|
| 1509 |
+
return self.__class__(self.data.replace(old, new, maxsplit))
|
| 1510 |
+
|
| 1511 |
+
def rfind(self, sub, start=0, end=_sys.maxsize):
|
| 1512 |
+
if isinstance(sub, UserString):
|
| 1513 |
+
sub = sub.data
|
| 1514 |
+
return self.data.rfind(sub, start, end)
|
| 1515 |
+
|
| 1516 |
+
def rindex(self, sub, start=0, end=_sys.maxsize):
|
| 1517 |
+
return self.data.rindex(sub, start, end)
|
| 1518 |
+
|
| 1519 |
+
def rjust(self, width, *args):
|
| 1520 |
+
return self.__class__(self.data.rjust(width, *args))
|
| 1521 |
+
|
| 1522 |
+
def rpartition(self, sep):
|
| 1523 |
+
return self.data.rpartition(sep)
|
| 1524 |
+
|
| 1525 |
+
def rstrip(self, chars=None):
|
| 1526 |
+
return self.__class__(self.data.rstrip(chars))
|
| 1527 |
+
|
| 1528 |
+
def split(self, sep=None, maxsplit=-1):
|
| 1529 |
+
return self.data.split(sep, maxsplit)
|
| 1530 |
+
|
| 1531 |
+
def rsplit(self, sep=None, maxsplit=-1):
|
| 1532 |
+
return self.data.rsplit(sep, maxsplit)
|
| 1533 |
+
|
| 1534 |
+
def splitlines(self, keepends=False):
|
| 1535 |
+
return self.data.splitlines(keepends)
|
| 1536 |
+
|
| 1537 |
+
def startswith(self, prefix, start=0, end=_sys.maxsize):
|
| 1538 |
+
return self.data.startswith(prefix, start, end)
|
| 1539 |
+
|
| 1540 |
+
def strip(self, chars=None):
|
| 1541 |
+
return self.__class__(self.data.strip(chars))
|
| 1542 |
+
|
| 1543 |
+
def swapcase(self):
|
| 1544 |
+
return self.__class__(self.data.swapcase())
|
| 1545 |
+
|
| 1546 |
+
def title(self):
|
| 1547 |
+
return self.__class__(self.data.title())
|
| 1548 |
+
|
| 1549 |
+
def translate(self, *args):
|
| 1550 |
+
return self.__class__(self.data.translate(*args))
|
| 1551 |
+
|
| 1552 |
+
def upper(self):
|
| 1553 |
+
return self.__class__(self.data.upper())
|
| 1554 |
+
|
| 1555 |
+
def zfill(self, width):
|
| 1556 |
+
return self.__class__(self.data.zfill(width))
|
parrot/lib/python3.10/collections/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (48.4 kB). View file
|
|
|
parrot/lib/python3.10/collections/abc.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from _collections_abc import *
|
| 2 |
+
from _collections_abc import __all__
|
| 3 |
+
from _collections_abc import _CallableGenericAlias
|
parrot/lib/python3.10/distutils/command/__pycache__/bdist_dumb.cpython-310.pyc
ADDED
|
Binary file (3.58 kB). View file
|
|
|
parrot/lib/python3.10/distutils/command/__pycache__/bdist_rpm.cpython-310.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
parrot/lib/python3.10/distutils/command/__pycache__/build_ext.cpython-310.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
parrot/lib/python3.10/distutils/command/__pycache__/build_py.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
parrot/lib/python3.10/distutils/command/__pycache__/clean.cpython-310.pyc
ADDED
|
Binary file (2.09 kB). View file
|
|
|
parrot/lib/python3.10/distutils/command/__pycache__/config.cpython-310.pyc
ADDED
|
Binary file (10.5 kB). View file
|
|
|
parrot/lib/python3.10/distutils/command/bdist_msi.py
ADDED
|
@@ -0,0 +1,747 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2005, 2006 Martin von Löwis
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
"""
|
| 4 |
+
Implements the bdist_msi command.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
import warnings
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils.dir_util import remove_tree
|
| 12 |
+
from distutils.sysconfig import get_python_version
|
| 13 |
+
from distutils.version import StrictVersion
|
| 14 |
+
from distutils.errors import DistutilsOptionError
|
| 15 |
+
from distutils.util import get_platform
|
| 16 |
+
from distutils import log
|
| 17 |
+
import msilib
|
| 18 |
+
from msilib import schema, sequence, text
|
| 19 |
+
from msilib import Directory, Feature, Dialog, add_data
|
| 20 |
+
|
| 21 |
+
class PyDialog(Dialog):
|
| 22 |
+
"""Dialog class with a fixed layout: controls at the top, then a ruler,
|
| 23 |
+
then a list of buttons: back, next, cancel. Optionally a bitmap at the
|
| 24 |
+
left."""
|
| 25 |
+
def __init__(self, *args, **kw):
|
| 26 |
+
"""Dialog(database, name, x, y, w, h, attributes, title, first,
|
| 27 |
+
default, cancel, bitmap=true)"""
|
| 28 |
+
Dialog.__init__(self, *args)
|
| 29 |
+
ruler = self.h - 36
|
| 30 |
+
bmwidth = 152*ruler/328
|
| 31 |
+
#if kw.get("bitmap", True):
|
| 32 |
+
# self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
|
| 33 |
+
self.line("BottomLine", 0, ruler, self.w, 0)
|
| 34 |
+
|
| 35 |
+
def title(self, title):
|
| 36 |
+
"Set the title text of the dialog at the top."
|
| 37 |
+
# name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
|
| 38 |
+
# text, in VerdanaBold10
|
| 39 |
+
self.text("Title", 15, 10, 320, 60, 0x30003,
|
| 40 |
+
r"{\VerdanaBold10}%s" % title)
|
| 41 |
+
|
| 42 |
+
def back(self, title, next, name = "Back", active = 1):
|
| 43 |
+
"""Add a back button with a given title, the tab-next button,
|
| 44 |
+
its name in the Control table, possibly initially disabled.
|
| 45 |
+
|
| 46 |
+
Return the button, so that events can be associated"""
|
| 47 |
+
if active:
|
| 48 |
+
flags = 3 # Visible|Enabled
|
| 49 |
+
else:
|
| 50 |
+
flags = 1 # Visible
|
| 51 |
+
return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
|
| 52 |
+
|
| 53 |
+
def cancel(self, title, next, name = "Cancel", active = 1):
|
| 54 |
+
"""Add a cancel button with a given title, the tab-next button,
|
| 55 |
+
its name in the Control table, possibly initially disabled.
|
| 56 |
+
|
| 57 |
+
Return the button, so that events can be associated"""
|
| 58 |
+
if active:
|
| 59 |
+
flags = 3 # Visible|Enabled
|
| 60 |
+
else:
|
| 61 |
+
flags = 1 # Visible
|
| 62 |
+
return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
|
| 63 |
+
|
| 64 |
+
def next(self, title, next, name = "Next", active = 1):
|
| 65 |
+
"""Add a Next button with a given title, the tab-next button,
|
| 66 |
+
its name in the Control table, possibly initially disabled.
|
| 67 |
+
|
| 68 |
+
Return the button, so that events can be associated"""
|
| 69 |
+
if active:
|
| 70 |
+
flags = 3 # Visible|Enabled
|
| 71 |
+
else:
|
| 72 |
+
flags = 1 # Visible
|
| 73 |
+
return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
|
| 74 |
+
|
| 75 |
+
def xbutton(self, name, title, next, xpos):
|
| 76 |
+
"""Add a button with a given title, the tab-next button,
|
| 77 |
+
its name in the Control table, giving its x position; the
|
| 78 |
+
y-position is aligned with the other buttons.
|
| 79 |
+
|
| 80 |
+
Return the button, so that events can be associated"""
|
| 81 |
+
return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
|
| 82 |
+
|
| 83 |
+
class bdist_msi(Command):
|
| 84 |
+
|
| 85 |
+
description = "create a Microsoft Installer (.msi) binary distribution"
|
| 86 |
+
|
| 87 |
+
user_options = [('bdist-dir=', None,
|
| 88 |
+
"temporary directory for creating the distribution"),
|
| 89 |
+
('plat-name=', 'p',
|
| 90 |
+
"platform name to embed in generated filenames "
|
| 91 |
+
"(default: %s)" % get_platform()),
|
| 92 |
+
('keep-temp', 'k',
|
| 93 |
+
"keep the pseudo-installation tree around after " +
|
| 94 |
+
"creating the distribution archive"),
|
| 95 |
+
('target-version=', None,
|
| 96 |
+
"require a specific python version" +
|
| 97 |
+
" on the target system"),
|
| 98 |
+
('no-target-compile', 'c',
|
| 99 |
+
"do not compile .py to .pyc on the target system"),
|
| 100 |
+
('no-target-optimize', 'o',
|
| 101 |
+
"do not compile .py to .pyo (optimized) "
|
| 102 |
+
"on the target system"),
|
| 103 |
+
('dist-dir=', 'd',
|
| 104 |
+
"directory to put final built distributions in"),
|
| 105 |
+
('skip-build', None,
|
| 106 |
+
"skip rebuilding everything (for testing/debugging)"),
|
| 107 |
+
('install-script=', None,
|
| 108 |
+
"basename of installation script to be run after "
|
| 109 |
+
"installation or before deinstallation"),
|
| 110 |
+
('pre-install-script=', None,
|
| 111 |
+
"Fully qualified filename of a script to be run before "
|
| 112 |
+
"any files are installed. This script need not be in the "
|
| 113 |
+
"distribution"),
|
| 114 |
+
]
|
| 115 |
+
|
| 116 |
+
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
|
| 117 |
+
'skip-build']
|
| 118 |
+
|
| 119 |
+
all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
|
| 120 |
+
'2.5', '2.6', '2.7', '2.8', '2.9',
|
| 121 |
+
'3.0', '3.1', '3.2', '3.3', '3.4',
|
| 122 |
+
'3.5', '3.6', '3.7', '3.8', '3.9']
|
| 123 |
+
other_version = 'X'
|
| 124 |
+
|
| 125 |
+
def __init__(self, *args, **kw):
|
| 126 |
+
super().__init__(*args, **kw)
|
| 127 |
+
warnings.warn("bdist_msi command is deprecated since Python 3.9, "
|
| 128 |
+
"use bdist_wheel (wheel packages) instead",
|
| 129 |
+
DeprecationWarning, 2)
|
| 130 |
+
|
| 131 |
+
def initialize_options(self):
|
| 132 |
+
self.bdist_dir = None
|
| 133 |
+
self.plat_name = None
|
| 134 |
+
self.keep_temp = 0
|
| 135 |
+
self.no_target_compile = 0
|
| 136 |
+
self.no_target_optimize = 0
|
| 137 |
+
self.target_version = None
|
| 138 |
+
self.dist_dir = None
|
| 139 |
+
self.skip_build = None
|
| 140 |
+
self.install_script = None
|
| 141 |
+
self.pre_install_script = None
|
| 142 |
+
self.versions = None
|
| 143 |
+
|
| 144 |
+
def finalize_options(self):
|
| 145 |
+
self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
|
| 146 |
+
|
| 147 |
+
if self.bdist_dir is None:
|
| 148 |
+
bdist_base = self.get_finalized_command('bdist').bdist_base
|
| 149 |
+
self.bdist_dir = os.path.join(bdist_base, 'msi')
|
| 150 |
+
|
| 151 |
+
short_version = get_python_version()
|
| 152 |
+
if (not self.target_version) and self.distribution.has_ext_modules():
|
| 153 |
+
self.target_version = short_version
|
| 154 |
+
|
| 155 |
+
if self.target_version:
|
| 156 |
+
self.versions = [self.target_version]
|
| 157 |
+
if not self.skip_build and self.distribution.has_ext_modules()\
|
| 158 |
+
and self.target_version != short_version:
|
| 159 |
+
raise DistutilsOptionError(
|
| 160 |
+
"target version can only be %s, or the '--skip-build'"
|
| 161 |
+
" option must be specified" % (short_version,))
|
| 162 |
+
else:
|
| 163 |
+
self.versions = list(self.all_versions)
|
| 164 |
+
|
| 165 |
+
self.set_undefined_options('bdist',
|
| 166 |
+
('dist_dir', 'dist_dir'),
|
| 167 |
+
('plat_name', 'plat_name'),
|
| 168 |
+
)
|
| 169 |
+
|
| 170 |
+
if self.pre_install_script:
|
| 171 |
+
raise DistutilsOptionError(
|
| 172 |
+
"the pre-install-script feature is not yet implemented")
|
| 173 |
+
|
| 174 |
+
if self.install_script:
|
| 175 |
+
for script in self.distribution.scripts:
|
| 176 |
+
if self.install_script == os.path.basename(script):
|
| 177 |
+
break
|
| 178 |
+
else:
|
| 179 |
+
raise DistutilsOptionError(
|
| 180 |
+
"install_script '%s' not found in scripts"
|
| 181 |
+
% self.install_script)
|
| 182 |
+
self.install_script_key = None
|
| 183 |
+
|
| 184 |
+
def run(self):
|
| 185 |
+
if not self.skip_build:
|
| 186 |
+
self.run_command('build')
|
| 187 |
+
|
| 188 |
+
install = self.reinitialize_command('install', reinit_subcommands=1)
|
| 189 |
+
install.prefix = self.bdist_dir
|
| 190 |
+
install.skip_build = self.skip_build
|
| 191 |
+
install.warn_dir = 0
|
| 192 |
+
|
| 193 |
+
install_lib = self.reinitialize_command('install_lib')
|
| 194 |
+
# we do not want to include pyc or pyo files
|
| 195 |
+
install_lib.compile = 0
|
| 196 |
+
install_lib.optimize = 0
|
| 197 |
+
|
| 198 |
+
if self.distribution.has_ext_modules():
|
| 199 |
+
# If we are building an installer for a Python version other
|
| 200 |
+
# than the one we are currently running, then we need to ensure
|
| 201 |
+
# our build_lib reflects the other Python version rather than ours.
|
| 202 |
+
# Note that for target_version!=sys.version, we must have skipped the
|
| 203 |
+
# build step, so there is no issue with enforcing the build of this
|
| 204 |
+
# version.
|
| 205 |
+
target_version = self.target_version
|
| 206 |
+
if not target_version:
|
| 207 |
+
assert self.skip_build, "Should have already checked this"
|
| 208 |
+
target_version = '%d.%d' % sys.version_info[:2]
|
| 209 |
+
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
|
| 210 |
+
build = self.get_finalized_command('build')
|
| 211 |
+
build.build_lib = os.path.join(build.build_base,
|
| 212 |
+
'lib' + plat_specifier)
|
| 213 |
+
|
| 214 |
+
log.info("installing to %s", self.bdist_dir)
|
| 215 |
+
install.ensure_finalized()
|
| 216 |
+
|
| 217 |
+
# avoid warning of 'install_lib' about installing
|
| 218 |
+
# into a directory not in sys.path
|
| 219 |
+
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
|
| 220 |
+
|
| 221 |
+
install.run()
|
| 222 |
+
|
| 223 |
+
del sys.path[0]
|
| 224 |
+
|
| 225 |
+
self.mkpath(self.dist_dir)
|
| 226 |
+
fullname = self.distribution.get_fullname()
|
| 227 |
+
installer_name = self.get_installer_filename(fullname)
|
| 228 |
+
installer_name = os.path.abspath(installer_name)
|
| 229 |
+
if os.path.exists(installer_name): os.unlink(installer_name)
|
| 230 |
+
|
| 231 |
+
metadata = self.distribution.metadata
|
| 232 |
+
author = metadata.author
|
| 233 |
+
if not author:
|
| 234 |
+
author = metadata.maintainer
|
| 235 |
+
if not author:
|
| 236 |
+
author = "UNKNOWN"
|
| 237 |
+
version = metadata.get_version()
|
| 238 |
+
# ProductVersion must be strictly numeric
|
| 239 |
+
# XXX need to deal with prerelease versions
|
| 240 |
+
sversion = "%d.%d.%d" % StrictVersion(version).version
|
| 241 |
+
# Prefix ProductName with Python x.y, so that
|
| 242 |
+
# it sorts together with the other Python packages
|
| 243 |
+
# in Add-Remove-Programs (APR)
|
| 244 |
+
fullname = self.distribution.get_fullname()
|
| 245 |
+
if self.target_version:
|
| 246 |
+
product_name = "Python %s %s" % (self.target_version, fullname)
|
| 247 |
+
else:
|
| 248 |
+
product_name = "Python %s" % (fullname)
|
| 249 |
+
self.db = msilib.init_database(installer_name, schema,
|
| 250 |
+
product_name, msilib.gen_uuid(),
|
| 251 |
+
sversion, author)
|
| 252 |
+
msilib.add_tables(self.db, sequence)
|
| 253 |
+
props = [('DistVersion', version)]
|
| 254 |
+
email = metadata.author_email or metadata.maintainer_email
|
| 255 |
+
if email:
|
| 256 |
+
props.append(("ARPCONTACT", email))
|
| 257 |
+
if metadata.url:
|
| 258 |
+
props.append(("ARPURLINFOABOUT", metadata.url))
|
| 259 |
+
if props:
|
| 260 |
+
add_data(self.db, 'Property', props)
|
| 261 |
+
|
| 262 |
+
self.add_find_python()
|
| 263 |
+
self.add_files()
|
| 264 |
+
self.add_scripts()
|
| 265 |
+
self.add_ui()
|
| 266 |
+
self.db.Commit()
|
| 267 |
+
|
| 268 |
+
if hasattr(self.distribution, 'dist_files'):
|
| 269 |
+
tup = 'bdist_msi', self.target_version or 'any', fullname
|
| 270 |
+
self.distribution.dist_files.append(tup)
|
| 271 |
+
|
| 272 |
+
if not self.keep_temp:
|
| 273 |
+
remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
| 274 |
+
|
| 275 |
+
def add_files(self):
|
| 276 |
+
db = self.db
|
| 277 |
+
cab = msilib.CAB("distfiles")
|
| 278 |
+
rootdir = os.path.abspath(self.bdist_dir)
|
| 279 |
+
|
| 280 |
+
root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
|
| 281 |
+
f = Feature(db, "Python", "Python", "Everything",
|
| 282 |
+
0, 1, directory="TARGETDIR")
|
| 283 |
+
|
| 284 |
+
items = [(f, root, '')]
|
| 285 |
+
for version in self.versions + [self.other_version]:
|
| 286 |
+
target = "TARGETDIR" + version
|
| 287 |
+
name = default = "Python" + version
|
| 288 |
+
desc = "Everything"
|
| 289 |
+
if version is self.other_version:
|
| 290 |
+
title = "Python from another location"
|
| 291 |
+
level = 2
|
| 292 |
+
else:
|
| 293 |
+
title = "Python %s from registry" % version
|
| 294 |
+
level = 1
|
| 295 |
+
f = Feature(db, name, title, desc, 1, level, directory=target)
|
| 296 |
+
dir = Directory(db, cab, root, rootdir, target, default)
|
| 297 |
+
items.append((f, dir, version))
|
| 298 |
+
db.Commit()
|
| 299 |
+
|
| 300 |
+
seen = {}
|
| 301 |
+
for feature, dir, version in items:
|
| 302 |
+
todo = [dir]
|
| 303 |
+
while todo:
|
| 304 |
+
dir = todo.pop()
|
| 305 |
+
for file in os.listdir(dir.absolute):
|
| 306 |
+
afile = os.path.join(dir.absolute, file)
|
| 307 |
+
if os.path.isdir(afile):
|
| 308 |
+
short = "%s|%s" % (dir.make_short(file), file)
|
| 309 |
+
default = file + version
|
| 310 |
+
newdir = Directory(db, cab, dir, file, default, short)
|
| 311 |
+
todo.append(newdir)
|
| 312 |
+
else:
|
| 313 |
+
if not dir.component:
|
| 314 |
+
dir.start_component(dir.logical, feature, 0)
|
| 315 |
+
if afile not in seen:
|
| 316 |
+
key = seen[afile] = dir.add_file(file)
|
| 317 |
+
if file==self.install_script:
|
| 318 |
+
if self.install_script_key:
|
| 319 |
+
raise DistutilsOptionError(
|
| 320 |
+
"Multiple files with name %s" % file)
|
| 321 |
+
self.install_script_key = '[#%s]' % key
|
| 322 |
+
else:
|
| 323 |
+
key = seen[afile]
|
| 324 |
+
add_data(self.db, "DuplicateFile",
|
| 325 |
+
[(key + version, dir.component, key, None, dir.logical)])
|
| 326 |
+
db.Commit()
|
| 327 |
+
cab.commit(db)
|
| 328 |
+
|
| 329 |
+
def add_find_python(self):
|
| 330 |
+
"""Adds code to the installer to compute the location of Python.
|
| 331 |
+
|
| 332 |
+
Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
|
| 333 |
+
registry for each version of Python.
|
| 334 |
+
|
| 335 |
+
Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
|
| 336 |
+
else from PYTHON.MACHINE.X.Y.
|
| 337 |
+
|
| 338 |
+
Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
|
| 339 |
+
|
| 340 |
+
start = 402
|
| 341 |
+
for ver in self.versions:
|
| 342 |
+
install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
|
| 343 |
+
machine_reg = "python.machine." + ver
|
| 344 |
+
user_reg = "python.user." + ver
|
| 345 |
+
machine_prop = "PYTHON.MACHINE." + ver
|
| 346 |
+
user_prop = "PYTHON.USER." + ver
|
| 347 |
+
machine_action = "PythonFromMachine" + ver
|
| 348 |
+
user_action = "PythonFromUser" + ver
|
| 349 |
+
exe_action = "PythonExe" + ver
|
| 350 |
+
target_dir_prop = "TARGETDIR" + ver
|
| 351 |
+
exe_prop = "PYTHON" + ver
|
| 352 |
+
if msilib.Win64:
|
| 353 |
+
# type: msidbLocatorTypeRawValue + msidbLocatorType64bit
|
| 354 |
+
Type = 2+16
|
| 355 |
+
else:
|
| 356 |
+
Type = 2
|
| 357 |
+
add_data(self.db, "RegLocator",
|
| 358 |
+
[(machine_reg, 2, install_path, None, Type),
|
| 359 |
+
(user_reg, 1, install_path, None, Type)])
|
| 360 |
+
add_data(self.db, "AppSearch",
|
| 361 |
+
[(machine_prop, machine_reg),
|
| 362 |
+
(user_prop, user_reg)])
|
| 363 |
+
add_data(self.db, "CustomAction",
|
| 364 |
+
[(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
|
| 365 |
+
(user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
|
| 366 |
+
(exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
|
| 367 |
+
])
|
| 368 |
+
add_data(self.db, "InstallExecuteSequence",
|
| 369 |
+
[(machine_action, machine_prop, start),
|
| 370 |
+
(user_action, user_prop, start + 1),
|
| 371 |
+
(exe_action, None, start + 2),
|
| 372 |
+
])
|
| 373 |
+
add_data(self.db, "InstallUISequence",
|
| 374 |
+
[(machine_action, machine_prop, start),
|
| 375 |
+
(user_action, user_prop, start + 1),
|
| 376 |
+
(exe_action, None, start + 2),
|
| 377 |
+
])
|
| 378 |
+
add_data(self.db, "Condition",
|
| 379 |
+
[("Python" + ver, 0, "NOT TARGETDIR" + ver)])
|
| 380 |
+
start += 4
|
| 381 |
+
assert start < 500
|
| 382 |
+
|
| 383 |
+
def add_scripts(self):
|
| 384 |
+
if self.install_script:
|
| 385 |
+
start = 6800
|
| 386 |
+
for ver in self.versions + [self.other_version]:
|
| 387 |
+
install_action = "install_script." + ver
|
| 388 |
+
exe_prop = "PYTHON" + ver
|
| 389 |
+
add_data(self.db, "CustomAction",
|
| 390 |
+
[(install_action, 50, exe_prop, self.install_script_key)])
|
| 391 |
+
add_data(self.db, "InstallExecuteSequence",
|
| 392 |
+
[(install_action, "&Python%s=3" % ver, start)])
|
| 393 |
+
start += 1
|
| 394 |
+
# XXX pre-install scripts are currently refused in finalize_options()
|
| 395 |
+
# but if this feature is completed, it will also need to add
|
| 396 |
+
# entries for each version as the above code does
|
| 397 |
+
if self.pre_install_script:
|
| 398 |
+
scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
|
| 399 |
+
with open(scriptfn, "w") as f:
|
| 400 |
+
# The batch file will be executed with [PYTHON], so that %1
|
| 401 |
+
# is the path to the Python interpreter; %0 will be the path
|
| 402 |
+
# of the batch file.
|
| 403 |
+
# rem ="""
|
| 404 |
+
# %1 %0
|
| 405 |
+
# exit
|
| 406 |
+
# """
|
| 407 |
+
# <actual script>
|
| 408 |
+
f.write('rem ="""\n%1 %0\nexit\n"""\n')
|
| 409 |
+
with open(self.pre_install_script) as fin:
|
| 410 |
+
f.write(fin.read())
|
| 411 |
+
add_data(self.db, "Binary",
|
| 412 |
+
[("PreInstall", msilib.Binary(scriptfn))
|
| 413 |
+
])
|
| 414 |
+
add_data(self.db, "CustomAction",
|
| 415 |
+
[("PreInstall", 2, "PreInstall", None)
|
| 416 |
+
])
|
| 417 |
+
add_data(self.db, "InstallExecuteSequence",
|
| 418 |
+
[("PreInstall", "NOT Installed", 450)])
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def add_ui(self):
|
| 422 |
+
db = self.db
|
| 423 |
+
x = y = 50
|
| 424 |
+
w = 370
|
| 425 |
+
h = 300
|
| 426 |
+
title = "[ProductName] Setup"
|
| 427 |
+
|
| 428 |
+
# see "Dialog Style Bits"
|
| 429 |
+
modal = 3 # visible | modal
|
| 430 |
+
modeless = 1 # visible
|
| 431 |
+
track_disk_space = 32
|
| 432 |
+
|
| 433 |
+
# UI customization properties
|
| 434 |
+
add_data(db, "Property",
|
| 435 |
+
# See "DefaultUIFont Property"
|
| 436 |
+
[("DefaultUIFont", "DlgFont8"),
|
| 437 |
+
# See "ErrorDialog Style Bit"
|
| 438 |
+
("ErrorDialog", "ErrorDlg"),
|
| 439 |
+
("Progress1", "Install"), # modified in maintenance type dlg
|
| 440 |
+
("Progress2", "installs"),
|
| 441 |
+
("MaintenanceForm_Action", "Repair"),
|
| 442 |
+
# possible values: ALL, JUSTME
|
| 443 |
+
("WhichUsers", "ALL")
|
| 444 |
+
])
|
| 445 |
+
|
| 446 |
+
# Fonts, see "TextStyle Table"
|
| 447 |
+
add_data(db, "TextStyle",
|
| 448 |
+
[("DlgFont8", "Tahoma", 9, None, 0),
|
| 449 |
+
("DlgFontBold8", "Tahoma", 8, None, 1), #bold
|
| 450 |
+
("VerdanaBold10", "Verdana", 10, None, 1),
|
| 451 |
+
("VerdanaRed9", "Verdana", 9, 255, 0),
|
| 452 |
+
])
|
| 453 |
+
|
| 454 |
+
# UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
|
| 455 |
+
# Numbers indicate sequence; see sequence.py for how these action integrate
|
| 456 |
+
add_data(db, "InstallUISequence",
|
| 457 |
+
[("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
|
| 458 |
+
("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
|
| 459 |
+
# In the user interface, assume all-users installation if privileged.
|
| 460 |
+
("SelectFeaturesDlg", "Not Installed", 1230),
|
| 461 |
+
# XXX no support for resume installations yet
|
| 462 |
+
#("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
|
| 463 |
+
("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
|
| 464 |
+
("ProgressDlg", None, 1280)])
|
| 465 |
+
|
| 466 |
+
add_data(db, 'ActionText', text.ActionText)
|
| 467 |
+
add_data(db, 'UIText', text.UIText)
|
| 468 |
+
#####################################################################
|
| 469 |
+
# Standard dialogs: FatalError, UserExit, ExitDialog
|
| 470 |
+
fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
|
| 471 |
+
"Finish", "Finish", "Finish")
|
| 472 |
+
fatal.title("[ProductName] Installer ended prematurely")
|
| 473 |
+
fatal.back("< Back", "Finish", active = 0)
|
| 474 |
+
fatal.cancel("Cancel", "Back", active = 0)
|
| 475 |
+
fatal.text("Description1", 15, 70, 320, 80, 0x30003,
|
| 476 |
+
"[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
|
| 477 |
+
fatal.text("Description2", 15, 155, 320, 20, 0x30003,
|
| 478 |
+
"Click the Finish button to exit the Installer.")
|
| 479 |
+
c=fatal.next("Finish", "Cancel", name="Finish")
|
| 480 |
+
c.event("EndDialog", "Exit")
|
| 481 |
+
|
| 482 |
+
user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
|
| 483 |
+
"Finish", "Finish", "Finish")
|
| 484 |
+
user_exit.title("[ProductName] Installer was interrupted")
|
| 485 |
+
user_exit.back("< Back", "Finish", active = 0)
|
| 486 |
+
user_exit.cancel("Cancel", "Back", active = 0)
|
| 487 |
+
user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
|
| 488 |
+
"[ProductName] setup was interrupted. Your system has not been modified. "
|
| 489 |
+
"To install this program at a later time, please run the installation again.")
|
| 490 |
+
user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
|
| 491 |
+
"Click the Finish button to exit the Installer.")
|
| 492 |
+
c = user_exit.next("Finish", "Cancel", name="Finish")
|
| 493 |
+
c.event("EndDialog", "Exit")
|
| 494 |
+
|
| 495 |
+
exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
|
| 496 |
+
"Finish", "Finish", "Finish")
|
| 497 |
+
exit_dialog.title("Completing the [ProductName] Installer")
|
| 498 |
+
exit_dialog.back("< Back", "Finish", active = 0)
|
| 499 |
+
exit_dialog.cancel("Cancel", "Back", active = 0)
|
| 500 |
+
exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
|
| 501 |
+
"Click the Finish button to exit the Installer.")
|
| 502 |
+
c = exit_dialog.next("Finish", "Cancel", name="Finish")
|
| 503 |
+
c.event("EndDialog", "Return")
|
| 504 |
+
|
| 505 |
+
#####################################################################
|
| 506 |
+
# Required dialog: FilesInUse, ErrorDlg
|
| 507 |
+
inuse = PyDialog(db, "FilesInUse",
|
| 508 |
+
x, y, w, h,
|
| 509 |
+
19, # KeepModeless|Modal|Visible
|
| 510 |
+
title,
|
| 511 |
+
"Retry", "Retry", "Retry", bitmap=False)
|
| 512 |
+
inuse.text("Title", 15, 6, 200, 15, 0x30003,
|
| 513 |
+
r"{\DlgFontBold8}Files in Use")
|
| 514 |
+
inuse.text("Description", 20, 23, 280, 20, 0x30003,
|
| 515 |
+
"Some files that need to be updated are currently in use.")
|
| 516 |
+
inuse.text("Text", 20, 55, 330, 50, 3,
|
| 517 |
+
"The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
|
| 518 |
+
inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
|
| 519 |
+
None, None, None)
|
| 520 |
+
c=inuse.back("Exit", "Ignore", name="Exit")
|
| 521 |
+
c.event("EndDialog", "Exit")
|
| 522 |
+
c=inuse.next("Ignore", "Retry", name="Ignore")
|
| 523 |
+
c.event("EndDialog", "Ignore")
|
| 524 |
+
c=inuse.cancel("Retry", "Exit", name="Retry")
|
| 525 |
+
c.event("EndDialog","Retry")
|
| 526 |
+
|
| 527 |
+
# See "Error Dialog". See "ICE20" for the required names of the controls.
|
| 528 |
+
error = Dialog(db, "ErrorDlg",
|
| 529 |
+
50, 10, 330, 101,
|
| 530 |
+
65543, # Error|Minimize|Modal|Visible
|
| 531 |
+
title,
|
| 532 |
+
"ErrorText", None, None)
|
| 533 |
+
error.text("ErrorText", 50,9,280,48,3, "")
|
| 534 |
+
#error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
|
| 535 |
+
error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
|
| 536 |
+
error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
|
| 537 |
+
error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
|
| 538 |
+
error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
|
| 539 |
+
error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
|
| 540 |
+
error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
|
| 541 |
+
error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
|
| 542 |
+
|
| 543 |
+
#####################################################################
|
| 544 |
+
# Global "Query Cancel" dialog
|
| 545 |
+
cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
|
| 546 |
+
"No", "No", "No")
|
| 547 |
+
cancel.text("Text", 48, 15, 194, 30, 3,
|
| 548 |
+
"Are you sure you want to cancel [ProductName] installation?")
|
| 549 |
+
#cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
|
| 550 |
+
# "py.ico", None, None)
|
| 551 |
+
c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
|
| 552 |
+
c.event("EndDialog", "Exit")
|
| 553 |
+
|
| 554 |
+
c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
|
| 555 |
+
c.event("EndDialog", "Return")
|
| 556 |
+
|
| 557 |
+
#####################################################################
|
| 558 |
+
# Global "Wait for costing" dialog
|
| 559 |
+
costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
|
| 560 |
+
"Return", "Return", "Return")
|
| 561 |
+
costing.text("Text", 48, 15, 194, 30, 3,
|
| 562 |
+
"Please wait while the installer finishes determining your disk space requirements.")
|
| 563 |
+
c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
|
| 564 |
+
c.event("EndDialog", "Exit")
|
| 565 |
+
|
| 566 |
+
#####################################################################
|
| 567 |
+
# Preparation dialog: no user input except cancellation
|
| 568 |
+
prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
|
| 569 |
+
"Cancel", "Cancel", "Cancel")
|
| 570 |
+
prep.text("Description", 15, 70, 320, 40, 0x30003,
|
| 571 |
+
"Please wait while the Installer prepares to guide you through the installation.")
|
| 572 |
+
prep.title("Welcome to the [ProductName] Installer")
|
| 573 |
+
c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
|
| 574 |
+
c.mapping("ActionText", "Text")
|
| 575 |
+
c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
|
| 576 |
+
c.mapping("ActionData", "Text")
|
| 577 |
+
prep.back("Back", None, active=0)
|
| 578 |
+
prep.next("Next", None, active=0)
|
| 579 |
+
c=prep.cancel("Cancel", None)
|
| 580 |
+
c.event("SpawnDialog", "CancelDlg")
|
| 581 |
+
|
| 582 |
+
#####################################################################
|
| 583 |
+
# Feature (Python directory) selection
|
| 584 |
+
seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
|
| 585 |
+
"Next", "Next", "Cancel")
|
| 586 |
+
seldlg.title("Select Python Installations")
|
| 587 |
+
|
| 588 |
+
seldlg.text("Hint", 15, 30, 300, 20, 3,
|
| 589 |
+
"Select the Python locations where %s should be installed."
|
| 590 |
+
% self.distribution.get_fullname())
|
| 591 |
+
|
| 592 |
+
seldlg.back("< Back", None, active=0)
|
| 593 |
+
c = seldlg.next("Next >", "Cancel")
|
| 594 |
+
order = 1
|
| 595 |
+
c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
|
| 596 |
+
for version in self.versions + [self.other_version]:
|
| 597 |
+
order += 1
|
| 598 |
+
c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
|
| 599 |
+
"FEATURE_SELECTED AND &Python%s=3" % version,
|
| 600 |
+
ordering=order)
|
| 601 |
+
c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
|
| 602 |
+
c.event("EndDialog", "Return", ordering=order + 2)
|
| 603 |
+
c = seldlg.cancel("Cancel", "Features")
|
| 604 |
+
c.event("SpawnDialog", "CancelDlg")
|
| 605 |
+
|
| 606 |
+
c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
|
| 607 |
+
"FEATURE", None, "PathEdit", None)
|
| 608 |
+
c.event("[FEATURE_SELECTED]", "1")
|
| 609 |
+
ver = self.other_version
|
| 610 |
+
install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
|
| 611 |
+
dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
|
| 612 |
+
|
| 613 |
+
c = seldlg.text("Other", 15, 200, 300, 15, 3,
|
| 614 |
+
"Provide an alternate Python location")
|
| 615 |
+
c.condition("Enable", install_other_cond)
|
| 616 |
+
c.condition("Show", install_other_cond)
|
| 617 |
+
c.condition("Disable", dont_install_other_cond)
|
| 618 |
+
c.condition("Hide", dont_install_other_cond)
|
| 619 |
+
|
| 620 |
+
c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
|
| 621 |
+
"TARGETDIR" + ver, None, "Next", None)
|
| 622 |
+
c.condition("Enable", install_other_cond)
|
| 623 |
+
c.condition("Show", install_other_cond)
|
| 624 |
+
c.condition("Disable", dont_install_other_cond)
|
| 625 |
+
c.condition("Hide", dont_install_other_cond)
|
| 626 |
+
|
| 627 |
+
#####################################################################
|
| 628 |
+
# Disk cost
|
| 629 |
+
cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
|
| 630 |
+
"OK", "OK", "OK", bitmap=False)
|
| 631 |
+
cost.text("Title", 15, 6, 200, 15, 0x30003,
|
| 632 |
+
r"{\DlgFontBold8}Disk Space Requirements")
|
| 633 |
+
cost.text("Description", 20, 20, 280, 20, 0x30003,
|
| 634 |
+
"The disk space required for the installation of the selected features.")
|
| 635 |
+
cost.text("Text", 20, 53, 330, 60, 3,
|
| 636 |
+
"The highlighted volumes (if any) do not have enough disk space "
|
| 637 |
+
"available for the currently selected features. You can either "
|
| 638 |
+
"remove some files from the highlighted volumes, or choose to "
|
| 639 |
+
"install less features onto local drive(s), or select different "
|
| 640 |
+
"destination drive(s).")
|
| 641 |
+
cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
|
| 642 |
+
None, "{120}{70}{70}{70}{70}", None, None)
|
| 643 |
+
cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
|
| 644 |
+
|
| 645 |
+
#####################################################################
|
| 646 |
+
# WhichUsers Dialog. Only available on NT, and for privileged users.
|
| 647 |
+
# This must be run before FindRelatedProducts, because that will
|
| 648 |
+
# take into account whether the previous installation was per-user
|
| 649 |
+
# or per-machine. We currently don't support going back to this
|
| 650 |
+
# dialog after "Next" was selected; to support this, we would need to
|
| 651 |
+
# find how to reset the ALLUSERS property, and how to re-run
|
| 652 |
+
# FindRelatedProducts.
|
| 653 |
+
# On Windows9x, the ALLUSERS property is ignored on the command line
|
| 654 |
+
# and in the Property table, but installer fails according to the documentation
|
| 655 |
+
# if a dialog attempts to set ALLUSERS.
|
| 656 |
+
whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
|
| 657 |
+
"AdminInstall", "Next", "Cancel")
|
| 658 |
+
whichusers.title("Select whether to install [ProductName] for all users of this computer.")
|
| 659 |
+
# A radio group with two options: allusers, justme
|
| 660 |
+
g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
|
| 661 |
+
"WhichUsers", "", "Next")
|
| 662 |
+
g.add("ALL", 0, 5, 150, 20, "Install for all users")
|
| 663 |
+
g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
|
| 664 |
+
|
| 665 |
+
whichusers.back("Back", None, active=0)
|
| 666 |
+
|
| 667 |
+
c = whichusers.next("Next >", "Cancel")
|
| 668 |
+
c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
|
| 669 |
+
c.event("EndDialog", "Return", ordering = 2)
|
| 670 |
+
|
| 671 |
+
c = whichusers.cancel("Cancel", "AdminInstall")
|
| 672 |
+
c.event("SpawnDialog", "CancelDlg")
|
| 673 |
+
|
| 674 |
+
#####################################################################
|
| 675 |
+
# Installation Progress dialog (modeless)
|
| 676 |
+
progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
|
| 677 |
+
"Cancel", "Cancel", "Cancel", bitmap=False)
|
| 678 |
+
progress.text("Title", 20, 15, 200, 15, 0x30003,
|
| 679 |
+
r"{\DlgFontBold8}[Progress1] [ProductName]")
|
| 680 |
+
progress.text("Text", 35, 65, 300, 30, 3,
|
| 681 |
+
"Please wait while the Installer [Progress2] [ProductName]. "
|
| 682 |
+
"This may take several minutes.")
|
| 683 |
+
progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
|
| 684 |
+
|
| 685 |
+
c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
|
| 686 |
+
c.mapping("ActionText", "Text")
|
| 687 |
+
|
| 688 |
+
#c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
|
| 689 |
+
#c.mapping("ActionData", "Text")
|
| 690 |
+
|
| 691 |
+
c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
|
| 692 |
+
None, "Progress done", None, None)
|
| 693 |
+
c.mapping("SetProgress", "Progress")
|
| 694 |
+
|
| 695 |
+
progress.back("< Back", "Next", active=False)
|
| 696 |
+
progress.next("Next >", "Cancel", active=False)
|
| 697 |
+
progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
|
| 698 |
+
|
| 699 |
+
###################################################################
|
| 700 |
+
# Maintenance type: repair/uninstall
|
| 701 |
+
maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
|
| 702 |
+
"Next", "Next", "Cancel")
|
| 703 |
+
maint.title("Welcome to the [ProductName] Setup Wizard")
|
| 704 |
+
maint.text("BodyText", 15, 63, 330, 42, 3,
|
| 705 |
+
"Select whether you want to repair or remove [ProductName].")
|
| 706 |
+
g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
|
| 707 |
+
"MaintenanceForm_Action", "", "Next")
|
| 708 |
+
#g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
|
| 709 |
+
g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
|
| 710 |
+
g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
|
| 711 |
+
|
| 712 |
+
maint.back("< Back", None, active=False)
|
| 713 |
+
c=maint.next("Finish", "Cancel")
|
| 714 |
+
# Change installation: Change progress dialog to "Change", then ask
|
| 715 |
+
# for feature selection
|
| 716 |
+
#c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
|
| 717 |
+
#c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
|
| 718 |
+
|
| 719 |
+
# Reinstall: Change progress dialog to "Repair", then invoke reinstall
|
| 720 |
+
# Also set list of reinstalled features to "ALL"
|
| 721 |
+
c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
|
| 722 |
+
c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
|
| 723 |
+
c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
|
| 724 |
+
c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
|
| 725 |
+
|
| 726 |
+
# Uninstall: Change progress to "Remove", then invoke uninstall
|
| 727 |
+
# Also set list of removed features to "ALL"
|
| 728 |
+
c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
|
| 729 |
+
c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
|
| 730 |
+
c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
|
| 731 |
+
c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
|
| 732 |
+
|
| 733 |
+
# Close dialog when maintenance action scheduled
|
| 734 |
+
c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
|
| 735 |
+
#c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
|
| 736 |
+
|
| 737 |
+
maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
|
| 738 |
+
|
| 739 |
+
def get_installer_filename(self, fullname):
|
| 740 |
+
# Factored out to allow overriding in subclasses
|
| 741 |
+
if self.target_version:
|
| 742 |
+
base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
|
| 743 |
+
self.target_version)
|
| 744 |
+
else:
|
| 745 |
+
base_name = "%s.%s.msi" % (fullname, self.plat_name)
|
| 746 |
+
installer_name = os.path.join(self.dist_dir, base_name)
|
| 747 |
+
return installer_name
|
parrot/lib/python3.10/ensurepip/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (6.57 kB). View file
|
|
|
parrot/lib/python3.10/ensurepip/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (489 Bytes). View file
|
|
|
parrot/lib/python3.10/ensurepip/_bundled/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (397 Bytes). View file
|
|
|
parrot/lib/python3.10/ensurepip/_uninstall.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic pip uninstallation support, helper for the Windows uninstaller"""
|
| 2 |
+
|
| 3 |
+
import argparse
|
| 4 |
+
import ensurepip
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _main(argv=None):
|
| 9 |
+
parser = argparse.ArgumentParser(prog="python -m ensurepip._uninstall")
|
| 10 |
+
parser.add_argument(
|
| 11 |
+
"--version",
|
| 12 |
+
action="version",
|
| 13 |
+
version="pip {}".format(ensurepip.version()),
|
| 14 |
+
help="Show the version of pip this will attempt to uninstall.",
|
| 15 |
+
)
|
| 16 |
+
parser.add_argument(
|
| 17 |
+
"-v", "--verbose",
|
| 18 |
+
action="count",
|
| 19 |
+
default=0,
|
| 20 |
+
dest="verbosity",
|
| 21 |
+
help=("Give more output. Option is additive, and can be used up to 3 "
|
| 22 |
+
"times."),
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
args = parser.parse_args(argv)
|
| 26 |
+
|
| 27 |
+
return ensurepip._uninstall_helper(verbosity=args.verbosity)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
if __name__ == "__main__":
|
| 31 |
+
sys.exit(_main())
|
parrot/lib/python3.10/json/__init__.py
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""JSON (JavaScript Object Notation) <https://json.org> is a subset of
|
| 2 |
+
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
| 3 |
+
interchange format.
|
| 4 |
+
|
| 5 |
+
:mod:`json` exposes an API familiar to users of the standard library
|
| 6 |
+
:mod:`marshal` and :mod:`pickle` modules. It is derived from a
|
| 7 |
+
version of the externally maintained simplejson library.
|
| 8 |
+
|
| 9 |
+
Encoding basic Python object hierarchies::
|
| 10 |
+
|
| 11 |
+
>>> import json
|
| 12 |
+
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
| 13 |
+
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
| 14 |
+
>>> print(json.dumps("\"foo\bar"))
|
| 15 |
+
"\"foo\bar"
|
| 16 |
+
>>> print(json.dumps('\u1234'))
|
| 17 |
+
"\u1234"
|
| 18 |
+
>>> print(json.dumps('\\'))
|
| 19 |
+
"\\"
|
| 20 |
+
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
|
| 21 |
+
{"a": 0, "b": 0, "c": 0}
|
| 22 |
+
>>> from io import StringIO
|
| 23 |
+
>>> io = StringIO()
|
| 24 |
+
>>> json.dump(['streaming API'], io)
|
| 25 |
+
>>> io.getvalue()
|
| 26 |
+
'["streaming API"]'
|
| 27 |
+
|
| 28 |
+
Compact encoding::
|
| 29 |
+
|
| 30 |
+
>>> import json
|
| 31 |
+
>>> mydict = {'4': 5, '6': 7}
|
| 32 |
+
>>> json.dumps([1,2,3,mydict], separators=(',', ':'))
|
| 33 |
+
'[1,2,3,{"4":5,"6":7}]'
|
| 34 |
+
|
| 35 |
+
Pretty printing::
|
| 36 |
+
|
| 37 |
+
>>> import json
|
| 38 |
+
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
|
| 39 |
+
{
|
| 40 |
+
"4": 5,
|
| 41 |
+
"6": 7
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
Decoding JSON::
|
| 45 |
+
|
| 46 |
+
>>> import json
|
| 47 |
+
>>> obj = ['foo', {'bar': ['baz', None, 1.0, 2]}]
|
| 48 |
+
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
| 49 |
+
True
|
| 50 |
+
>>> json.loads('"\\"foo\\bar"') == '"foo\x08ar'
|
| 51 |
+
True
|
| 52 |
+
>>> from io import StringIO
|
| 53 |
+
>>> io = StringIO('["streaming API"]')
|
| 54 |
+
>>> json.load(io)[0] == 'streaming API'
|
| 55 |
+
True
|
| 56 |
+
|
| 57 |
+
Specializing JSON object decoding::
|
| 58 |
+
|
| 59 |
+
>>> import json
|
| 60 |
+
>>> def as_complex(dct):
|
| 61 |
+
... if '__complex__' in dct:
|
| 62 |
+
... return complex(dct['real'], dct['imag'])
|
| 63 |
+
... return dct
|
| 64 |
+
...
|
| 65 |
+
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
| 66 |
+
... object_hook=as_complex)
|
| 67 |
+
(1+2j)
|
| 68 |
+
>>> from decimal import Decimal
|
| 69 |
+
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
|
| 70 |
+
True
|
| 71 |
+
|
| 72 |
+
Specializing JSON object encoding::
|
| 73 |
+
|
| 74 |
+
>>> import json
|
| 75 |
+
>>> def encode_complex(obj):
|
| 76 |
+
... if isinstance(obj, complex):
|
| 77 |
+
... return [obj.real, obj.imag]
|
| 78 |
+
... raise TypeError(f'Object of type {obj.__class__.__name__} '
|
| 79 |
+
... f'is not JSON serializable')
|
| 80 |
+
...
|
| 81 |
+
>>> json.dumps(2 + 1j, default=encode_complex)
|
| 82 |
+
'[2.0, 1.0]'
|
| 83 |
+
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
| 84 |
+
'[2.0, 1.0]'
|
| 85 |
+
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
| 86 |
+
'[2.0, 1.0]'
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
Using json.tool from the shell to validate and pretty-print::
|
| 90 |
+
|
| 91 |
+
$ echo '{"json":"obj"}' | python -m json.tool
|
| 92 |
+
{
|
| 93 |
+
"json": "obj"
|
| 94 |
+
}
|
| 95 |
+
$ echo '{ 1.2:3.4}' | python -m json.tool
|
| 96 |
+
Expecting property name enclosed in double quotes: line 1 column 3 (char 2)
|
| 97 |
+
"""
|
| 98 |
+
__version__ = '2.0.9'
|
| 99 |
+
__all__ = [
|
| 100 |
+
'dump', 'dumps', 'load', 'loads',
|
| 101 |
+
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
|
| 102 |
+
]
|
| 103 |
+
|
| 104 |
+
__author__ = 'Bob Ippolito <bob@redivi.com>'
|
| 105 |
+
|
| 106 |
+
from .decoder import JSONDecoder, JSONDecodeError
|
| 107 |
+
from .encoder import JSONEncoder
|
| 108 |
+
import codecs
|
| 109 |
+
|
| 110 |
+
_default_encoder = JSONEncoder(
|
| 111 |
+
skipkeys=False,
|
| 112 |
+
ensure_ascii=True,
|
| 113 |
+
check_circular=True,
|
| 114 |
+
allow_nan=True,
|
| 115 |
+
indent=None,
|
| 116 |
+
separators=None,
|
| 117 |
+
default=None,
|
| 118 |
+
)
|
| 119 |
+
|
| 120 |
+
def dump(obj, fp, *, skipkeys=False, ensure_ascii=True, check_circular=True,
|
| 121 |
+
allow_nan=True, cls=None, indent=None, separators=None,
|
| 122 |
+
default=None, sort_keys=False, **kw):
|
| 123 |
+
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
| 124 |
+
``.write()``-supporting file-like object).
|
| 125 |
+
|
| 126 |
+
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
| 127 |
+
(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
|
| 128 |
+
instead of raising a ``TypeError``.
|
| 129 |
+
|
| 130 |
+
If ``ensure_ascii`` is false, then the strings written to ``fp`` can
|
| 131 |
+
contain non-ASCII characters if they appear in strings contained in
|
| 132 |
+
``obj``. Otherwise, all such characters are escaped in JSON strings.
|
| 133 |
+
|
| 134 |
+
If ``check_circular`` is false, then the circular reference check
|
| 135 |
+
for container types will be skipped and a circular reference will
|
| 136 |
+
result in an ``RecursionError`` (or worse).
|
| 137 |
+
|
| 138 |
+
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
| 139 |
+
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
|
| 140 |
+
in strict compliance of the JSON specification, instead of using the
|
| 141 |
+
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
| 142 |
+
|
| 143 |
+
If ``indent`` is a non-negative integer, then JSON array elements and
|
| 144 |
+
object members will be pretty-printed with that indent level. An indent
|
| 145 |
+
level of 0 will only insert newlines. ``None`` is the most compact
|
| 146 |
+
representation.
|
| 147 |
+
|
| 148 |
+
If specified, ``separators`` should be an ``(item_separator, key_separator)``
|
| 149 |
+
tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
|
| 150 |
+
``(',', ': ')`` otherwise. To get the most compact JSON representation,
|
| 151 |
+
you should specify ``(',', ':')`` to eliminate whitespace.
|
| 152 |
+
|
| 153 |
+
``default(obj)`` is a function that should return a serializable version
|
| 154 |
+
of obj or raise TypeError. The default simply raises TypeError.
|
| 155 |
+
|
| 156 |
+
If *sort_keys* is true (default: ``False``), then the output of
|
| 157 |
+
dictionaries will be sorted by key.
|
| 158 |
+
|
| 159 |
+
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
| 160 |
+
``.default()`` method to serialize additional types), specify it with
|
| 161 |
+
the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
|
| 162 |
+
|
| 163 |
+
"""
|
| 164 |
+
# cached encoder
|
| 165 |
+
if (not skipkeys and ensure_ascii and
|
| 166 |
+
check_circular and allow_nan and
|
| 167 |
+
cls is None and indent is None and separators is None and
|
| 168 |
+
default is None and not sort_keys and not kw):
|
| 169 |
+
iterable = _default_encoder.iterencode(obj)
|
| 170 |
+
else:
|
| 171 |
+
if cls is None:
|
| 172 |
+
cls = JSONEncoder
|
| 173 |
+
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
| 174 |
+
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
| 175 |
+
separators=separators,
|
| 176 |
+
default=default, sort_keys=sort_keys, **kw).iterencode(obj)
|
| 177 |
+
# could accelerate with writelines in some versions of Python, at
|
| 178 |
+
# a debuggability cost
|
| 179 |
+
for chunk in iterable:
|
| 180 |
+
fp.write(chunk)
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def dumps(obj, *, skipkeys=False, ensure_ascii=True, check_circular=True,
|
| 184 |
+
allow_nan=True, cls=None, indent=None, separators=None,
|
| 185 |
+
default=None, sort_keys=False, **kw):
|
| 186 |
+
"""Serialize ``obj`` to a JSON formatted ``str``.
|
| 187 |
+
|
| 188 |
+
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
| 189 |
+
(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
|
| 190 |
+
instead of raising a ``TypeError``.
|
| 191 |
+
|
| 192 |
+
If ``ensure_ascii`` is false, then the return value can contain non-ASCII
|
| 193 |
+
characters if they appear in strings contained in ``obj``. Otherwise, all
|
| 194 |
+
such characters are escaped in JSON strings.
|
| 195 |
+
|
| 196 |
+
If ``check_circular`` is false, then the circular reference check
|
| 197 |
+
for container types will be skipped and a circular reference will
|
| 198 |
+
result in an ``RecursionError`` (or worse).
|
| 199 |
+
|
| 200 |
+
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
| 201 |
+
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
|
| 202 |
+
strict compliance of the JSON specification, instead of using the
|
| 203 |
+
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
| 204 |
+
|
| 205 |
+
If ``indent`` is a non-negative integer, then JSON array elements and
|
| 206 |
+
object members will be pretty-printed with that indent level. An indent
|
| 207 |
+
level of 0 will only insert newlines. ``None`` is the most compact
|
| 208 |
+
representation.
|
| 209 |
+
|
| 210 |
+
If specified, ``separators`` should be an ``(item_separator, key_separator)``
|
| 211 |
+
tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
|
| 212 |
+
``(',', ': ')`` otherwise. To get the most compact JSON representation,
|
| 213 |
+
you should specify ``(',', ':')`` to eliminate whitespace.
|
| 214 |
+
|
| 215 |
+
``default(obj)`` is a function that should return a serializable version
|
| 216 |
+
of obj or raise TypeError. The default simply raises TypeError.
|
| 217 |
+
|
| 218 |
+
If *sort_keys* is true (default: ``False``), then the output of
|
| 219 |
+
dictionaries will be sorted by key.
|
| 220 |
+
|
| 221 |
+
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
| 222 |
+
``.default()`` method to serialize additional types), specify it with
|
| 223 |
+
the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
|
| 224 |
+
|
| 225 |
+
"""
|
| 226 |
+
# cached encoder
|
| 227 |
+
if (not skipkeys and ensure_ascii and
|
| 228 |
+
check_circular and allow_nan and
|
| 229 |
+
cls is None and indent is None and separators is None and
|
| 230 |
+
default is None and not sort_keys and not kw):
|
| 231 |
+
return _default_encoder.encode(obj)
|
| 232 |
+
if cls is None:
|
| 233 |
+
cls = JSONEncoder
|
| 234 |
+
return cls(
|
| 235 |
+
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
| 236 |
+
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
| 237 |
+
separators=separators, default=default, sort_keys=sort_keys,
|
| 238 |
+
**kw).encode(obj)
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
_default_decoder = JSONDecoder(object_hook=None, object_pairs_hook=None)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def detect_encoding(b):
    """Guess the Unicode encoding of the JSON bytes ``b``.

    Implements the sniffing scheme from RFC 4627 section 3: first look
    for an explicit BOM, then infer the width of the encoding from the
    position of zero bytes among the first four (or two) bytes — the
    first character of a JSON text is always ASCII.  Returns a codec
    name usable with ``bytes.decode``; falls back to ``'utf-8'``.
    """
    # An explicit BOM wins.  UTF-32 must be tested before UTF-16 because
    # the UTF-32-LE BOM begins with the UTF-16-LE BOM bytes.
    if b.startswith((codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE)):
        return 'utf-32'
    if b.startswith((codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE)):
        return 'utf-16'
    if b.startswith(codecs.BOM_UTF8):
        return 'utf-8-sig'

    # No BOM: locate zero bytes to distinguish the wide encodings.
    if len(b) >= 4:
        if not b[0]:
            # 00 00 -- --  ->  utf-32-be
            # 00 XX -- --  ->  utf-16-be
            return 'utf-16-be' if b[1] else 'utf-32-be'
        if not b[1]:
            # XX 00 00 00  ->  utf-32-le
            # XX 00 00 XX / XX 00 XX --  ->  utf-16-le
            return 'utf-32-le' if not (b[2] or b[3]) else 'utf-16-le'
    elif len(b) == 2:
        if not b[0]:
            # 00 XX  ->  utf-16-be
            return 'utf-16-be'
        if not b[1]:
            # XX 00  ->  utf-16-le
            return 'utf-16-le'
    # Anything else decodes as plain UTF-8.
    return 'utf-8'
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def load(fp, *, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    a JSON document) to a Python object.

    ``object_hook`` is an optional function that will be called with the
    result of any object literal decode (a ``dict``). The return value of
    ``object_hook`` will be used instead of the ``dict``. This feature
    can be used to implement custom decoders (e.g. JSON-RPC class hinting).

    ``object_pairs_hook`` is an optional function that will be called with the
    result of any object literal decoded with an ordered list of pairs. The
    return value of ``object_pairs_hook`` will be used instead of the ``dict``.
    This feature can be used to implement custom decoders. If ``object_hook``
    is also defined, the ``object_pairs_hook`` takes priority.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg; otherwise ``JSONDecoder`` is used.
    """
    # Read the whole document into memory and delegate to loads(), which
    # also handles bytes input and encoding detection.
    return loads(fp.read(),
        cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def loads(s, *, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a JSON document) to a Python object.

    ``object_hook`` is an optional function that will be called with the
    result of any object literal decode (a ``dict``). The return value of
    ``object_hook`` will be used instead of the ``dict``. This feature
    can be used to implement custom decoders (e.g. JSON-RPC class hinting).

    ``object_pairs_hook`` is an optional function that will be called with the
    result of any object literal decoded with an ordered list of pairs. The
    return value of ``object_pairs_hook`` will be used instead of the ``dict``.
    This feature can be used to implement custom decoders. If ``object_hook``
    is also defined, the ``object_pairs_hook`` takes priority.

    ``parse_float``, if specified, will be called with the string
    of every JSON float to be decoded. By default this is equivalent to
    float(num_str). This can be used to use another datatype or parser
    for JSON floats (e.g. decimal.Decimal).

    ``parse_int``, if specified, will be called with the string
    of every JSON int to be decoded. By default this is equivalent to
    int(num_str). This can be used to use another datatype or parser
    for JSON integers (e.g. float).

    ``parse_constant``, if specified, will be called with one of the
    following strings: -Infinity, Infinity, NaN.
    This can be used to raise an exception if invalid JSON numbers
    are encountered.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg; otherwise ``JSONDecoder`` is used.
    """
    if isinstance(s, str):
        # A str input must not carry a BOM; that indicates the caller
        # decoded UTF-8-with-BOM bytes with the wrong codec.
        if s.startswith('\ufeff'):
            raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)",
                                  s, 0)
    else:
        if not isinstance(s, (bytes, bytearray)):
            raise TypeError(f'the JSON object must be str, bytes or bytearray, '
                            f'not {s.__class__.__name__}')
        # Sniff the encoding per RFC 4627 and decode; 'surrogatepass'
        # keeps lone surrogates round-trippable.
        s = s.decode(detect_encoding(s), 'surrogatepass')

    # Fast path: no customisation at all means the cached module-level
    # decoder can be reused instead of constructing a new one.
    if (cls is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and object_pairs_hook is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks that were actually supplied, so the decoder
    # class's own defaults apply for the rest.
    if object_hook is not None:
        kw['object_hook'] = object_hook
    if object_pairs_hook is not None:
        kw['object_pairs_hook'] = object_pairs_hook
    if parse_float is not None:
        kw['parse_float'] = parse_float
    if parse_int is not None:
        kw['parse_int'] = parse_int
    if parse_constant is not None:
        kw['parse_constant'] = parse_constant
    return cls(**kw).decode(s)
|
parrot/lib/python3.10/json/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
parrot/lib/python3.10/json/__pycache__/tool.cpython-310.pyc
ADDED
|
Binary file (3.07 kB). View file
|
|
|
parrot/lib/python3.10/json/decoder.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implementation of JSONDecoder
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
|
| 5 |
+
from json import scanner
|
| 6 |
+
try:
|
| 7 |
+
from _json import scanstring as c_scanstring
|
| 8 |
+
except ImportError:
|
| 9 |
+
c_scanstring = None
|
| 10 |
+
|
| 11 |
+
__all__ = ['JSONDecoder', 'JSONDecodeError']
|
| 12 |
+
|
| 13 |
+
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
| 14 |
+
|
| 15 |
+
NaN = float('nan')
|
| 16 |
+
PosInf = float('inf')
|
| 17 |
+
NegInf = float('-inf')
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    lineno: The line corresponding to pos
    colno: The column corresponding to pos

    """
    # Note that this exception is used from _json
    def __init__(self, msg, doc, pos):
        # Derive 1-based line/column numbers from the newlines that
        # precede the failure offset.
        lineno = 1 + doc.count('\n', 0, pos)
        colno = pos - doc.rfind('\n', 0, pos)
        formatted = '%s: line %d column %d (char %d)' % (msg, lineno,
                                                         colno, pos)
        ValueError.__init__(self, formatted)
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.lineno = lineno
        self.colno = colno

    def __reduce__(self):
        # Pickle support: rebuild from the original constructor arguments.
        return self.__class__, (self.msg, self.doc, self.pos)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
# Non-standard JSON constant literals mapped to their float values.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}


# Matches a run of ordinary characters followed by a terminator that
# needs handling: a closing quote, a backslash, or a raw control char.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences (everything except \uXXXX).
BACKSLASH = {
    '"': '"', '\\': '\\', '/': '/',
    'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t',
}
|
| 58 |
+
|
| 59 |
+
def _decode_uXXXX(s, pos):
|
| 60 |
+
esc = s[pos + 1:pos + 5]
|
| 61 |
+
if len(esc) == 4 and esc[1] not in 'xX':
|
| 62 |
+
try:
|
| 63 |
+
return int(esc, 16)
|
| 64 |
+
except ValueError:
|
| 65 |
+
pass
|
| 66 |
+
msg = "Invalid \\uXXXX escape"
|
| 67 |
+
raise JSONDecodeError(msg, s, pos)
|
| 68 |
+
|
| 69 |
+
def py_scanstring(s, end, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError("Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                #msg = "Invalid control character %r at" % (terminator,)
                msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                # Non-strict mode: keep the raw control character as-is.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError("Unterminated string starting at",
                                  s, begin) from None
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: {0!r}".format(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            uni = _decode_uXXXX(s, end)
            end += 5
            # If this is a high surrogate followed by a \uXXXX low
            # surrogate, combine the pair into one code point.
            if 0xd800 <= uni <= 0xdbff and s[end:end + 2] == '\\u':
                uni2 = _decode_uXXXX(s, end + 1)
                if 0xdc00 <= uni2 <= 0xdfff:
                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                    end += 6
            char = chr(uni)
        _append(char)
    return ''.join(chunks), end
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
# Use the C speedup if available, else the pure-Python implementation.
scanstring = c_scanstring or py_scanstring

# The four whitespace characters JSON permits between tokens.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook,
               memo=None, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object from ``s`` starting just after the opening
    ``{`` and return ``(obj, end)`` where ``end`` indexes the character
    after the closing ``}``.  ``memo`` caches key strings (via
    ``setdefault``) so repeated keys share a single str object.
    """
    s, end = s_and_end
    pairs = []
    pairs_append = pairs.append
    # Backwards compatibility
    if memo is None:
        memo = {}
    memo_get = memo.setdefault
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s, end)
    end += 1
    while True:
        key, end = scanstring(s, end, strict)
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting ':' delimiter", s, end)
        end += 1

        # Skip at most two whitespace chars inline before falling back to
        # the regex; IndexError just means we hit end-of-input.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            raise JSONDecodeError("Expecting value", s, err.value) from None
        pairs_append((key, value))
        # After the value expect either ',' (more members) or '}' (done).
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1

        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting ',' delimiter", s, end - 1)
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s, end - 1)
    # object_pairs_hook receives the raw ordered pair list and wins over
    # object_hook, which receives the built dict.
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
|
| 216 |
+
|
| 217 |
+
def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array from ``s`` starting just after the opening
    ``[`` and return ``(values, end)`` where ``end`` indexes the
    character after the closing ``]``.
    """
    s, end = s_and_end
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            raise JSONDecodeError("Expecting value", s, err.value) from None
        _append(value)
        # After each element expect ',' (more elements) or ']' (done).
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting ',' delimiter", s, end - 1)
        # Skip at most two whitespace chars inline before falling back to
        # the regex; IndexError just means we hit end-of-input.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

    return values, end
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class JSONDecoder(object):
    """Simple JSON <https://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | str               |
    +---------------+-------------------+
    | number (int)  | int               |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.

    """

    def __init__(self, *, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``object_pairs_hook``, if specified will be called with the result of
        every JSON object decoded with an ordered list of pairs.  The return
        value of ``object_pairs_hook`` will be used instead of the ``dict``.
        This feature can be used to implement custom decoders.
        If ``object_hook`` is also defined, the ``object_pairs_hook`` takes
        priority.

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        If ``strict`` is false (true is the default), then control
        characters will be allowed inside strings.  Control characters in
        this context are those with character codes in the 0-31 range,
        including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``.
        """
        self.object_hook = object_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        # Default constant handler maps the literal text to the float value.
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.object_pairs_hook = object_pairs_hook
        # Parser callables consumed by the scanner; the scanner reads these
        # attributes off this instance.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # Shared key-string cache passed through to JSONObject.
        self.memo = {}
        self.scan_once = scanner.make_scanner(self)


    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` instance
        containing a JSON document).

        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Anything but trailing whitespace after the document is an error.
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end)
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` beginning with
        a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.

        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration as err:
            raise JSONDecodeError("Expecting value", s, err.value) from None
        return obj, end
|
parrot/lib/python3.10/lib2to3/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import warnings


# Emit a deprecation notice on import; stacklevel=2 attributes the
# warning to the importer rather than this module.
warnings.warn(
    "lib2to3 package is deprecated and may not be able to parse Python 3.10+",
    PendingDeprecationWarning,
    stacklevel=2,
)
|
parrot/lib/python3.10/lib2to3/__main__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
from .main import main

# Run the 2to3 command-line driver with the standard fixer package and
# propagate its exit status to the shell.
sys.exit(main("lib2to3.fixes"))
|
parrot/lib/python3.10/lib2to3/btm_matcher.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A bottom-up tree matching algorithm implementation meant to speed
|
| 2 |
+
up 2to3's matching process. After the tree patterns are reduced to
|
| 3 |
+
their rarest linear path, a linear Aho-Corasick automaton is
|
| 4 |
+
created. The linear automaton traverses the linear paths from the
|
| 5 |
+
leaves to the root of the AST and returns a set of nodes for further
|
| 6 |
+
matching. This reduces significantly the number of candidate nodes."""
|
| 7 |
+
|
| 8 |
+
__author__ = "George Boutsioukis <gboutsioukis@gmail.com>"
|
| 9 |
+
|
| 10 |
+
import logging
|
| 11 |
+
import itertools
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
|
| 14 |
+
from . import pytree
|
| 15 |
+
from .btm_utils import reduce_tree
|
| 16 |
+
|
| 17 |
+
class BMNode(object):
    """A single state of the Aho-Corasick automaton used in matching."""

    # Class-wide counter that hands every node a unique, increasing id.
    count = itertools.count()

    def __init__(self):
        self.id = next(BMNode.count)
        # Maps a token (type number or name string) to the next state.
        self.transition_table = {}
        # Fixers whose linear pattern ends at this state.
        self.fixers = []
        self.content = ''
|
| 25 |
+
|
| 26 |
+
class BottomMatcher(object):
    """The main matcher class. After instantiating the patterns should
    be added using the add_fixer method"""

    def __init__(self):
        self.match = set()
        # Root state of the shared Aho-Corasick automaton.
        self.root = BMNode()
        self.nodes = [self.root]
        self.fixers = []
        self.logger = logging.getLogger("RefactoringTool")

    def add_fixer(self, fixer):
        """Reduces a fixer's pattern tree to a linear path and adds it
        to the matcher(a common Aho-Corasick automaton). The fixer is
        appended on the matching states and called when they are
        reached"""
        self.fixers.append(fixer)
        tree = reduce_tree(fixer.pattern_tree)
        linear = tree.get_linear_subpattern()
        match_nodes = self.add(linear, start=self.root)
        for match_node in match_nodes:
            match_node.fixers.append(fixer)

    def add(self, pattern, start):
        "Recursively adds a linear pattern to the AC automaton"
        #print("adding pattern", pattern, "to", start)
        if not pattern:
            #print("empty pattern")
            return [start]
        if isinstance(pattern[0], tuple):
            #alternatives
            #print("alternatives")
            match_nodes = []
            for alternative in pattern[0]:
                #add all alternatives, and add the rest of the pattern
                #to each end node
                end_nodes = self.add(alternative, start=start)
                for end in end_nodes:
                    match_nodes.extend(self.add(pattern[1:], end))
            return match_nodes
        else:
            #single token
            #not last
            if pattern[0] not in start.transition_table:
                #transition did not exist, create new
                next_node = BMNode()
                start.transition_table[pattern[0]] = next_node
            else:
                #transition exists already, follow
                next_node = start.transition_table[pattern[0]]

            if pattern[1:]:
                end_nodes = self.add(pattern[1:], start=next_node)
            else:
                end_nodes = [next_node]
            return end_nodes

    def run(self, leaves):
        """The main interface with the bottom matcher. The tree is
        traversed from the bottom using the constructed
        automaton. Nodes are only checked once as the tree is
        retraversed. When the automaton fails, we give it one more
        shot(in case the above tree matches as a whole with the
        rejected leaf), then we break for the next leaf. There is the
        special case of multiple arguments(see code comments) where we
        recheck the nodes

        Args:
           The leaves of the AST tree to be matched

        Returns:
           A dictionary of node matches with fixers as the keys
        """
        current_ac_node = self.root
        results = defaultdict(list)
        for leaf in leaves:
            current_ast_node = leaf
            # Walk from each leaf up toward the root, feeding tokens to
            # the automaton.
            while current_ast_node:
                current_ast_node.was_checked = True
                for child in current_ast_node.children:
                    # multiple statements, recheck
                    if isinstance(child, pytree.Leaf) and child.value == ";":
                        current_ast_node.was_checked = False
                        break
                if current_ast_node.type == 1:
                    #name
                    node_token = current_ast_node.value
                else:
                    node_token = current_ast_node.type

                if node_token in current_ac_node.transition_table:
                    #token matches
                    current_ac_node = current_ac_node.transition_table[node_token]
                    for fixer in current_ac_node.fixers:
                        results[fixer].append(current_ast_node)
                else:
                    #matching failed, reset automaton
                    current_ac_node = self.root
                    if (current_ast_node.parent is not None
                        and current_ast_node.parent.was_checked):
                        #the rest of the tree upwards has been checked, next leaf
                        break

                    #recheck the rejected node once from the root
                    if node_token in current_ac_node.transition_table:
                        #token matches
                        current_ac_node = current_ac_node.transition_table[node_token]
                        for fixer in current_ac_node.fixers:
                            results[fixer].append(current_ast_node)

                current_ast_node = current_ast_node.parent
        return results

    def print_ac(self):
        "Prints a graphviz diagram of the BM automaton(for debugging)"
        print("digraph g{")
        def print_node(node):
            for subnode_key in node.transition_table.keys():
                subnode = node.transition_table[subnode_key]
                print("%d -> %d [label=%s] //%s" %
                      (node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))
                if subnode_key == 1:
                    print(subnode.content)
                print_node(subnode)
        print_node(self.root)
        print("}")
|
| 152 |
+
|
| 153 |
+
# taken from pytree.py for debugging; only used by print_ac
_type_reprs = {}
def type_repr(type_num):
    """Return a readable name for grammar symbol number ``type_num``.

    Lazily builds a reverse mapping from python_symbols on first use;
    unknown numbers are returned unchanged (and cached).
    """
    global _type_reprs
    if not _type_reprs:
        from .pygram import python_symbols
        # printing tokens is possible but not as useful
        # from .pgen2 import token // token.__dict__.items():
        for name, val in python_symbols.__dict__.items():
            if type(val) == int: _type_reprs[val] = name
    return _type_reprs.setdefault(type_num, type_num)
|
parrot/lib/python3.10/lib2to3/btm_utils.py
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Utility functions used by the btm_matcher module"
|
| 2 |
+
|
| 3 |
+
from . import pytree
|
| 4 |
+
from .pgen2 import grammar, token
|
| 5 |
+
from .pygram import pattern_symbols, python_symbols
|
| 6 |
+
|
| 7 |
+
syms = pattern_symbols
|
| 8 |
+
pysyms = python_symbols
|
| 9 |
+
tokens = grammar.opmap
|
| 10 |
+
token_labels = token
|
| 11 |
+
|
| 12 |
+
TYPE_ANY = -1
|
| 13 |
+
TYPE_ALTERNATIVES = -2
|
| 14 |
+
TYPE_GROUP = -3
|
| 15 |
+
|
| 16 |
+
class MinNode(object):
    """This class serves as an intermediate representation of the
    pattern tree during the conversion to sets of leaf-to-root
    subpatterns"""

    def __init__(self, type=None, name=None):
        # type is either a grammar/token type or one of the TYPE_* pseudo-types
        self.type = type
        # name is set for NAME-typed leaves (the literal identifier to match)
        self.name = name
        self.children = []
        self.leaf = False
        self.parent = None
        # Scratch accumulators used by leaf_to_root while merging
        # alternative branches and groups bottom-up.
        self.alternatives = []
        self.group = []

    def __repr__(self):
        return str(self.type) + ' ' + str(self.name)

    def leaf_to_root(self):
        """Internal method. Returns a characteristic path of the
        pattern tree. This method must be run for all leaves until the
        linear subpatterns are merged into a single"""
        node = self
        subp = []
        while node:
            if node.type == TYPE_ALTERNATIVES:
                node.alternatives.append(subp)
                if len(node.alternatives) == len(node.children):
                    #last alternative
                    # All branches seen: collapse them into one tuple entry
                    # and continue climbing toward the root.
                    subp = [tuple(node.alternatives)]
                    node.alternatives = []
                    node = node.parent
                    continue
                else:
                    # Not all branches visited yet; abort this walk and wait
                    # for a later leaf to complete the alternative set.
                    node = node.parent
                    subp = None
                    break

            if node.type == TYPE_GROUP:
                node.group.append(subp)
                #probably should check the number of leaves
                if len(node.group) == len(node.children):
                    # All group members collected: keep only the most
                    # characteristic one (see get_characteristic_subpattern).
                    subp = get_characteristic_subpattern(node.group)
                    node.group = []
                    node = node.parent
                    continue
                else:
                    node = node.parent
                    subp = None
                    break

            if node.type == token_labels.NAME and node.name:
                #in case of type=name, use the name instead
                subp.append(node.name)
            else:
                subp.append(node.type)

            node = node.parent
        return subp

    def get_linear_subpattern(self):
        """Drives the leaf_to_root method. The reason that
        leaf_to_root must be run multiple times is because we need to
        reject 'group' matches; for example the alternative form
        (a | b c) creates a group [b c] that needs to be matched. Since
        matching multiple linear patterns overcomes the automaton's
        capabilities, leaf_to_root merges each group into a single
        choice based on 'characteristic'ity,

        i.e. (a|b c) -> (a|b) if b more characteristic than c

        Returns: The most 'characteristic'(as defined by
        get_characteristic_subpattern) path for the compiled pattern
        tree.
        """

        for l in self.leaves():
            subp = l.leaf_to_root()
            if subp:
                # First walk that reaches the root yields the merged result.
                return subp

    def leaves(self):
        "Generator that returns the leaves of the tree"
        for child in self.children:
            yield from child.leaves()
        if not self.children:
            yield self
|
| 102 |
+
|
| 103 |
+
def reduce_tree(node, parent=None):
    """
    Internal function. Reduces a compiled pattern tree to an
    intermediate representation suitable for feeding the
    automaton. This also trims off any optional pattern elements(like
    [a], a*).

    Returns a MinNode (linked to *parent*), or None when the subtree is
    entirely optional and can be dropped.
    """

    new_node = None
    #switch on the node type
    if node.type == syms.Matcher:
        #skip
        node = node.children[0]

    if node.type == syms.Alternatives:
        #2 cases
        if len(node.children) <= 2:
            #just a single 'Alternative', skip this node
            new_node = reduce_tree(node.children[0], parent)
        else:
            #real alternatives
            new_node = MinNode(type=TYPE_ALTERNATIVES)
            #skip odd children('|' tokens)
            for child in node.children:
                if node.children.index(child) % 2:
                    continue
                reduced = reduce_tree(child, new_node)
                if reduced is not None:
                    new_node.children.append(reduced)
    elif node.type == syms.Alternative:
        if len(node.children) > 1:

            new_node = MinNode(type=TYPE_GROUP)
            for child in node.children:
                reduced = reduce_tree(child, new_node)
                if reduced:
                    new_node.children.append(reduced)
            if not new_node.children:
                # delete the group if all of the children were reduced to None
                new_node = None

        else:
            new_node = reduce_tree(node.children[0], parent)

    elif node.type == syms.Unit:
        if (isinstance(node.children[0], pytree.Leaf) and
            node.children[0].value == '('):
            #skip parentheses
            return reduce_tree(node.children[1], parent)
        if ((isinstance(node.children[0], pytree.Leaf) and
               node.children[0].value == '[')
               or
               (len(node.children) > 1 and
               hasattr(node.children[1], "value") and
               node.children[1].value == '[')):
            #skip whole unit if its optional
            return None

        leaf = True
        details_node = None
        alternatives_node = None
        has_repeater = False
        repeater_node = None
        has_variable_name = False

        # Classify the unit's children so we know what to strip and
        # where the name/type leaf actually lives.
        for child in node.children:
            if child.type == syms.Details:
                leaf = False
                details_node = child
            elif child.type == syms.Repeater:
                has_repeater = True
                repeater_node = child
            elif child.type == syms.Alternatives:
                alternatives_node = child
            if hasattr(child, 'value') and child.value == '=': # variable name
                has_variable_name = True

        #skip variable name
        if has_variable_name:
            #skip variable name, '='
            name_leaf = node.children[2]
            if hasattr(name_leaf, 'value') and name_leaf.value == '(':
                # skip parenthesis
                name_leaf = node.children[3]
        else:
            name_leaf = node.children[0]

        #set node type
        if name_leaf.type == token_labels.NAME:
            #(python) non-name or wildcard
            if name_leaf.value == 'any':
                new_node = MinNode(type=TYPE_ANY)
            else:
                if hasattr(token_labels, name_leaf.value):
                    new_node = MinNode(type=getattr(token_labels, name_leaf.value))
                else:
                    new_node = MinNode(type=getattr(pysyms, name_leaf.value))

        elif name_leaf.type == token_labels.STRING:
            #(python) name or character; remove the apostrophes from
            #the string value
            name = name_leaf.value.strip("'")
            if name in tokens:
                new_node = MinNode(type=tokens[name])
            else:
                new_node = MinNode(type=token_labels.NAME, name=name)
        elif name_leaf.type == syms.Alternatives:
            new_node = reduce_tree(alternatives_node, parent)

        #handle repeaters
        if has_repeater:
            if repeater_node.children[0].value == '*':
                #reduce to None
                new_node = None
            elif repeater_node.children[0].value == '+':
                #reduce to a single occurrence i.e. do nothing
                pass
            else:
                #TODO: handle {min, max} repeaters
                raise NotImplementedError

    #add children
    if details_node and new_node is not None:
        for child in details_node.children[1:-1]:
            #skip '<', '>' markers
            reduced = reduce_tree(child, new_node)
            if reduced is not None:
                new_node.children.append(reduced)
    if new_node:
        new_node.parent = parent
    return new_node
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def get_characteristic_subpattern(subpatterns):
|
| 238 |
+
"""Picks the most characteristic from a list of linear patterns
|
| 239 |
+
Current order used is:
|
| 240 |
+
names > common_names > common_chars
|
| 241 |
+
"""
|
| 242 |
+
if not isinstance(subpatterns, list):
|
| 243 |
+
return subpatterns
|
| 244 |
+
if len(subpatterns)==1:
|
| 245 |
+
return subpatterns[0]
|
| 246 |
+
|
| 247 |
+
# first pick out the ones containing variable names
|
| 248 |
+
subpatterns_with_names = []
|
| 249 |
+
subpatterns_with_common_names = []
|
| 250 |
+
common_names = ['in', 'for', 'if' , 'not', 'None']
|
| 251 |
+
subpatterns_with_common_chars = []
|
| 252 |
+
common_chars = "[]().,:"
|
| 253 |
+
for subpattern in subpatterns:
|
| 254 |
+
if any(rec_test(subpattern, lambda x: type(x) is str)):
|
| 255 |
+
if any(rec_test(subpattern,
|
| 256 |
+
lambda x: isinstance(x, str) and x in common_chars)):
|
| 257 |
+
subpatterns_with_common_chars.append(subpattern)
|
| 258 |
+
elif any(rec_test(subpattern,
|
| 259 |
+
lambda x: isinstance(x, str) and x in common_names)):
|
| 260 |
+
subpatterns_with_common_names.append(subpattern)
|
| 261 |
+
|
| 262 |
+
else:
|
| 263 |
+
subpatterns_with_names.append(subpattern)
|
| 264 |
+
|
| 265 |
+
if subpatterns_with_names:
|
| 266 |
+
subpatterns = subpatterns_with_names
|
| 267 |
+
elif subpatterns_with_common_names:
|
| 268 |
+
subpatterns = subpatterns_with_common_names
|
| 269 |
+
elif subpatterns_with_common_chars:
|
| 270 |
+
subpatterns = subpatterns_with_common_chars
|
| 271 |
+
# of the remaining subpatterns pick out the longest one
|
| 272 |
+
return max(subpatterns, key=len)
|
| 273 |
+
|
| 274 |
+
def rec_test(sequence, test_func):
    """Tests test_func on all items of sequence and items of included
    sub-iterables"""
    for item in sequence:
        if not isinstance(item, (list, tuple)):
            # Plain element: emit the predicate's verdict directly.
            yield test_func(item)
        else:
            # Nested list/tuple: recurse and flatten the results.
            yield from rec_test(item, test_func)
|
parrot/lib/python3.10/lib2to3/fixes/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Dummy file to make this directory a package.
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_print.cpython-310.pyc
ADDED
|
Binary file (2.56 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_standarderror.cpython-310.pyc
ADDED
|
Binary file (972 Bytes). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_tuple_params.cpython-310.pyc
ADDED
|
Binary file (4.58 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/fix_asserts.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that replaces deprecated unittest method names."""
|
| 2 |
+
|
| 3 |
+
# Author: Ezio Melotti
|
| 4 |
+
|
| 5 |
+
from ..fixer_base import BaseFix
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
# Mapping of deprecated unittest method names to their modern equivalents.
# Keys are the old (Python 2 / early unittest) spellings; values are the
# names used in Python 3's unittest.
NAMES = dict(
    assert_="assertTrue",
    assertEquals="assertEqual",
    assertNotEquals="assertNotEqual",
    assertAlmostEquals="assertAlmostEqual",
    assertNotAlmostEquals="assertNotAlmostEqual",
    assertRegexpMatches="assertRegex",
    assertRaisesRegexp="assertRaisesRegex",
    failUnlessEqual="assertEqual",
    failIfEqual="assertNotEqual",
    failUnlessAlmostEqual="assertAlmostEqual",
    failIfAlmostEqual="assertNotAlmostEqual",
    failUnless="assertTrue",
    failUnlessRaises="assertRaises",
    failIf="assertFalse",
)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class FixAsserts(BaseFix):
    """Rename deprecated unittest assertion methods (see NAMES)."""

    # Match any attribute access whose attribute is one of the old names,
    # e.g. self.assertEquals(...). The %s expands to 'name1'|'name2'|...
    PATTERN = """
    power< any+ trailer< '.' meth=(%s)> any* >
    """ % '|'.join(map(repr, NAMES))

    def transform(self, node, results):
        # 'meth' is captured as a one-element list of the name leaf.
        name = results["meth"][0]
        # Replace with the modern name, preserving surrounding whitespace.
        name.replace(Name(NAMES[str(name)], prefix=name.prefix))
|
parrot/lib/python3.10/lib2to3/fixes/fix_except.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for except statements with named exceptions.
|
| 2 |
+
|
| 3 |
+
The following cases will be converted:
|
| 4 |
+
|
| 5 |
+
- "except E, T:" where T is a name:
|
| 6 |
+
|
| 7 |
+
except E as T:
|
| 8 |
+
|
| 9 |
+
- "except E, T:" where T is not a name, tuple or list:
|
| 10 |
+
|
| 11 |
+
except E as t:
|
| 12 |
+
T = t
|
| 13 |
+
|
| 14 |
+
This is done because the target of an "except" clause must be a
|
| 15 |
+
name.
|
| 16 |
+
|
| 17 |
+
- "except E, T:" where T is a tuple or list literal:
|
| 18 |
+
|
| 19 |
+
except E as t:
|
| 20 |
+
T = t.args
|
| 21 |
+
"""
|
| 22 |
+
# Author: Collin Winter
|
| 23 |
+
|
| 24 |
+
# Local imports
|
| 25 |
+
from .. import pytree
|
| 26 |
+
from ..pgen2 import token
|
| 27 |
+
from .. import fixer_base
|
| 28 |
+
from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms
|
| 29 |
+
|
| 30 |
+
def find_excepts(nodes):
    """Yield (except_clause, suite) pairs from a try statement's children.

    *nodes* is the flat child list of a try_stmt; the suite for each
    except clause sits two positions after it (past the ':' token).
    """
    for i, n in enumerate(nodes):
        if n.type == syms.except_clause:
            if n.children[0].value == 'except':
                yield (n, nodes[i+2])
|
| 35 |
+
|
| 36 |
+
class FixExcept(fixer_base.BaseFix):
    """Rewrite 'except E, T:' clauses to the Python 3 'except E as T:' form.

    When T is not a plain name, a temporary name is introduced and an
    assignment (T = tmp, or T = tmp.args for tuple/list targets) is
    inserted at the top of the except body.
    """
    BM_compatible = True

    PATTERN = """
    try_stmt< 'try' ':' (simple_stmt | suite)
                  cleanup=(except_clause ':' (simple_stmt | suite))+
                  tail=(['except' ':' (simple_stmt | suite)]
                        ['else' ':' (simple_stmt | suite)]
                        ['finally' ':' (simple_stmt | suite)]) >
    """

    def transform(self, node, results):
        syms = self.syms

        tail = [n.clone() for n in results["tail"]]

        try_cleanup = [ch.clone() for ch in results["cleanup"]]
        for except_clause, e_suite in find_excepts(try_cleanup):
            # 4 children means the clause is 'except', E, ',', N.
            if len(except_clause.children) == 4:
                (E, comma, N) = except_clause.children[1:4]
                comma.replace(Name("as", prefix=" "))

                if N.type != token.NAME:
                    # Generate a new N for the except clause
                    new_N = Name(self.new_name(), prefix=" ")
                    target = N.clone()
                    target.prefix = ""
                    N.replace(new_N)
                    new_N = new_N.clone()

                    # Insert "old_N = new_N" as the first statement in
                    # the except body. This loop skips leading whitespace
                    # and indents
                    #TODO(cwinter) suite-cleanup
                    suite_stmts = e_suite.children
                    for i, stmt in enumerate(suite_stmts):
                        if isinstance(stmt, pytree.Node):
                            break

                    # The assignment is different if old_N is a tuple or list
                    # In that case, the assignment is old_N = new_N.args
                    if is_tuple(N) or is_list(N):
                        assign = Assign(target, Attr(new_N, Name('args')))
                    else:
                        assign = Assign(target, new_N)

                    #TODO(cwinter) stopgap until children becomes a smart list
                    for child in reversed(suite_stmts[:i]):
                        e_suite.insert_child(0, child)
                    e_suite.insert_child(i, assign)
                elif N.prefix == "":
                    # No space after a comma is legal; no space after "as",
                    # not so much.
                    N.prefix = " "

        #TODO(cwinter) fix this when children becomes a smart list
        children = [c.clone() for c in node.children[:3]] + try_cleanup + tail
        return pytree.Node(node.type, children)
|
parrot/lib/python3.10/lib2to3/fixes/fix_execfile.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for execfile.
|
| 5 |
+
|
| 6 |
+
This converts usages of the execfile function into calls to the built-in
|
| 7 |
+
exec() function.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
|
| 12 |
+
ArgList, String, syms)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FixExecfile(fixer_base.BaseFix):
    """Rewrite execfile(f[, globals[, locals]]) as
    exec(compile(open(f, "rb").read(), f, 'exec')[, globals[, locals]])."""
    BM_compatible = True

    PATTERN = """
    power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
    |
    power< 'execfile' trailer< '(' filename=any ')' > >
    """

    def transform(self, node, results):
        assert results
        filename = results["filename"]
        globals = results.get("globals")
        locals = results.get("locals")

        # Copy over the prefix from the right parentheses end of the execfile
        # call.
        execfile_paren = node.children[-1].children[-1].clone()
        # Construct open().read().
        open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')],
                            rparen=execfile_paren)
        open_call = Node(syms.power, [Name("open"), open_args])
        read = [Node(syms.trailer, [Dot(), Name('read')]),
                Node(syms.trailer, [LParen(), RParen()])]
        open_expr = [open_call] + read
        # Wrap the open call in a compile call. This is so the filename will be
        # preserved in the execed code.
        filename_arg = filename.clone()
        filename_arg.prefix = " "
        exec_str = String("'exec'", " ")
        compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
        compile_call = Call(Name("compile"), compile_args, "")
        # Finally, replace the execfile call with an exec call.
        args = [compile_call]
        if globals is not None:
            args.extend([Comma(), globals.clone()])
        if locals is not None:
            args.extend([Comma(), locals.clone()])
        return Call(Name("exec"), args, prefix=node.prefix)
|
parrot/lib/python3.10/lib2to3/fixes/fix_filter.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes filter(F, X) into list(filter(F, X)).
|
| 5 |
+
|
| 6 |
+
We avoid the transformation if the filter() call is directly contained
|
| 7 |
+
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
|
| 8 |
+
for V in <>:.
|
| 9 |
+
|
| 10 |
+
NOTE: This is still not correct if the original code was depending on
|
| 11 |
+
filter(F, X) to return a string if X is a string and a tuple if X is a
|
| 12 |
+
tuple. That would require type inference, which we don't do. Let
|
| 13 |
+
Python 2.6 figure it out.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
# Local imports
|
| 17 |
+
from .. import fixer_base
|
| 18 |
+
from ..pytree import Node
|
| 19 |
+
from ..pygram import python_symbols as syms
|
| 20 |
+
from ..fixer_util import Name, ArgList, ListComp, in_special_context, parenthesize
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class FixFilter(fixer_base.ConditionalFix):
    """Wrap filter(...) calls in list(...), or rewrite lambda forms as a
    list comprehension, unless the call appears in a context that already
    consumes an iterator (see module docstring)."""
    BM_compatible = True

    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    # Don't transform if future_builtins.filter has been imported.
    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        if self.should_skip(node):
            return

        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if "filter_lambda" in results:
            # filter(lambda fp: xp, it)  ->  [fp for fp in it if xp]
            xp = results.get("xp").clone()
            if xp.type == syms.test:
                xp.prefix = ""
                xp = parenthesize(xp)

            new = ListComp(results.get("fp").clone(),
                           results.get("fp").clone(),
                           results.get("it").clone(), xp)
            new = Node(syms.power, [new] + trailers, prefix="")

        elif "none" in results:
            # filter(None, seq)  ->  [_f for _f in seq if _f]
            new = ListComp(Name("_f"),
                           Name("_f"),
                           results["seq"].clone(),
                           Name("_f"))
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            # General case: leave the iterator contexts alone, otherwise
            # wrap the whole call in list(...).
            if in_special_context(node):
                return None

            args = results['args'].clone()
            new = Node(syms.power, [Name("filter"), args], prefix="")
            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""

        new.prefix = node.prefix
        return new
|
parrot/lib/python3.10/lib2to3/fixes/fix_getcwdu.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Fixer that changes os.getcwdu() to os.getcwd().
|
| 3 |
+
"""
|
| 4 |
+
# Author: Victor Stinner
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
class FixGetcwdu(fixer_base.BaseFix):
    """Rename os.getcwdu() calls to os.getcwd()."""
    BM_compatible = True

    PATTERN = """
              power< 'os' trailer< dot='.' name='getcwdu' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        # Keep the original whitespace before the attribute name.
        name.replace(Name("getcwd", prefix=name.prefix))
|
parrot/lib/python3.10/lib2to3/fixes/fix_imports2.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible imports and module references that must be fixed after
|
| 2 |
+
fix_imports."""
|
| 3 |
+
from . import fix_imports
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# Module renames applied by FixImports2; these must run after the generic
# fix_imports pass (the class below sets a later run_order accordingly).
MAPPING = {
            'whichdb': 'dbm',
            'anydbm': 'dbm',
          }
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixImports2(fix_imports.FixImports):
    """Second-pass import fixer for renames that depend on fix_imports
    having already run."""

    # Run later than FixImports (whose default order is earlier).
    run_order = 7

    mapping = MAPPING
|
parrot/lib/python3.10/lib2to3/fixes/fix_intern.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Georg Brandl.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for intern().
|
| 5 |
+
|
| 6 |
+
intern(s) -> sys.intern(s)"""
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
from ..fixer_util import ImportAndCall, touch_import
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class FixIntern(fixer_base.BaseFix):
    """Rewrite intern(s) as sys.intern(s), adding the sys import if needed."""
    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'intern'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        if results:
            # I feel like we should be able to express this logic in the
            # PATTERN above but I don't know how to do it so...
            obj = results['obj']
            if obj:
                # Starred arguments (*args / **kwargs) can't be safely
                # rewritten; leave the call untouched.
                if (obj.type == self.syms.argument and
                    obj.children[0].value in {'**', '*'}):
                    return  # Make no change.
            names = ('sys', 'intern')
            new = ImportAndCall(node, results, names)
            touch_import(None, 'sys', node)
            return new
|
parrot/lib/python3.10/lib2to3/fixes/fix_itertools.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
|
| 2 |
+
itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
|
| 3 |
+
|
| 4 |
+
imports from itertools are fixed in fix_itertools_import.py
|
| 5 |
+
|
| 6 |
+
If itertools is imported as something else (ie: import itertools as it;
|
| 7 |
+
it.izip(spam, eggs)) method calls will not get fixed.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Local imports
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name
|
| 13 |
+
|
| 14 |
+
class FixItertools(fixer_base.BaseFix):
    """Rename itertools.i(map|filter|zip)(...) to the builtin equivalents
    and ifilterfalse/izip_longest to their itertools.* Python 3 names."""
    BM_compatible = True
    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    PATTERN = """
              power< it='itertools'
                  trailer<
                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
              |
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())

    # Needs to be run after fix_(map|zip|filter)
    run_order = 6

    def transform(self, node, results):
        prefix = None
        func = results['func'][0]
        # Only drop the 'itertools.' qualifier for functions that became
        # builtins; filterfalse/zip_longest stay in itertools.
        if ('it' in results and
            func.value not in ('ifilterfalse', 'izip_longest')):
            dot, it = (results['dot'], results['it'])
            # Remove the 'itertools'
            prefix = it.prefix
            it.remove()
            # Replace the node which contains ('.', 'function') with the
            # function (to be consistent with the second part of the pattern)
            dot.remove()
            func.parent.replace(func)

        prefix = prefix or func.prefix
        # Strip the leading 'i' (imap -> map, ifilterfalse -> filterfalse).
        func.replace(Name(func.value[1:], prefix=prefix))
|
parrot/lib/python3.10/lib2to3/fixes/fix_long.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that turns 'long' into 'int' everywhere.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Local imports
|
| 8 |
+
from lib2to3 import fixer_base
|
| 9 |
+
from lib2to3.fixer_util import is_probably_builtin
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixLong(fixer_base.BaseFix):
    """Replace the builtin name 'long' with 'int'."""
    BM_compatible = True
    PATTERN = "'long'"

    def transform(self, node, results):
        # Only rewrite when the name plausibly refers to the builtin,
        # not e.g. an attribute or local variable named 'long'.
        if is_probably_builtin(node):
            node.value = "int"
            node.changed()
|
parrot/lib/python3.10/lib2to3/fixes/fix_nonzero.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for __nonzero__ -> __bool__ methods."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
class FixNonzero(fixer_base.BaseFix):
    """Rename __nonzero__ methods (defined in a class body) to __bool__."""
    BM_compatible = True
    PATTERN = """
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def' name='__nonzero__'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    """

    def transform(self, node, results):
        name = results["name"]
        # Preserve whitespace/comments attached to the old name.
        new = Name("__bool__", prefix=name.prefix)
        name.replace(new)
|
parrot/lib/python3.10/lib2to3/fixes/fix_raise.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for 'raise E, V, T'
|
| 2 |
+
|
| 3 |
+
raise -> raise
|
| 4 |
+
raise E -> raise E
|
| 5 |
+
raise E, V -> raise E(V)
|
| 6 |
+
raise E, V, T -> raise E(V).with_traceback(T)
|
| 7 |
+
raise E, None, T -> raise E.with_traceback(T)
|
| 8 |
+
|
| 9 |
+
raise (((E, E'), E''), E'''), V -> raise E(V)
|
| 10 |
+
raise "foo", V, T -> warns about string exceptions
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
CAVEATS:
|
| 14 |
+
1) "raise E, V" will be incorrectly translated if V is an exception
|
| 15 |
+
instance. The correct Python 3 idiom is
|
| 16 |
+
|
| 17 |
+
raise E from V
|
| 18 |
+
|
| 19 |
+
but since we can't detect instance-hood by syntax alone and since
|
| 20 |
+
any client code would have to be changed as well, we don't automate
|
| 21 |
+
this.
|
| 22 |
+
"""
|
| 23 |
+
# Author: Collin Winter
|
| 24 |
+
|
| 25 |
+
# Local imports
|
| 26 |
+
from .. import pytree
|
| 27 |
+
from ..pgen2 import token
|
| 28 |
+
from .. import fixer_base
|
| 29 |
+
from ..fixer_util import Name, Call, Attr, ArgList, is_tuple
|
| 30 |
+
|
| 31 |
+
class FixRaise(fixer_base.BaseFix):
    """Rewrite Python 2 'raise E, V, T' statements to Python 3 syntax
    (see the module docstring for the exact mapping and caveats)."""

    BM_compatible = True
    PATTERN = """
    raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
    """

    def transform(self, node, results):
        syms = self.syms

        exc = results["exc"].clone()
        if exc.type == token.STRING:
            # String exceptions have no Python 3 equivalent; warn and bail.
            msg = "Python 3 does not support string exceptions"
            self.cannot_convert(node, msg)
            return

        # Python 2 supports
        #  raise ((((E1, E2), E3), E4), E5), V
        # as a synonym for
        #  raise E1, V
        # Since Python 3 will not support this, we recurse down any tuple
        # literals, always taking the first element.
        if is_tuple(exc):
            while is_tuple(exc):
                # exc.children[1:-1] is the unparenthesized tuple
                # exc.children[1].children[0] is the first element of the tuple
                exc = exc.children[1].children[0].clone()
            exc.prefix = " "

        if "val" not in results:
            # One-argument raise
            new = pytree.Node(syms.raise_stmt, [Name("raise"), exc])
            new.prefix = node.prefix
            return new

        val = results["val"].clone()
        if is_tuple(val):
            # raise E, (a, b)  ->  raise E(a, b): unpack the tuple into args.
            args = [c.clone() for c in val.children[1:-1]]
        else:
            val.prefix = ""
            args = [val]

        if "tb" in results:
            tb = results["tb"].clone()
            tb.prefix = ""

            e = exc
            # If there's a traceback and None is passed as the value, then don't
            # add a call, since the user probably just wants to add a
            # traceback. See issue #9661.
            if val.type != token.NAME or val.value != "None":
                e = Call(exc, args)
            with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
            new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb)
            new.prefix = node.prefix
            return new
        else:
            return pytree.Node(syms.raise_stmt,
                               [Name("raise"), Call(exc, args)],
                               prefix=node.prefix)
|
parrot/lib/python3.10/lib2to3/fixes/fix_standarderror.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for StandardError -> Exception."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixStandarderror(fixer_base.BaseFix):
|
| 12 |
+
BM_compatible = True
|
| 13 |
+
PATTERN = """
|
| 14 |
+
'StandardError'
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
def transform(self, node, results):
|
| 18 |
+
return Name("Exception", prefix=node.prefix)
|
parrot/lib/python3.10/lib2to3/fixes/fix_unicode.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Fixer for unicode.
|
| 2 |
+
|
| 3 |
+
* Changes unicode to str and unichr to chr.
|
| 4 |
+
|
| 5 |
+
* If "...\u..." is not unicode literal change it into "...\\u...".
|
| 6 |
+
|
| 7 |
+
* Change u"..." into "...".
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from ..pgen2 import token
|
| 12 |
+
from .. import fixer_base
|
| 13 |
+
|
| 14 |
+
_mapping = {"unichr" : "chr", "unicode" : "str"}
|
| 15 |
+
|
| 16 |
+
class FixUnicode(fixer_base.BaseFix):
|
| 17 |
+
BM_compatible = True
|
| 18 |
+
PATTERN = "STRING | 'unicode' | 'unichr'"
|
| 19 |
+
|
| 20 |
+
def start_tree(self, tree, filename):
|
| 21 |
+
super(FixUnicode, self).start_tree(tree, filename)
|
| 22 |
+
self.unicode_literals = 'unicode_literals' in tree.future_features
|
| 23 |
+
|
| 24 |
+
def transform(self, node, results):
|
| 25 |
+
if node.type == token.NAME:
|
| 26 |
+
new = node.clone()
|
| 27 |
+
new.value = _mapping[node.value]
|
| 28 |
+
return new
|
| 29 |
+
elif node.type == token.STRING:
|
| 30 |
+
val = node.value
|
| 31 |
+
if not self.unicode_literals and val[0] in '\'"' and '\\' in val:
|
| 32 |
+
val = r'\\'.join([
|
| 33 |
+
v.replace('\\u', r'\\u').replace('\\U', r'\\U')
|
| 34 |
+
for v in val.split(r'\\')
|
| 35 |
+
])
|
| 36 |
+
if val[0] in 'uU':
|
| 37 |
+
val = val[1:]
|
| 38 |
+
if val == node.value:
|
| 39 |
+
return node
|
| 40 |
+
new = node.clone()
|
| 41 |
+
new.value = val
|
| 42 |
+
return new
|
parrot/lib/python3.10/lib2to3/main.py
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Main program for 2to3.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import with_statement, print_function
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
import difflib
|
| 10 |
+
import logging
|
| 11 |
+
import shutil
|
| 12 |
+
import optparse
|
| 13 |
+
|
| 14 |
+
from . import refactor
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def diff_texts(a, b, filename):
|
| 18 |
+
"""Return a unified diff of two strings."""
|
| 19 |
+
a = a.splitlines()
|
| 20 |
+
b = b.splitlines()
|
| 21 |
+
return difflib.unified_diff(a, b, filename, filename,
|
| 22 |
+
"(original)", "(refactored)",
|
| 23 |
+
lineterm="")
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
|
| 27 |
+
"""
|
| 28 |
+
A refactoring tool that can avoid overwriting its input files.
|
| 29 |
+
Prints output to stdout.
|
| 30 |
+
|
| 31 |
+
Output files can optionally be written to a different directory and or
|
| 32 |
+
have an extra file suffix appended to their name for use in situations
|
| 33 |
+
where you do not want to replace the input files.
|
| 34 |
+
"""
|
| 35 |
+
|
| 36 |
+
def __init__(self, fixers, options, explicit, nobackups, show_diffs,
|
| 37 |
+
input_base_dir='', output_dir='', append_suffix=''):
|
| 38 |
+
"""
|
| 39 |
+
Args:
|
| 40 |
+
fixers: A list of fixers to import.
|
| 41 |
+
options: A dict with RefactoringTool configuration.
|
| 42 |
+
explicit: A list of fixers to run even if they are explicit.
|
| 43 |
+
nobackups: If true no backup '.bak' files will be created for those
|
| 44 |
+
files that are being refactored.
|
| 45 |
+
show_diffs: Should diffs of the refactoring be printed to stdout?
|
| 46 |
+
input_base_dir: The base directory for all input files. This class
|
| 47 |
+
will strip this path prefix off of filenames before substituting
|
| 48 |
+
it with output_dir. Only meaningful if output_dir is supplied.
|
| 49 |
+
All files processed by refactor() must start with this path.
|
| 50 |
+
output_dir: If supplied, all converted files will be written into
|
| 51 |
+
this directory tree instead of input_base_dir.
|
| 52 |
+
append_suffix: If supplied, all files output by this tool will have
|
| 53 |
+
this appended to their filename. Useful for changing .py to
|
| 54 |
+
.py3 for example by passing append_suffix='3'.
|
| 55 |
+
"""
|
| 56 |
+
self.nobackups = nobackups
|
| 57 |
+
self.show_diffs = show_diffs
|
| 58 |
+
if input_base_dir and not input_base_dir.endswith(os.sep):
|
| 59 |
+
input_base_dir += os.sep
|
| 60 |
+
self._input_base_dir = input_base_dir
|
| 61 |
+
self._output_dir = output_dir
|
| 62 |
+
self._append_suffix = append_suffix
|
| 63 |
+
super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
|
| 64 |
+
|
| 65 |
+
def log_error(self, msg, *args, **kwargs):
|
| 66 |
+
self.errors.append((msg, args, kwargs))
|
| 67 |
+
self.logger.error(msg, *args, **kwargs)
|
| 68 |
+
|
| 69 |
+
def write_file(self, new_text, filename, old_text, encoding):
|
| 70 |
+
orig_filename = filename
|
| 71 |
+
if self._output_dir:
|
| 72 |
+
if filename.startswith(self._input_base_dir):
|
| 73 |
+
filename = os.path.join(self._output_dir,
|
| 74 |
+
filename[len(self._input_base_dir):])
|
| 75 |
+
else:
|
| 76 |
+
raise ValueError('filename %s does not start with the '
|
| 77 |
+
'input_base_dir %s' % (
|
| 78 |
+
filename, self._input_base_dir))
|
| 79 |
+
if self._append_suffix:
|
| 80 |
+
filename += self._append_suffix
|
| 81 |
+
if orig_filename != filename:
|
| 82 |
+
output_dir = os.path.dirname(filename)
|
| 83 |
+
if not os.path.isdir(output_dir) and output_dir:
|
| 84 |
+
os.makedirs(output_dir)
|
| 85 |
+
self.log_message('Writing converted %s to %s.', orig_filename,
|
| 86 |
+
filename)
|
| 87 |
+
if not self.nobackups:
|
| 88 |
+
# Make backup
|
| 89 |
+
backup = filename + ".bak"
|
| 90 |
+
if os.path.lexists(backup):
|
| 91 |
+
try:
|
| 92 |
+
os.remove(backup)
|
| 93 |
+
except OSError:
|
| 94 |
+
self.log_message("Can't remove backup %s", backup)
|
| 95 |
+
try:
|
| 96 |
+
os.rename(filename, backup)
|
| 97 |
+
except OSError:
|
| 98 |
+
self.log_message("Can't rename %s to %s", filename, backup)
|
| 99 |
+
# Actually write the new file
|
| 100 |
+
write = super(StdoutRefactoringTool, self).write_file
|
| 101 |
+
write(new_text, filename, old_text, encoding)
|
| 102 |
+
if not self.nobackups:
|
| 103 |
+
shutil.copymode(backup, filename)
|
| 104 |
+
if orig_filename != filename:
|
| 105 |
+
# Preserve the file mode in the new output directory.
|
| 106 |
+
shutil.copymode(orig_filename, filename)
|
| 107 |
+
|
| 108 |
+
def print_output(self, old, new, filename, equal):
|
| 109 |
+
if equal:
|
| 110 |
+
self.log_message("No changes to %s", filename)
|
| 111 |
+
else:
|
| 112 |
+
self.log_message("Refactored %s", filename)
|
| 113 |
+
if self.show_diffs:
|
| 114 |
+
diff_lines = diff_texts(old, new, filename)
|
| 115 |
+
try:
|
| 116 |
+
if self.output_lock is not None:
|
| 117 |
+
with self.output_lock:
|
| 118 |
+
for line in diff_lines:
|
| 119 |
+
print(line)
|
| 120 |
+
sys.stdout.flush()
|
| 121 |
+
else:
|
| 122 |
+
for line in diff_lines:
|
| 123 |
+
print(line)
|
| 124 |
+
except UnicodeEncodeError:
|
| 125 |
+
warn("couldn't encode %s's diff for your terminal" %
|
| 126 |
+
(filename,))
|
| 127 |
+
return
|
| 128 |
+
|
| 129 |
+
def warn(msg):
|
| 130 |
+
print("WARNING: %s" % (msg,), file=sys.stderr)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def main(fixer_pkg, args=None):
|
| 134 |
+
"""Main program.
|
| 135 |
+
|
| 136 |
+
Args:
|
| 137 |
+
fixer_pkg: the name of a package where the fixers are located.
|
| 138 |
+
args: optional; a list of command line arguments. If omitted,
|
| 139 |
+
sys.argv[1:] is used.
|
| 140 |
+
|
| 141 |
+
Returns a suggested exit status (0, 1, 2).
|
| 142 |
+
"""
|
| 143 |
+
# Set up option parser
|
| 144 |
+
parser = optparse.OptionParser(usage="2to3 [options] file|dir ...")
|
| 145 |
+
parser.add_option("-d", "--doctests_only", action="store_true",
|
| 146 |
+
help="Fix up doctests only")
|
| 147 |
+
parser.add_option("-f", "--fix", action="append", default=[],
|
| 148 |
+
help="Each FIX specifies a transformation; default: all")
|
| 149 |
+
parser.add_option("-j", "--processes", action="store", default=1,
|
| 150 |
+
type="int", help="Run 2to3 concurrently")
|
| 151 |
+
parser.add_option("-x", "--nofix", action="append", default=[],
|
| 152 |
+
help="Prevent a transformation from being run")
|
| 153 |
+
parser.add_option("-l", "--list-fixes", action="store_true",
|
| 154 |
+
help="List available transformations")
|
| 155 |
+
parser.add_option("-p", "--print-function", action="store_true",
|
| 156 |
+
help="Modify the grammar so that print() is a function")
|
| 157 |
+
parser.add_option("-e", "--exec-function", action="store_true",
|
| 158 |
+
help="Modify the grammar so that exec() is a function")
|
| 159 |
+
parser.add_option("-v", "--verbose", action="store_true",
|
| 160 |
+
help="More verbose logging")
|
| 161 |
+
parser.add_option("--no-diffs", action="store_true",
|
| 162 |
+
help="Don't show diffs of the refactoring")
|
| 163 |
+
parser.add_option("-w", "--write", action="store_true",
|
| 164 |
+
help="Write back modified files")
|
| 165 |
+
parser.add_option("-n", "--nobackups", action="store_true", default=False,
|
| 166 |
+
help="Don't write backups for modified files")
|
| 167 |
+
parser.add_option("-o", "--output-dir", action="store", type="str",
|
| 168 |
+
default="", help="Put output files in this directory "
|
| 169 |
+
"instead of overwriting the input files. Requires -n.")
|
| 170 |
+
parser.add_option("-W", "--write-unchanged-files", action="store_true",
|
| 171 |
+
help="Also write files even if no changes were required"
|
| 172 |
+
" (useful with --output-dir); implies -w.")
|
| 173 |
+
parser.add_option("--add-suffix", action="store", type="str", default="",
|
| 174 |
+
help="Append this string to all output filenames."
|
| 175 |
+
" Requires -n if non-empty. "
|
| 176 |
+
"ex: --add-suffix='3' will generate .py3 files.")
|
| 177 |
+
|
| 178 |
+
# Parse command line arguments
|
| 179 |
+
refactor_stdin = False
|
| 180 |
+
flags = {}
|
| 181 |
+
options, args = parser.parse_args(args)
|
| 182 |
+
if options.write_unchanged_files:
|
| 183 |
+
flags["write_unchanged_files"] = True
|
| 184 |
+
if not options.write:
|
| 185 |
+
warn("--write-unchanged-files/-W implies -w.")
|
| 186 |
+
options.write = True
|
| 187 |
+
# If we allowed these, the original files would be renamed to backup names
|
| 188 |
+
# but not replaced.
|
| 189 |
+
if options.output_dir and not options.nobackups:
|
| 190 |
+
parser.error("Can't use --output-dir/-o without -n.")
|
| 191 |
+
if options.add_suffix and not options.nobackups:
|
| 192 |
+
parser.error("Can't use --add-suffix without -n.")
|
| 193 |
+
|
| 194 |
+
if not options.write and options.no_diffs:
|
| 195 |
+
warn("not writing files and not printing diffs; that's not very useful")
|
| 196 |
+
if not options.write and options.nobackups:
|
| 197 |
+
parser.error("Can't use -n without -w")
|
| 198 |
+
if options.list_fixes:
|
| 199 |
+
print("Available transformations for the -f/--fix option:")
|
| 200 |
+
for fixname in refactor.get_all_fix_names(fixer_pkg):
|
| 201 |
+
print(fixname)
|
| 202 |
+
if not args:
|
| 203 |
+
return 0
|
| 204 |
+
if not args:
|
| 205 |
+
print("At least one file or directory argument required.", file=sys.stderr)
|
| 206 |
+
print("Use --help to show usage.", file=sys.stderr)
|
| 207 |
+
return 2
|
| 208 |
+
if "-" in args:
|
| 209 |
+
refactor_stdin = True
|
| 210 |
+
if options.write:
|
| 211 |
+
print("Can't write to stdin.", file=sys.stderr)
|
| 212 |
+
return 2
|
| 213 |
+
if options.print_function:
|
| 214 |
+
flags["print_function"] = True
|
| 215 |
+
|
| 216 |
+
if options.exec_function:
|
| 217 |
+
flags["exec_function"] = True
|
| 218 |
+
|
| 219 |
+
# Set up logging handler
|
| 220 |
+
level = logging.DEBUG if options.verbose else logging.INFO
|
| 221 |
+
logging.basicConfig(format='%(name)s: %(message)s', level=level)
|
| 222 |
+
logger = logging.getLogger('lib2to3.main')
|
| 223 |
+
|
| 224 |
+
# Initialize the refactoring tool
|
| 225 |
+
avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
|
| 226 |
+
unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
|
| 227 |
+
explicit = set()
|
| 228 |
+
if options.fix:
|
| 229 |
+
all_present = False
|
| 230 |
+
for fix in options.fix:
|
| 231 |
+
if fix == "all":
|
| 232 |
+
all_present = True
|
| 233 |
+
else:
|
| 234 |
+
explicit.add(fixer_pkg + ".fix_" + fix)
|
| 235 |
+
requested = avail_fixes.union(explicit) if all_present else explicit
|
| 236 |
+
else:
|
| 237 |
+
requested = avail_fixes.union(explicit)
|
| 238 |
+
fixer_names = requested.difference(unwanted_fixes)
|
| 239 |
+
input_base_dir = os.path.commonprefix(args)
|
| 240 |
+
if (input_base_dir and not input_base_dir.endswith(os.sep)
|
| 241 |
+
and not os.path.isdir(input_base_dir)):
|
| 242 |
+
# One or more similar names were passed, their directory is the base.
|
| 243 |
+
# os.path.commonprefix() is ignorant of path elements, this corrects
|
| 244 |
+
# for that weird API.
|
| 245 |
+
input_base_dir = os.path.dirname(input_base_dir)
|
| 246 |
+
if options.output_dir:
|
| 247 |
+
input_base_dir = input_base_dir.rstrip(os.sep)
|
| 248 |
+
logger.info('Output in %r will mirror the input directory %r layout.',
|
| 249 |
+
options.output_dir, input_base_dir)
|
| 250 |
+
rt = StdoutRefactoringTool(
|
| 251 |
+
sorted(fixer_names), flags, sorted(explicit),
|
| 252 |
+
options.nobackups, not options.no_diffs,
|
| 253 |
+
input_base_dir=input_base_dir,
|
| 254 |
+
output_dir=options.output_dir,
|
| 255 |
+
append_suffix=options.add_suffix)
|
| 256 |
+
|
| 257 |
+
# Refactor all files and directories passed as arguments
|
| 258 |
+
if not rt.errors:
|
| 259 |
+
if refactor_stdin:
|
| 260 |
+
rt.refactor_stdin()
|
| 261 |
+
else:
|
| 262 |
+
try:
|
| 263 |
+
rt.refactor(args, options.write, options.doctests_only,
|
| 264 |
+
options.processes)
|
| 265 |
+
except refactor.MultiprocessingUnsupported:
|
| 266 |
+
assert options.processes > 1
|
| 267 |
+
print("Sorry, -j isn't supported on this platform.",
|
| 268 |
+
file=sys.stderr)
|
| 269 |
+
return 1
|
| 270 |
+
rt.summarize()
|
| 271 |
+
|
| 272 |
+
# Return error status (0 if rt.errors is zero)
|
| 273 |
+
return int(bool(rt.errors))
|
parrot/lib/python3.10/lib2to3/tests/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Author: Collin Winter
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
from test.support import load_package_tests
|
| 6 |
+
|
| 7 |
+
def load_tests(*args):
|
| 8 |
+
return load_package_tests(os.path.dirname(__file__), *args)
|
parrot/lib/python3.10/lib2to3/tests/__pycache__/test_pytree.cpython-310.pyc
ADDED
|
Binary file (14.9 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/__pycache__/test_util.cpython-310.pyc
ADDED
|
Binary file (19.5 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/data/fixers/no_fixer_cls.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# This is empty so trying to fetch the fixer class gives an AttributeError
|
parrot/lib/python3.10/lib2to3/tests/pytree_idempotency.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 3 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 4 |
+
|
| 5 |
+
"""Main program for testing the infrastructure."""
|
| 6 |
+
|
| 7 |
+
from __future__ import print_function
|
| 8 |
+
|
| 9 |
+
__author__ = "Guido van Rossum <guido@python.org>"
|
| 10 |
+
|
| 11 |
+
# Support imports (need to be imported first)
|
| 12 |
+
from . import support
|
| 13 |
+
|
| 14 |
+
# Python imports
|
| 15 |
+
import os
|
| 16 |
+
import sys
|
| 17 |
+
import logging
|
| 18 |
+
|
| 19 |
+
# Local imports
|
| 20 |
+
from .. import pytree
|
| 21 |
+
from .. import pgen2
|
| 22 |
+
from ..pgen2 import driver
|
| 23 |
+
|
| 24 |
+
logging.basicConfig()
|
| 25 |
+
|
| 26 |
+
def main():
|
| 27 |
+
gr = driver.load_grammar("Grammar.txt")
|
| 28 |
+
dr = driver.Driver(gr, convert=pytree.convert)
|
| 29 |
+
|
| 30 |
+
fn = "example.py"
|
| 31 |
+
tree = dr.parse_file(fn, debug=True)
|
| 32 |
+
if not diff(fn, tree):
|
| 33 |
+
print("No diffs.")
|
| 34 |
+
if not sys.argv[1:]:
|
| 35 |
+
return # Pass a dummy argument to run the complete test suite below
|
| 36 |
+
|
| 37 |
+
problems = []
|
| 38 |
+
|
| 39 |
+
# Process every imported module
|
| 40 |
+
for name in sys.modules:
|
| 41 |
+
mod = sys.modules[name]
|
| 42 |
+
if mod is None or not hasattr(mod, "__file__"):
|
| 43 |
+
continue
|
| 44 |
+
fn = mod.__file__
|
| 45 |
+
if fn.endswith(".pyc"):
|
| 46 |
+
fn = fn[:-1]
|
| 47 |
+
if not fn.endswith(".py"):
|
| 48 |
+
continue
|
| 49 |
+
print("Parsing", fn, file=sys.stderr)
|
| 50 |
+
tree = dr.parse_file(fn, debug=True)
|
| 51 |
+
if diff(fn, tree):
|
| 52 |
+
problems.append(fn)
|
| 53 |
+
|
| 54 |
+
# Process every single module on sys.path (but not in packages)
|
| 55 |
+
for dir in sys.path:
|
| 56 |
+
try:
|
| 57 |
+
names = os.listdir(dir)
|
| 58 |
+
except OSError:
|
| 59 |
+
continue
|
| 60 |
+
print("Scanning", dir, "...", file=sys.stderr)
|
| 61 |
+
for name in names:
|
| 62 |
+
if not name.endswith(".py"):
|
| 63 |
+
continue
|
| 64 |
+
print("Parsing", name, file=sys.stderr)
|
| 65 |
+
fn = os.path.join(dir, name)
|
| 66 |
+
try:
|
| 67 |
+
tree = dr.parse_file(fn, debug=True)
|
| 68 |
+
except pgen2.parse.ParseError as err:
|
| 69 |
+
print("ParseError:", err)
|
| 70 |
+
else:
|
| 71 |
+
if diff(fn, tree):
|
| 72 |
+
problems.append(fn)
|
| 73 |
+
|
| 74 |
+
# Show summary of problem files
|
| 75 |
+
if not problems:
|
| 76 |
+
print("No problems. Congratulations!")
|
| 77 |
+
else:
|
| 78 |
+
print("Problems in following files:")
|
| 79 |
+
for fn in problems:
|
| 80 |
+
print("***", fn)
|
| 81 |
+
|
| 82 |
+
def diff(fn, tree):
|
| 83 |
+
f = open("@", "w")
|
| 84 |
+
try:
|
| 85 |
+
f.write(str(tree))
|
| 86 |
+
finally:
|
| 87 |
+
f.close()
|
| 88 |
+
try:
|
| 89 |
+
return os.system("diff -u %s @" % fn)
|
| 90 |
+
finally:
|
| 91 |
+
os.remove("@")
|
| 92 |
+
|
| 93 |
+
if __name__ == "__main__":
|
| 94 |
+
main()
|
parrot/lib/python3.10/lib2to3/tests/test_main.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
import codecs
|
| 3 |
+
import io
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
import shutil
|
| 8 |
+
import sys
|
| 9 |
+
import tempfile
|
| 10 |
+
import unittest
|
| 11 |
+
|
| 12 |
+
from lib2to3 import main
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
|
| 16 |
+
PY2_TEST_MODULE = os.path.join(TEST_DATA_DIR, "py2_test_grammar.py")
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TestMain(unittest.TestCase):
|
| 20 |
+
|
| 21 |
+
def setUp(self):
|
| 22 |
+
self.temp_dir = None # tearDown() will rmtree this directory if set.
|
| 23 |
+
|
| 24 |
+
def tearDown(self):
|
| 25 |
+
# Clean up logging configuration down by main.
|
| 26 |
+
del logging.root.handlers[:]
|
| 27 |
+
if self.temp_dir:
|
| 28 |
+
shutil.rmtree(self.temp_dir)
|
| 29 |
+
|
| 30 |
+
def run_2to3_capture(self, args, in_capture, out_capture, err_capture):
|
| 31 |
+
save_stdin = sys.stdin
|
| 32 |
+
save_stdout = sys.stdout
|
| 33 |
+
save_stderr = sys.stderr
|
| 34 |
+
sys.stdin = in_capture
|
| 35 |
+
sys.stdout = out_capture
|
| 36 |
+
sys.stderr = err_capture
|
| 37 |
+
try:
|
| 38 |
+
return main.main("lib2to3.fixes", args)
|
| 39 |
+
finally:
|
| 40 |
+
sys.stdin = save_stdin
|
| 41 |
+
sys.stdout = save_stdout
|
| 42 |
+
sys.stderr = save_stderr
|
| 43 |
+
|
| 44 |
+
def test_unencodable_diff(self):
|
| 45 |
+
input_stream = io.StringIO("print 'nothing'\nprint u'über'\n")
|
| 46 |
+
out = io.BytesIO()
|
| 47 |
+
out_enc = codecs.getwriter("ascii")(out)
|
| 48 |
+
err = io.StringIO()
|
| 49 |
+
ret = self.run_2to3_capture(["-"], input_stream, out_enc, err)
|
| 50 |
+
self.assertEqual(ret, 0)
|
| 51 |
+
output = out.getvalue().decode("ascii")
|
| 52 |
+
self.assertIn("-print 'nothing'", output)
|
| 53 |
+
self.assertIn("WARNING: couldn't encode <stdin>'s diff for "
|
| 54 |
+
"your terminal", err.getvalue())
|
| 55 |
+
|
| 56 |
+
def setup_test_source_trees(self):
|
| 57 |
+
"""Setup a test source tree and output destination tree."""
|
| 58 |
+
self.temp_dir = tempfile.mkdtemp() # tearDown() cleans this up.
|
| 59 |
+
self.py2_src_dir = os.path.join(self.temp_dir, "python2_project")
|
| 60 |
+
self.py3_dest_dir = os.path.join(self.temp_dir, "python3_project")
|
| 61 |
+
os.mkdir(self.py2_src_dir)
|
| 62 |
+
os.mkdir(self.py3_dest_dir)
|
| 63 |
+
# Turn it into a package with a few files.
|
| 64 |
+
self.setup_files = []
|
| 65 |
+
open(os.path.join(self.py2_src_dir, "__init__.py"), "w").close()
|
| 66 |
+
self.setup_files.append("__init__.py")
|
| 67 |
+
shutil.copy(PY2_TEST_MODULE, self.py2_src_dir)
|
| 68 |
+
self.setup_files.append(os.path.basename(PY2_TEST_MODULE))
|
| 69 |
+
self.trivial_py2_file = os.path.join(self.py2_src_dir, "trivial.py")
|
| 70 |
+
self.init_py2_file = os.path.join(self.py2_src_dir, "__init__.py")
|
| 71 |
+
with open(self.trivial_py2_file, "w") as trivial:
|
| 72 |
+
trivial.write("print 'I need a simple conversion.'")
|
| 73 |
+
self.setup_files.append("trivial.py")
|
| 74 |
+
|
| 75 |
+
def test_filename_changing_on_output_single_dir(self):
|
| 76 |
+
"""2to3 a single directory with a new output dir and suffix."""
|
| 77 |
+
self.setup_test_source_trees()
|
| 78 |
+
out = io.StringIO()
|
| 79 |
+
err = io.StringIO()
|
| 80 |
+
suffix = "TEST"
|
| 81 |
+
ret = self.run_2to3_capture(
|
| 82 |
+
["-n", "--add-suffix", suffix, "--write-unchanged-files",
|
| 83 |
+
"--no-diffs", "--output-dir",
|
| 84 |
+
self.py3_dest_dir, self.py2_src_dir],
|
| 85 |
+
io.StringIO(""), out, err)
|
| 86 |
+
self.assertEqual(ret, 0)
|
| 87 |
+
stderr = err.getvalue()
|
| 88 |
+
self.assertIn(" implies -w.", stderr)
|
| 89 |
+
self.assertIn(
|
| 90 |
+
"Output in %r will mirror the input directory %r layout" % (
|
| 91 |
+
self.py3_dest_dir, self.py2_src_dir), stderr)
|
| 92 |
+
self.assertEqual(set(name+suffix for name in self.setup_files),
|
| 93 |
+
set(os.listdir(self.py3_dest_dir)))
|
| 94 |
+
for name in self.setup_files:
|
| 95 |
+
self.assertIn("Writing converted %s to %s" % (
|
| 96 |
+
os.path.join(self.py2_src_dir, name),
|
| 97 |
+
os.path.join(self.py3_dest_dir, name+suffix)), stderr)
|
| 98 |
+
sep = re.escape(os.sep)
|
| 99 |
+
self.assertRegex(
|
| 100 |
+
stderr, r"No changes to .*/__init__\.py".replace("/", sep))
|
| 101 |
+
self.assertNotRegex(
|
| 102 |
+
stderr, r"No changes to .*/trivial\.py".replace("/", sep))
|
| 103 |
+
|
| 104 |
+
def test_filename_changing_on_output_two_files(self):
|
| 105 |
+
"""2to3 two files in one directory with a new output dir."""
|
| 106 |
+
self.setup_test_source_trees()
|
| 107 |
+
err = io.StringIO()
|
| 108 |
+
py2_files = [self.trivial_py2_file, self.init_py2_file]
|
| 109 |
+
expected_files = set(os.path.basename(name) for name in py2_files)
|
| 110 |
+
ret = self.run_2to3_capture(
|
| 111 |
+
["-n", "-w", "--write-unchanged-files",
|
| 112 |
+
"--no-diffs", "--output-dir", self.py3_dest_dir] + py2_files,
|
| 113 |
+
io.StringIO(""), io.StringIO(), err)
|
| 114 |
+
self.assertEqual(ret, 0)
|
| 115 |
+
stderr = err.getvalue()
|
| 116 |
+
self.assertIn(
|
| 117 |
+
"Output in %r will mirror the input directory %r layout" % (
|
| 118 |
+
self.py3_dest_dir, self.py2_src_dir), stderr)
|
| 119 |
+
self.assertEqual(expected_files, set(os.listdir(self.py3_dest_dir)))
|
| 120 |
+
|
| 121 |
+
def test_filename_changing_on_output_single_file(self):
|
| 122 |
+
"""2to3 a single file with a new output dir."""
|
| 123 |
+
self.setup_test_source_trees()
|
| 124 |
+
err = io.StringIO()
|
| 125 |
+
ret = self.run_2to3_capture(
|
| 126 |
+
["-n", "-w", "--no-diffs", "--output-dir", self.py3_dest_dir,
|
| 127 |
+
self.trivial_py2_file],
|
| 128 |
+
io.StringIO(""), io.StringIO(), err)
|
| 129 |
+
self.assertEqual(ret, 0)
|
| 130 |
+
stderr = err.getvalue()
|
| 131 |
+
self.assertIn(
|
| 132 |
+
"Output in %r will mirror the input directory %r layout" % (
|
| 133 |
+
self.py3_dest_dir, self.py2_src_dir), stderr)
|
| 134 |
+
self.assertEqual(set([os.path.basename(self.trivial_py2_file)]),
|
| 135 |
+
set(os.listdir(self.py3_dest_dir)))
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
if __name__ == '__main__':
|
| 139 |
+
unittest.main()
|
parrot/lib/python3.10/lib2to3/tests/test_pytree.py
ADDED
|
@@ -0,0 +1,472 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Unit tests for pytree.py.
|
| 5 |
+
|
| 6 |
+
NOTE: Please *don't* add doc strings to individual test methods!
|
| 7 |
+
In verbose mode, printing of the module, class and method name is much
|
| 8 |
+
more helpful than printing of (the first line of) the docstring,
|
| 9 |
+
especially when debugging a test.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
# Testing imports
|
| 13 |
+
from . import support
|
| 14 |
+
|
| 15 |
+
from lib2to3 import pytree
|
| 16 |
+
|
| 17 |
+
try:
|
| 18 |
+
sorted
|
| 19 |
+
except NameError:
|
| 20 |
+
def sorted(lst):
|
| 21 |
+
l = list(lst)
|
| 22 |
+
l.sort()
|
| 23 |
+
return l
|
| 24 |
+
|
| 25 |
+
class TestNodes(support.TestCase):
|
| 26 |
+
|
| 27 |
+
"""Unit tests for nodes (Base, Leaf, Node)."""
|
| 28 |
+
|
| 29 |
+
def test_instantiate_base(self):
|
| 30 |
+
if __debug__:
|
| 31 |
+
# Test that instantiating Base() raises an AssertionError
|
| 32 |
+
self.assertRaises(AssertionError, pytree.Base)
|
| 33 |
+
|
| 34 |
+
def test_leaf(self):
|
| 35 |
+
l1 = pytree.Leaf(100, "foo")
|
| 36 |
+
self.assertEqual(l1.type, 100)
|
| 37 |
+
self.assertEqual(l1.value, "foo")
|
| 38 |
+
|
| 39 |
+
def test_leaf_repr(self):
|
| 40 |
+
l1 = pytree.Leaf(100, "foo")
|
| 41 |
+
self.assertEqual(repr(l1), "Leaf(100, 'foo')")
|
| 42 |
+
|
| 43 |
+
def test_leaf_str(self):
|
| 44 |
+
l1 = pytree.Leaf(100, "foo")
|
| 45 |
+
self.assertEqual(str(l1), "foo")
|
| 46 |
+
l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1)))
|
| 47 |
+
self.assertEqual(str(l2), " foo")
|
| 48 |
+
|
| 49 |
+
def test_leaf_str_numeric_value(self):
|
| 50 |
+
# Make sure that the Leaf's value is stringified. Failing to
|
| 51 |
+
# do this can cause a TypeError in certain situations.
|
| 52 |
+
l1 = pytree.Leaf(2, 5)
|
| 53 |
+
l1.prefix = "foo_"
|
| 54 |
+
self.assertEqual(str(l1), "foo_5")
|
| 55 |
+
|
| 56 |
+
def test_leaf_equality(self):
|
| 57 |
+
l1 = pytree.Leaf(100, "foo")
|
| 58 |
+
l2 = pytree.Leaf(100, "foo", context=(" ", (1, 0)))
|
| 59 |
+
self.assertEqual(l1, l2)
|
| 60 |
+
l3 = pytree.Leaf(101, "foo")
|
| 61 |
+
l4 = pytree.Leaf(100, "bar")
|
| 62 |
+
self.assertNotEqual(l1, l3)
|
| 63 |
+
self.assertNotEqual(l1, l4)
|
| 64 |
+
|
| 65 |
+
def test_leaf_prefix(self):
|
| 66 |
+
l1 = pytree.Leaf(100, "foo")
|
| 67 |
+
self.assertEqual(l1.prefix, "")
|
| 68 |
+
self.assertFalse(l1.was_changed)
|
| 69 |
+
l1.prefix = " ##\n\n"
|
| 70 |
+
self.assertEqual(l1.prefix, " ##\n\n")
|
| 71 |
+
self.assertTrue(l1.was_changed)
|
| 72 |
+
|
| 73 |
+
def test_node(self):
|
| 74 |
+
l1 = pytree.Leaf(100, "foo")
|
| 75 |
+
l2 = pytree.Leaf(200, "bar")
|
| 76 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 77 |
+
self.assertEqual(n1.type, 1000)
|
| 78 |
+
self.assertEqual(n1.children, [l1, l2])
|
| 79 |
+
|
| 80 |
+
def test_node_repr(self):
|
| 81 |
+
l1 = pytree.Leaf(100, "foo")
|
| 82 |
+
l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
|
| 83 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 84 |
+
self.assertEqual(repr(n1),
|
| 85 |
+
"Node(1000, [%s, %s])" % (repr(l1), repr(l2)))
|
| 86 |
+
|
| 87 |
+
def test_node_str(self):
|
| 88 |
+
l1 = pytree.Leaf(100, "foo")
|
| 89 |
+
l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
|
| 90 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 91 |
+
self.assertEqual(str(n1), "foo bar")
|
| 92 |
+
|
| 93 |
+
def test_node_prefix(self):
|
| 94 |
+
l1 = pytree.Leaf(100, "foo")
|
| 95 |
+
self.assertEqual(l1.prefix, "")
|
| 96 |
+
n1 = pytree.Node(1000, [l1])
|
| 97 |
+
self.assertEqual(n1.prefix, "")
|
| 98 |
+
n1.prefix = " "
|
| 99 |
+
self.assertEqual(n1.prefix, " ")
|
| 100 |
+
self.assertEqual(l1.prefix, " ")
|
| 101 |
+
|
| 102 |
+
def test_get_suffix(self):
|
| 103 |
+
l1 = pytree.Leaf(100, "foo", prefix="a")
|
| 104 |
+
l2 = pytree.Leaf(100, "bar", prefix="b")
|
| 105 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 106 |
+
|
| 107 |
+
self.assertEqual(l1.get_suffix(), l2.prefix)
|
| 108 |
+
self.assertEqual(l2.get_suffix(), "")
|
| 109 |
+
self.assertEqual(n1.get_suffix(), "")
|
| 110 |
+
|
| 111 |
+
l3 = pytree.Leaf(100, "bar", prefix="c")
|
| 112 |
+
n2 = pytree.Node(1000, [n1, l3])
|
| 113 |
+
|
| 114 |
+
self.assertEqual(n1.get_suffix(), l3.prefix)
|
| 115 |
+
self.assertEqual(l3.get_suffix(), "")
|
| 116 |
+
self.assertEqual(n2.get_suffix(), "")
|
| 117 |
+
|
| 118 |
+
def test_node_equality(self):
|
| 119 |
+
n1 = pytree.Node(1000, ())
|
| 120 |
+
n2 = pytree.Node(1000, [], context=(" ", (1, 0)))
|
| 121 |
+
self.assertEqual(n1, n2)
|
| 122 |
+
n3 = pytree.Node(1001, ())
|
| 123 |
+
self.assertNotEqual(n1, n3)
|
| 124 |
+
|
| 125 |
+
def test_node_recursive_equality(self):
|
| 126 |
+
l1 = pytree.Leaf(100, "foo")
|
| 127 |
+
l2 = pytree.Leaf(100, "foo")
|
| 128 |
+
n1 = pytree.Node(1000, [l1])
|
| 129 |
+
n2 = pytree.Node(1000, [l2])
|
| 130 |
+
self.assertEqual(n1, n2)
|
| 131 |
+
l3 = pytree.Leaf(100, "bar")
|
| 132 |
+
n3 = pytree.Node(1000, [l3])
|
| 133 |
+
self.assertNotEqual(n1, n3)
|
| 134 |
+
|
| 135 |
+
def test_replace(self):
|
| 136 |
+
l1 = pytree.Leaf(100, "foo")
|
| 137 |
+
l2 = pytree.Leaf(100, "+")
|
| 138 |
+
l3 = pytree.Leaf(100, "bar")
|
| 139 |
+
n1 = pytree.Node(1000, [l1, l2, l3])
|
| 140 |
+
self.assertEqual(n1.children, [l1, l2, l3])
|
| 141 |
+
self.assertIsInstance(n1.children, list)
|
| 142 |
+
self.assertFalse(n1.was_changed)
|
| 143 |
+
l2new = pytree.Leaf(100, "-")
|
| 144 |
+
l2.replace(l2new)
|
| 145 |
+
self.assertEqual(n1.children, [l1, l2new, l3])
|
| 146 |
+
self.assertIsInstance(n1.children, list)
|
| 147 |
+
self.assertTrue(n1.was_changed)
|
| 148 |
+
|
| 149 |
+
def test_replace_with_list(self):
|
| 150 |
+
l1 = pytree.Leaf(100, "foo")
|
| 151 |
+
l2 = pytree.Leaf(100, "+")
|
| 152 |
+
l3 = pytree.Leaf(100, "bar")
|
| 153 |
+
n1 = pytree.Node(1000, [l1, l2, l3])
|
| 154 |
+
|
| 155 |
+
l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")])
|
| 156 |
+
self.assertEqual(str(n1), "foo**bar")
|
| 157 |
+
self.assertIsInstance(n1.children, list)
|
| 158 |
+
|
| 159 |
+
def test_leaves(self):
|
| 160 |
+
l1 = pytree.Leaf(100, "foo")
|
| 161 |
+
l2 = pytree.Leaf(100, "bar")
|
| 162 |
+
l3 = pytree.Leaf(100, "fooey")
|
| 163 |
+
n2 = pytree.Node(1000, [l1, l2])
|
| 164 |
+
n3 = pytree.Node(1000, [l3])
|
| 165 |
+
n1 = pytree.Node(1000, [n2, n3])
|
| 166 |
+
|
| 167 |
+
self.assertEqual(list(n1.leaves()), [l1, l2, l3])
|
| 168 |
+
|
| 169 |
+
def test_depth(self):
|
| 170 |
+
l1 = pytree.Leaf(100, "foo")
|
| 171 |
+
l2 = pytree.Leaf(100, "bar")
|
| 172 |
+
n2 = pytree.Node(1000, [l1, l2])
|
| 173 |
+
n3 = pytree.Node(1000, [])
|
| 174 |
+
n1 = pytree.Node(1000, [n2, n3])
|
| 175 |
+
|
| 176 |
+
self.assertEqual(l1.depth(), 2)
|
| 177 |
+
self.assertEqual(n3.depth(), 1)
|
| 178 |
+
self.assertEqual(n1.depth(), 0)
|
| 179 |
+
|
| 180 |
+
def test_post_order(self):
|
| 181 |
+
l1 = pytree.Leaf(100, "foo")
|
| 182 |
+
l2 = pytree.Leaf(100, "bar")
|
| 183 |
+
l3 = pytree.Leaf(100, "fooey")
|
| 184 |
+
c1 = pytree.Node(1000, [l1, l2])
|
| 185 |
+
n1 = pytree.Node(1000, [c1, l3])
|
| 186 |
+
self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1])
|
| 187 |
+
|
| 188 |
+
def test_pre_order(self):
|
| 189 |
+
l1 = pytree.Leaf(100, "foo")
|
| 190 |
+
l2 = pytree.Leaf(100, "bar")
|
| 191 |
+
l3 = pytree.Leaf(100, "fooey")
|
| 192 |
+
c1 = pytree.Node(1000, [l1, l2])
|
| 193 |
+
n1 = pytree.Node(1000, [c1, l3])
|
| 194 |
+
self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3])
|
| 195 |
+
|
| 196 |
+
def test_changed(self):
|
| 197 |
+
l1 = pytree.Leaf(100, "f")
|
| 198 |
+
self.assertFalse(l1.was_changed)
|
| 199 |
+
l1.changed()
|
| 200 |
+
self.assertTrue(l1.was_changed)
|
| 201 |
+
|
| 202 |
+
l1 = pytree.Leaf(100, "f")
|
| 203 |
+
n1 = pytree.Node(1000, [l1])
|
| 204 |
+
self.assertFalse(n1.was_changed)
|
| 205 |
+
n1.changed()
|
| 206 |
+
self.assertTrue(n1.was_changed)
|
| 207 |
+
|
| 208 |
+
l1 = pytree.Leaf(100, "foo")
|
| 209 |
+
l2 = pytree.Leaf(100, "+")
|
| 210 |
+
l3 = pytree.Leaf(100, "bar")
|
| 211 |
+
n1 = pytree.Node(1000, [l1, l2, l3])
|
| 212 |
+
n2 = pytree.Node(1000, [n1])
|
| 213 |
+
self.assertFalse(l1.was_changed)
|
| 214 |
+
self.assertFalse(n1.was_changed)
|
| 215 |
+
self.assertFalse(n2.was_changed)
|
| 216 |
+
|
| 217 |
+
n1.changed()
|
| 218 |
+
self.assertTrue(n1.was_changed)
|
| 219 |
+
self.assertTrue(n2.was_changed)
|
| 220 |
+
self.assertFalse(l1.was_changed)
|
| 221 |
+
|
| 222 |
+
def test_leaf_constructor_prefix(self):
|
| 223 |
+
for prefix in ("xyz_", ""):
|
| 224 |
+
l1 = pytree.Leaf(100, "self", prefix=prefix)
|
| 225 |
+
self.assertTrue(str(l1), prefix + "self")
|
| 226 |
+
self.assertEqual(l1.prefix, prefix)
|
| 227 |
+
|
| 228 |
+
def test_node_constructor_prefix(self):
|
| 229 |
+
for prefix in ("xyz_", ""):
|
| 230 |
+
l1 = pytree.Leaf(100, "self")
|
| 231 |
+
l2 = pytree.Leaf(100, "foo", prefix="_")
|
| 232 |
+
n1 = pytree.Node(1000, [l1, l2], prefix=prefix)
|
| 233 |
+
self.assertTrue(str(n1), prefix + "self_foo")
|
| 234 |
+
self.assertEqual(n1.prefix, prefix)
|
| 235 |
+
self.assertEqual(l1.prefix, prefix)
|
| 236 |
+
self.assertEqual(l2.prefix, "_")
|
| 237 |
+
|
| 238 |
+
def test_remove(self):
|
| 239 |
+
l1 = pytree.Leaf(100, "foo")
|
| 240 |
+
l2 = pytree.Leaf(100, "foo")
|
| 241 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 242 |
+
n2 = pytree.Node(1000, [n1])
|
| 243 |
+
|
| 244 |
+
self.assertEqual(n1.remove(), 0)
|
| 245 |
+
self.assertEqual(n2.children, [])
|
| 246 |
+
self.assertEqual(l1.parent, n1)
|
| 247 |
+
self.assertEqual(n1.parent, None)
|
| 248 |
+
self.assertEqual(n2.parent, None)
|
| 249 |
+
self.assertFalse(n1.was_changed)
|
| 250 |
+
self.assertTrue(n2.was_changed)
|
| 251 |
+
|
| 252 |
+
self.assertEqual(l2.remove(), 1)
|
| 253 |
+
self.assertEqual(l1.remove(), 0)
|
| 254 |
+
self.assertEqual(n1.children, [])
|
| 255 |
+
self.assertEqual(l1.parent, None)
|
| 256 |
+
self.assertEqual(n1.parent, None)
|
| 257 |
+
self.assertEqual(n2.parent, None)
|
| 258 |
+
self.assertTrue(n1.was_changed)
|
| 259 |
+
self.assertTrue(n2.was_changed)
|
| 260 |
+
|
| 261 |
+
def test_remove_parentless(self):
|
| 262 |
+
n1 = pytree.Node(1000, [])
|
| 263 |
+
n1.remove()
|
| 264 |
+
self.assertEqual(n1.parent, None)
|
| 265 |
+
|
| 266 |
+
l1 = pytree.Leaf(100, "foo")
|
| 267 |
+
l1.remove()
|
| 268 |
+
self.assertEqual(l1.parent, None)
|
| 269 |
+
|
| 270 |
+
def test_node_set_child(self):
|
| 271 |
+
l1 = pytree.Leaf(100, "foo")
|
| 272 |
+
n1 = pytree.Node(1000, [l1])
|
| 273 |
+
|
| 274 |
+
l2 = pytree.Leaf(100, "bar")
|
| 275 |
+
n1.set_child(0, l2)
|
| 276 |
+
self.assertEqual(l1.parent, None)
|
| 277 |
+
self.assertEqual(l2.parent, n1)
|
| 278 |
+
self.assertEqual(n1.children, [l2])
|
| 279 |
+
|
| 280 |
+
n2 = pytree.Node(1000, [l1])
|
| 281 |
+
n2.set_child(0, n1)
|
| 282 |
+
self.assertEqual(l1.parent, None)
|
| 283 |
+
self.assertEqual(n1.parent, n2)
|
| 284 |
+
self.assertEqual(n2.parent, None)
|
| 285 |
+
self.assertEqual(n2.children, [n1])
|
| 286 |
+
|
| 287 |
+
self.assertRaises(IndexError, n1.set_child, 4, l2)
|
| 288 |
+
# I don't care what it raises, so long as it's an exception
|
| 289 |
+
self.assertRaises(Exception, n1.set_child, 0, list)
|
| 290 |
+
|
| 291 |
+
def test_node_insert_child(self):
|
| 292 |
+
l1 = pytree.Leaf(100, "foo")
|
| 293 |
+
n1 = pytree.Node(1000, [l1])
|
| 294 |
+
|
| 295 |
+
l2 = pytree.Leaf(100, "bar")
|
| 296 |
+
n1.insert_child(0, l2)
|
| 297 |
+
self.assertEqual(l2.parent, n1)
|
| 298 |
+
self.assertEqual(n1.children, [l2, l1])
|
| 299 |
+
|
| 300 |
+
l3 = pytree.Leaf(100, "abc")
|
| 301 |
+
n1.insert_child(2, l3)
|
| 302 |
+
self.assertEqual(n1.children, [l2, l1, l3])
|
| 303 |
+
|
| 304 |
+
# I don't care what it raises, so long as it's an exception
|
| 305 |
+
self.assertRaises(Exception, n1.insert_child, 0, list)
|
| 306 |
+
|
| 307 |
+
def test_node_append_child(self):
|
| 308 |
+
n1 = pytree.Node(1000, [])
|
| 309 |
+
|
| 310 |
+
l1 = pytree.Leaf(100, "foo")
|
| 311 |
+
n1.append_child(l1)
|
| 312 |
+
self.assertEqual(l1.parent, n1)
|
| 313 |
+
self.assertEqual(n1.children, [l1])
|
| 314 |
+
|
| 315 |
+
l2 = pytree.Leaf(100, "bar")
|
| 316 |
+
n1.append_child(l2)
|
| 317 |
+
self.assertEqual(l2.parent, n1)
|
| 318 |
+
self.assertEqual(n1.children, [l1, l2])
|
| 319 |
+
|
| 320 |
+
# I don't care what it raises, so long as it's an exception
|
| 321 |
+
self.assertRaises(Exception, n1.append_child, list)
|
| 322 |
+
|
| 323 |
+
def test_node_next_sibling(self):
|
| 324 |
+
n1 = pytree.Node(1000, [])
|
| 325 |
+
n2 = pytree.Node(1000, [])
|
| 326 |
+
p1 = pytree.Node(1000, [n1, n2])
|
| 327 |
+
|
| 328 |
+
self.assertIs(n1.next_sibling, n2)
|
| 329 |
+
self.assertEqual(n2.next_sibling, None)
|
| 330 |
+
self.assertEqual(p1.next_sibling, None)
|
| 331 |
+
|
| 332 |
+
def test_leaf_next_sibling(self):
|
| 333 |
+
l1 = pytree.Leaf(100, "a")
|
| 334 |
+
l2 = pytree.Leaf(100, "b")
|
| 335 |
+
p1 = pytree.Node(1000, [l1, l2])
|
| 336 |
+
|
| 337 |
+
self.assertIs(l1.next_sibling, l2)
|
| 338 |
+
self.assertEqual(l2.next_sibling, None)
|
| 339 |
+
self.assertEqual(p1.next_sibling, None)
|
| 340 |
+
|
| 341 |
+
def test_node_prev_sibling(self):
|
| 342 |
+
n1 = pytree.Node(1000, [])
|
| 343 |
+
n2 = pytree.Node(1000, [])
|
| 344 |
+
p1 = pytree.Node(1000, [n1, n2])
|
| 345 |
+
|
| 346 |
+
self.assertIs(n2.prev_sibling, n1)
|
| 347 |
+
self.assertEqual(n1.prev_sibling, None)
|
| 348 |
+
self.assertEqual(p1.prev_sibling, None)
|
| 349 |
+
|
| 350 |
+
def test_leaf_prev_sibling(self):
|
| 351 |
+
l1 = pytree.Leaf(100, "a")
|
| 352 |
+
l2 = pytree.Leaf(100, "b")
|
| 353 |
+
p1 = pytree.Node(1000, [l1, l2])
|
| 354 |
+
|
| 355 |
+
self.assertIs(l2.prev_sibling, l1)
|
| 356 |
+
self.assertEqual(l1.prev_sibling, None)
|
| 357 |
+
self.assertEqual(p1.prev_sibling, None)
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
class TestPatterns(support.TestCase):
|
| 361 |
+
|
| 362 |
+
"""Unit tests for tree matching patterns."""
|
| 363 |
+
|
| 364 |
+
def test_basic_patterns(self):
|
| 365 |
+
# Build a tree
|
| 366 |
+
l1 = pytree.Leaf(100, "foo")
|
| 367 |
+
l2 = pytree.Leaf(100, "bar")
|
| 368 |
+
l3 = pytree.Leaf(100, "foo")
|
| 369 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 370 |
+
n2 = pytree.Node(1000, [l3])
|
| 371 |
+
root = pytree.Node(1000, [n1, n2])
|
| 372 |
+
# Build a pattern matching a leaf
|
| 373 |
+
pl = pytree.LeafPattern(100, "foo", name="pl")
|
| 374 |
+
r = {}
|
| 375 |
+
self.assertFalse(pl.match(root, results=r))
|
| 376 |
+
self.assertEqual(r, {})
|
| 377 |
+
self.assertFalse(pl.match(n1, results=r))
|
| 378 |
+
self.assertEqual(r, {})
|
| 379 |
+
self.assertFalse(pl.match(n2, results=r))
|
| 380 |
+
self.assertEqual(r, {})
|
| 381 |
+
self.assertTrue(pl.match(l1, results=r))
|
| 382 |
+
self.assertEqual(r, {"pl": l1})
|
| 383 |
+
r = {}
|
| 384 |
+
self.assertFalse(pl.match(l2, results=r))
|
| 385 |
+
self.assertEqual(r, {})
|
| 386 |
+
# Build a pattern matching a node
|
| 387 |
+
pn = pytree.NodePattern(1000, [pl], name="pn")
|
| 388 |
+
self.assertFalse(pn.match(root, results=r))
|
| 389 |
+
self.assertEqual(r, {})
|
| 390 |
+
self.assertFalse(pn.match(n1, results=r))
|
| 391 |
+
self.assertEqual(r, {})
|
| 392 |
+
self.assertTrue(pn.match(n2, results=r))
|
| 393 |
+
self.assertEqual(r, {"pn": n2, "pl": l3})
|
| 394 |
+
r = {}
|
| 395 |
+
self.assertFalse(pn.match(l1, results=r))
|
| 396 |
+
self.assertEqual(r, {})
|
| 397 |
+
self.assertFalse(pn.match(l2, results=r))
|
| 398 |
+
self.assertEqual(r, {})
|
| 399 |
+
|
| 400 |
+
def test_wildcard(self):
|
| 401 |
+
# Build a tree for testing
|
| 402 |
+
l1 = pytree.Leaf(100, "foo")
|
| 403 |
+
l2 = pytree.Leaf(100, "bar")
|
| 404 |
+
l3 = pytree.Leaf(100, "foo")
|
| 405 |
+
n1 = pytree.Node(1000, [l1, l2])
|
| 406 |
+
n2 = pytree.Node(1000, [l3])
|
| 407 |
+
root = pytree.Node(1000, [n1, n2])
|
| 408 |
+
# Build a pattern
|
| 409 |
+
pl = pytree.LeafPattern(100, "foo", name="pl")
|
| 410 |
+
pn = pytree.NodePattern(1000, [pl], name="pn")
|
| 411 |
+
pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw")
|
| 412 |
+
r = {}
|
| 413 |
+
self.assertFalse(pw.match_seq([root], r))
|
| 414 |
+
self.assertEqual(r, {})
|
| 415 |
+
self.assertFalse(pw.match_seq([n1], r))
|
| 416 |
+
self.assertEqual(r, {})
|
| 417 |
+
self.assertTrue(pw.match_seq([n2], r))
|
| 418 |
+
# These are easier to debug
|
| 419 |
+
self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"])
|
| 420 |
+
self.assertEqual(r["pl"], l1)
|
| 421 |
+
self.assertEqual(r["pn"], n2)
|
| 422 |
+
self.assertEqual(r["pw"], [n2])
|
| 423 |
+
# But this is equivalent
|
| 424 |
+
self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]})
|
| 425 |
+
r = {}
|
| 426 |
+
self.assertTrue(pw.match_seq([l1, l3], r))
|
| 427 |
+
self.assertEqual(r, {"pl": l3, "pw": [l1, l3]})
|
| 428 |
+
self.assertIs(r["pl"], l3)
|
| 429 |
+
r = {}
|
| 430 |
+
|
| 431 |
+
def test_generate_matches(self):
|
| 432 |
+
la = pytree.Leaf(1, "a")
|
| 433 |
+
lb = pytree.Leaf(1, "b")
|
| 434 |
+
lc = pytree.Leaf(1, "c")
|
| 435 |
+
ld = pytree.Leaf(1, "d")
|
| 436 |
+
le = pytree.Leaf(1, "e")
|
| 437 |
+
lf = pytree.Leaf(1, "f")
|
| 438 |
+
leaves = [la, lb, lc, ld, le, lf]
|
| 439 |
+
root = pytree.Node(1000, leaves)
|
| 440 |
+
pa = pytree.LeafPattern(1, "a", "pa")
|
| 441 |
+
pb = pytree.LeafPattern(1, "b", "pb")
|
| 442 |
+
pc = pytree.LeafPattern(1, "c", "pc")
|
| 443 |
+
pd = pytree.LeafPattern(1, "d", "pd")
|
| 444 |
+
pe = pytree.LeafPattern(1, "e", "pe")
|
| 445 |
+
pf = pytree.LeafPattern(1, "f", "pf")
|
| 446 |
+
pw = pytree.WildcardPattern([[pa, pb, pc], [pd, pe],
|
| 447 |
+
[pa, pb], [pc, pd], [pe, pf]],
|
| 448 |
+
min=1, max=4, name="pw")
|
| 449 |
+
self.assertEqual([x[0] for x in pw.generate_matches(leaves)],
|
| 450 |
+
[3, 5, 2, 4, 6])
|
| 451 |
+
pr = pytree.NodePattern(type=1000, content=[pw], name="pr")
|
| 452 |
+
matches = list(pytree.generate_matches([pr], [root]))
|
| 453 |
+
self.assertEqual(len(matches), 1)
|
| 454 |
+
c, r = matches[0]
|
| 455 |
+
self.assertEqual(c, 1)
|
| 456 |
+
self.assertEqual(str(r["pr"]), "abcdef")
|
| 457 |
+
self.assertEqual(r["pw"], [la, lb, lc, ld, le, lf])
|
| 458 |
+
for c in "abcdef":
|
| 459 |
+
self.assertEqual(r["p" + c], pytree.Leaf(1, c))
|
| 460 |
+
|
| 461 |
+
def test_has_key_example(self):
|
| 462 |
+
pattern = pytree.NodePattern(331,
|
| 463 |
+
(pytree.LeafPattern(7),
|
| 464 |
+
pytree.WildcardPattern(name="args"),
|
| 465 |
+
pytree.LeafPattern(8)))
|
| 466 |
+
l1 = pytree.Leaf(7, "(")
|
| 467 |
+
l2 = pytree.Leaf(3, "x")
|
| 468 |
+
l3 = pytree.Leaf(8, ")")
|
| 469 |
+
node = pytree.Node(331, [l1, l2, l3])
|
| 470 |
+
r = {}
|
| 471 |
+
self.assertTrue(pattern.match(node, r))
|
| 472 |
+
self.assertEqual(r["args"], [l2])
|
parrot/lib/python3.10/multiprocessing/__init__.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Package analogous to 'threading.py' but using processes
|
| 3 |
+
#
|
| 4 |
+
# multiprocessing/__init__.py
|
| 5 |
+
#
|
| 6 |
+
# This package is intended to duplicate the functionality (and much of
|
| 7 |
+
# the API) of threading.py but uses processes instead of threads. A
|
| 8 |
+
# subpackage 'multiprocessing.dummy' has the same API but is a simple
|
| 9 |
+
# wrapper for 'threading'.
|
| 10 |
+
#
|
| 11 |
+
# Copyright (c) 2006-2008, R Oudkerk
|
| 12 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 13 |
+
#
|
| 14 |
+
|
| 15 |
+
import sys
|
| 16 |
+
from . import context
|
| 17 |
+
|
| 18 |
+
#
|
| 19 |
+
# Copy stuff from default context
|
| 20 |
+
#
|
| 21 |
+
|
| 22 |
+
__all__ = [x for x in dir(context._default_context) if not x.startswith('_')]
|
| 23 |
+
globals().update((name, getattr(context._default_context, name)) for name in __all__)
|
| 24 |
+
|
| 25 |
+
#
|
| 26 |
+
# XXX These should not really be documented or public.
|
| 27 |
+
#
|
| 28 |
+
|
| 29 |
+
SUBDEBUG = 5
|
| 30 |
+
SUBWARNING = 25
|
| 31 |
+
|
| 32 |
+
#
|
| 33 |
+
# Alias for main module -- will be reset by bootstrapping child processes
|
| 34 |
+
#
|
| 35 |
+
|
| 36 |
+
if '__main__' in sys.modules:
|
| 37 |
+
sys.modules['__mp_main__'] = sys.modules['__main__']
|
parrot/lib/python3.10/multiprocessing/__pycache__/context.cpython-310.pyc
ADDED
|
Binary file (13.1 kB). View file
|
|
|
parrot/lib/python3.10/multiprocessing/__pycache__/forkserver.cpython-310.pyc
ADDED
|
Binary file (8.42 kB). View file
|
|
|