Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- deepseek/lib/python3.10/collections/__init__.py +1556 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/build_scripts.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/clean.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/config.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/install.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/install_data.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/install_headers.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/__pycache__/upload.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/distutils/command/bdist_dumb.py +123 -0
- deepseek/lib/python3.10/ensurepip/__init__.py +294 -0
- deepseek/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl +3 -0
- deepseek/lib/python3.10/json/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/json/__pycache__/decoder.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/json/tool.py +85 -0
- deepseek/lib/python3.10/lib2to3/PatternGrammar.txt +28 -0
- deepseek/lib/python3.10/lib2to3/__init__.py +8 -0
- deepseek/lib/python3.10/lib2to3/__main__.py +4 -0
- deepseek/lib/python3.10/lib2to3/btm_matcher.py +163 -0
- deepseek/lib/python3.10/lib2to3/fixer_base.py +186 -0
- deepseek/lib/python3.10/lib2to3/fixer_util.py +453 -0
- deepseek/lib/python3.10/lib2to3/fixes/__init__.py +1 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_execfile.py +53 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_exitfunc.py +72 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_idioms.py +152 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_imports.py +145 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_imports2.py +16 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_intern.py +39 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_itertools.py +43 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_next.py +103 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_numliterals.py +28 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_reduce.py +35 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_types.py +61 -0
- deepseek/lib/python3.10/lib2to3/fixes/fix_unicode.py +42 -0
- deepseek/lib/python3.10/lib2to3/pytree.py +853 -0
- deepseek/lib/python3.10/lib2to3/refactor.py +732 -0
- deepseek/lib/python3.10/multiprocessing/__init__.py +37 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/connection.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/context.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/forkserver.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/heap.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/managers.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/pool.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/popen_fork.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/popen_forkserver.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/popen_spawn_posix.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/popen_spawn_win32.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/multiprocessing/__pycache__/process.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -424,3 +424,5 @@ deepseek/lib/libitm.so filter=lfs diff=lfs merge=lfs -text
|
|
| 424 |
evalkit_tf437/lib/python3.10/site-packages/scipy/spatial/_voronoi.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 425 |
deepseek/lib/libgomp.so.1.0.0 filter=lfs diff=lfs merge=lfs -text
|
| 426 |
evalkit_tf437/lib/python3.10/site-packages/matplotlib/_tri.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 424 |
evalkit_tf437/lib/python3.10/site-packages/scipy/spatial/_voronoi.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 425 |
deepseek/lib/libgomp.so.1.0.0 filter=lfs diff=lfs merge=lfs -text
|
| 426 |
evalkit_tf437/lib/python3.10/site-packages/matplotlib/_tri.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 427 |
+
deepseek/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 428 |
+
deepseek/lib/python3.10/tkinter/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
deepseek/lib/python3.10/collections/__init__.py
ADDED
|
@@ -0,0 +1,1556 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''This module implements specialized container datatypes providing
|
| 2 |
+
alternatives to Python's general purpose built-in containers, dict,
|
| 3 |
+
list, set, and tuple.
|
| 4 |
+
|
| 5 |
+
* namedtuple factory function for creating tuple subclasses with named fields
|
| 6 |
+
* deque list-like container with fast appends and pops on either end
|
| 7 |
+
* ChainMap dict-like class for creating a single view of multiple mappings
|
| 8 |
+
* Counter dict subclass for counting hashable objects
|
| 9 |
+
* OrderedDict dict subclass that remembers the order entries were added
|
| 10 |
+
* defaultdict dict subclass that calls a factory function to supply missing values
|
| 11 |
+
* UserDict wrapper around dictionary objects for easier dict subclassing
|
| 12 |
+
* UserList wrapper around list objects for easier list subclassing
|
| 13 |
+
* UserString wrapper around string objects for easier string subclassing
|
| 14 |
+
|
| 15 |
+
'''
|
| 16 |
+
|
| 17 |
+
__all__ = [
|
| 18 |
+
'ChainMap',
|
| 19 |
+
'Counter',
|
| 20 |
+
'OrderedDict',
|
| 21 |
+
'UserDict',
|
| 22 |
+
'UserList',
|
| 23 |
+
'UserString',
|
| 24 |
+
'defaultdict',
|
| 25 |
+
'deque',
|
| 26 |
+
'namedtuple',
|
| 27 |
+
]
|
| 28 |
+
|
| 29 |
+
import _collections_abc
|
| 30 |
+
import sys as _sys
|
| 31 |
+
|
| 32 |
+
from itertools import chain as _chain
|
| 33 |
+
from itertools import repeat as _repeat
|
| 34 |
+
from itertools import starmap as _starmap
|
| 35 |
+
from keyword import iskeyword as _iskeyword
|
| 36 |
+
from operator import eq as _eq
|
| 37 |
+
from operator import itemgetter as _itemgetter
|
| 38 |
+
from reprlib import recursive_repr as _recursive_repr
|
| 39 |
+
from _weakref import proxy as _proxy
|
| 40 |
+
|
| 41 |
+
try:
|
| 42 |
+
from _collections import deque
|
| 43 |
+
except ImportError:
|
| 44 |
+
pass
|
| 45 |
+
else:
|
| 46 |
+
_collections_abc.MutableSequence.register(deque)
|
| 47 |
+
|
| 48 |
+
try:
|
| 49 |
+
from _collections import defaultdict
|
| 50 |
+
except ImportError:
|
| 51 |
+
pass
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
################################################################################
|
| 55 |
+
### OrderedDict
|
| 56 |
+
################################################################################
|
| 57 |
+
|
| 58 |
+
class _OrderedDictKeysView(_collections_abc.KeysView):
|
| 59 |
+
|
| 60 |
+
def __reversed__(self):
|
| 61 |
+
yield from reversed(self._mapping)
|
| 62 |
+
|
| 63 |
+
class _OrderedDictItemsView(_collections_abc.ItemsView):
|
| 64 |
+
|
| 65 |
+
def __reversed__(self):
|
| 66 |
+
for key in reversed(self._mapping):
|
| 67 |
+
yield (key, self._mapping[key])
|
| 68 |
+
|
| 69 |
+
class _OrderedDictValuesView(_collections_abc.ValuesView):
|
| 70 |
+
|
| 71 |
+
def __reversed__(self):
|
| 72 |
+
for key in reversed(self._mapping):
|
| 73 |
+
yield self._mapping[key]
|
| 74 |
+
|
| 75 |
+
class _Link(object):
|
| 76 |
+
__slots__ = 'prev', 'next', 'key', '__weakref__'
|
| 77 |
+
|
| 78 |
+
class OrderedDict(dict):
|
| 79 |
+
'Dictionary that remembers insertion order'
|
| 80 |
+
# An inherited dict maps keys to values.
|
| 81 |
+
# The inherited dict provides __getitem__, __len__, __contains__, and get.
|
| 82 |
+
# The remaining methods are order-aware.
|
| 83 |
+
# Big-O running times for all methods are the same as regular dictionaries.
|
| 84 |
+
|
| 85 |
+
# The internal self.__map dict maps keys to links in a doubly linked list.
|
| 86 |
+
# The circular doubly linked list starts and ends with a sentinel element.
|
| 87 |
+
# The sentinel element never gets deleted (this simplifies the algorithm).
|
| 88 |
+
# The sentinel is in self.__hardroot with a weakref proxy in self.__root.
|
| 89 |
+
# The prev links are weakref proxies (to prevent circular references).
|
| 90 |
+
# Individual links are kept alive by the hard reference in self.__map.
|
| 91 |
+
# Those hard references disappear when a key is deleted from an OrderedDict.
|
| 92 |
+
|
| 93 |
+
def __init__(self, other=(), /, **kwds):
|
| 94 |
+
'''Initialize an ordered dictionary. The signature is the same as
|
| 95 |
+
regular dictionaries. Keyword argument order is preserved.
|
| 96 |
+
'''
|
| 97 |
+
try:
|
| 98 |
+
self.__root
|
| 99 |
+
except AttributeError:
|
| 100 |
+
self.__hardroot = _Link()
|
| 101 |
+
self.__root = root = _proxy(self.__hardroot)
|
| 102 |
+
root.prev = root.next = root
|
| 103 |
+
self.__map = {}
|
| 104 |
+
self.__update(other, **kwds)
|
| 105 |
+
|
| 106 |
+
def __setitem__(self, key, value,
|
| 107 |
+
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
|
| 108 |
+
'od.__setitem__(i, y) <==> od[i]=y'
|
| 109 |
+
# Setting a new item creates a new link at the end of the linked list,
|
| 110 |
+
# and the inherited dictionary is updated with the new key/value pair.
|
| 111 |
+
if key not in self:
|
| 112 |
+
self.__map[key] = link = Link()
|
| 113 |
+
root = self.__root
|
| 114 |
+
last = root.prev
|
| 115 |
+
link.prev, link.next, link.key = last, root, key
|
| 116 |
+
last.next = link
|
| 117 |
+
root.prev = proxy(link)
|
| 118 |
+
dict_setitem(self, key, value)
|
| 119 |
+
|
| 120 |
+
def __delitem__(self, key, dict_delitem=dict.__delitem__):
|
| 121 |
+
'od.__delitem__(y) <==> del od[y]'
|
| 122 |
+
# Deleting an existing item uses self.__map to find the link which gets
|
| 123 |
+
# removed by updating the links in the predecessor and successor nodes.
|
| 124 |
+
dict_delitem(self, key)
|
| 125 |
+
link = self.__map.pop(key)
|
| 126 |
+
link_prev = link.prev
|
| 127 |
+
link_next = link.next
|
| 128 |
+
link_prev.next = link_next
|
| 129 |
+
link_next.prev = link_prev
|
| 130 |
+
link.prev = None
|
| 131 |
+
link.next = None
|
| 132 |
+
|
| 133 |
+
def __iter__(self):
|
| 134 |
+
'od.__iter__() <==> iter(od)'
|
| 135 |
+
# Traverse the linked list in order.
|
| 136 |
+
root = self.__root
|
| 137 |
+
curr = root.next
|
| 138 |
+
while curr is not root:
|
| 139 |
+
yield curr.key
|
| 140 |
+
curr = curr.next
|
| 141 |
+
|
| 142 |
+
def __reversed__(self):
|
| 143 |
+
'od.__reversed__() <==> reversed(od)'
|
| 144 |
+
# Traverse the linked list in reverse order.
|
| 145 |
+
root = self.__root
|
| 146 |
+
curr = root.prev
|
| 147 |
+
while curr is not root:
|
| 148 |
+
yield curr.key
|
| 149 |
+
curr = curr.prev
|
| 150 |
+
|
| 151 |
+
def clear(self):
|
| 152 |
+
'od.clear() -> None. Remove all items from od.'
|
| 153 |
+
root = self.__root
|
| 154 |
+
root.prev = root.next = root
|
| 155 |
+
self.__map.clear()
|
| 156 |
+
dict.clear(self)
|
| 157 |
+
|
| 158 |
+
def popitem(self, last=True):
|
| 159 |
+
'''Remove and return a (key, value) pair from the dictionary.
|
| 160 |
+
|
| 161 |
+
Pairs are returned in LIFO order if last is true or FIFO order if false.
|
| 162 |
+
'''
|
| 163 |
+
if not self:
|
| 164 |
+
raise KeyError('dictionary is empty')
|
| 165 |
+
root = self.__root
|
| 166 |
+
if last:
|
| 167 |
+
link = root.prev
|
| 168 |
+
link_prev = link.prev
|
| 169 |
+
link_prev.next = root
|
| 170 |
+
root.prev = link_prev
|
| 171 |
+
else:
|
| 172 |
+
link = root.next
|
| 173 |
+
link_next = link.next
|
| 174 |
+
root.next = link_next
|
| 175 |
+
link_next.prev = root
|
| 176 |
+
key = link.key
|
| 177 |
+
del self.__map[key]
|
| 178 |
+
value = dict.pop(self, key)
|
| 179 |
+
return key, value
|
| 180 |
+
|
| 181 |
+
def move_to_end(self, key, last=True):
|
| 182 |
+
'''Move an existing element to the end (or beginning if last is false).
|
| 183 |
+
|
| 184 |
+
Raise KeyError if the element does not exist.
|
| 185 |
+
'''
|
| 186 |
+
link = self.__map[key]
|
| 187 |
+
link_prev = link.prev
|
| 188 |
+
link_next = link.next
|
| 189 |
+
soft_link = link_next.prev
|
| 190 |
+
link_prev.next = link_next
|
| 191 |
+
link_next.prev = link_prev
|
| 192 |
+
root = self.__root
|
| 193 |
+
if last:
|
| 194 |
+
last = root.prev
|
| 195 |
+
link.prev = last
|
| 196 |
+
link.next = root
|
| 197 |
+
root.prev = soft_link
|
| 198 |
+
last.next = link
|
| 199 |
+
else:
|
| 200 |
+
first = root.next
|
| 201 |
+
link.prev = root
|
| 202 |
+
link.next = first
|
| 203 |
+
first.prev = soft_link
|
| 204 |
+
root.next = link
|
| 205 |
+
|
| 206 |
+
def __sizeof__(self):
|
| 207 |
+
sizeof = _sys.getsizeof
|
| 208 |
+
n = len(self) + 1 # number of links including root
|
| 209 |
+
size = sizeof(self.__dict__) # instance dictionary
|
| 210 |
+
size += sizeof(self.__map) * 2 # internal dict and inherited dict
|
| 211 |
+
size += sizeof(self.__hardroot) * n # link objects
|
| 212 |
+
size += sizeof(self.__root) * n # proxy objects
|
| 213 |
+
return size
|
| 214 |
+
|
| 215 |
+
update = __update = _collections_abc.MutableMapping.update
|
| 216 |
+
|
| 217 |
+
def keys(self):
|
| 218 |
+
"D.keys() -> a set-like object providing a view on D's keys"
|
| 219 |
+
return _OrderedDictKeysView(self)
|
| 220 |
+
|
| 221 |
+
def items(self):
|
| 222 |
+
"D.items() -> a set-like object providing a view on D's items"
|
| 223 |
+
return _OrderedDictItemsView(self)
|
| 224 |
+
|
| 225 |
+
def values(self):
|
| 226 |
+
"D.values() -> an object providing a view on D's values"
|
| 227 |
+
return _OrderedDictValuesView(self)
|
| 228 |
+
|
| 229 |
+
__ne__ = _collections_abc.MutableMapping.__ne__
|
| 230 |
+
|
| 231 |
+
__marker = object()
|
| 232 |
+
|
| 233 |
+
def pop(self, key, default=__marker):
|
| 234 |
+
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
|
| 235 |
+
value. If key is not found, d is returned if given, otherwise KeyError
|
| 236 |
+
is raised.
|
| 237 |
+
|
| 238 |
+
'''
|
| 239 |
+
if key in self:
|
| 240 |
+
result = self[key]
|
| 241 |
+
del self[key]
|
| 242 |
+
return result
|
| 243 |
+
if default is self.__marker:
|
| 244 |
+
raise KeyError(key)
|
| 245 |
+
return default
|
| 246 |
+
|
| 247 |
+
def setdefault(self, key, default=None):
|
| 248 |
+
'''Insert key with a value of default if key is not in the dictionary.
|
| 249 |
+
|
| 250 |
+
Return the value for key if key is in the dictionary, else default.
|
| 251 |
+
'''
|
| 252 |
+
if key in self:
|
| 253 |
+
return self[key]
|
| 254 |
+
self[key] = default
|
| 255 |
+
return default
|
| 256 |
+
|
| 257 |
+
@_recursive_repr()
|
| 258 |
+
def __repr__(self):
|
| 259 |
+
'od.__repr__() <==> repr(od)'
|
| 260 |
+
if not self:
|
| 261 |
+
return '%s()' % (self.__class__.__name__,)
|
| 262 |
+
return '%s(%r)' % (self.__class__.__name__, list(self.items()))
|
| 263 |
+
|
| 264 |
+
def __reduce__(self):
|
| 265 |
+
'Return state information for pickling'
|
| 266 |
+
inst_dict = vars(self).copy()
|
| 267 |
+
for k in vars(OrderedDict()):
|
| 268 |
+
inst_dict.pop(k, None)
|
| 269 |
+
return self.__class__, (), inst_dict or None, None, iter(self.items())
|
| 270 |
+
|
| 271 |
+
def copy(self):
|
| 272 |
+
'od.copy() -> a shallow copy of od'
|
| 273 |
+
return self.__class__(self)
|
| 274 |
+
|
| 275 |
+
@classmethod
|
| 276 |
+
def fromkeys(cls, iterable, value=None):
|
| 277 |
+
'''Create a new ordered dictionary with keys from iterable and values set to value.
|
| 278 |
+
'''
|
| 279 |
+
self = cls()
|
| 280 |
+
for key in iterable:
|
| 281 |
+
self[key] = value
|
| 282 |
+
return self
|
| 283 |
+
|
| 284 |
+
def __eq__(self, other):
|
| 285 |
+
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
|
| 286 |
+
while comparison to a regular mapping is order-insensitive.
|
| 287 |
+
|
| 288 |
+
'''
|
| 289 |
+
if isinstance(other, OrderedDict):
|
| 290 |
+
return dict.__eq__(self, other) and all(map(_eq, self, other))
|
| 291 |
+
return dict.__eq__(self, other)
|
| 292 |
+
|
| 293 |
+
def __ior__(self, other):
|
| 294 |
+
self.update(other)
|
| 295 |
+
return self
|
| 296 |
+
|
| 297 |
+
def __or__(self, other):
|
| 298 |
+
if not isinstance(other, dict):
|
| 299 |
+
return NotImplemented
|
| 300 |
+
new = self.__class__(self)
|
| 301 |
+
new.update(other)
|
| 302 |
+
return new
|
| 303 |
+
|
| 304 |
+
def __ror__(self, other):
|
| 305 |
+
if not isinstance(other, dict):
|
| 306 |
+
return NotImplemented
|
| 307 |
+
new = self.__class__(other)
|
| 308 |
+
new.update(self)
|
| 309 |
+
return new
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
try:
|
| 313 |
+
from _collections import OrderedDict
|
| 314 |
+
except ImportError:
|
| 315 |
+
# Leave the pure Python version in place.
|
| 316 |
+
pass
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
################################################################################
|
| 320 |
+
### namedtuple
|
| 321 |
+
################################################################################
|
| 322 |
+
|
| 323 |
+
try:
|
| 324 |
+
from _collections import _tuplegetter
|
| 325 |
+
except ImportError:
|
| 326 |
+
_tuplegetter = lambda index, doc: property(_itemgetter(index), doc=doc)
|
| 327 |
+
|
| 328 |
+
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
|
| 329 |
+
"""Returns a new subclass of tuple with named fields.
|
| 330 |
+
|
| 331 |
+
>>> Point = namedtuple('Point', ['x', 'y'])
|
| 332 |
+
>>> Point.__doc__ # docstring for the new class
|
| 333 |
+
'Point(x, y)'
|
| 334 |
+
>>> p = Point(11, y=22) # instantiate with positional args or keywords
|
| 335 |
+
>>> p[0] + p[1] # indexable like a plain tuple
|
| 336 |
+
33
|
| 337 |
+
>>> x, y = p # unpack like a regular tuple
|
| 338 |
+
>>> x, y
|
| 339 |
+
(11, 22)
|
| 340 |
+
>>> p.x + p.y # fields also accessible by name
|
| 341 |
+
33
|
| 342 |
+
>>> d = p._asdict() # convert to a dictionary
|
| 343 |
+
>>> d['x']
|
| 344 |
+
11
|
| 345 |
+
>>> Point(**d) # convert from a dictionary
|
| 346 |
+
Point(x=11, y=22)
|
| 347 |
+
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
|
| 348 |
+
Point(x=100, y=22)
|
| 349 |
+
|
| 350 |
+
"""
|
| 351 |
+
|
| 352 |
+
# Validate the field names. At the user's option, either generate an error
|
| 353 |
+
# message or automatically replace the field name with a valid name.
|
| 354 |
+
if isinstance(field_names, str):
|
| 355 |
+
field_names = field_names.replace(',', ' ').split()
|
| 356 |
+
field_names = list(map(str, field_names))
|
| 357 |
+
typename = _sys.intern(str(typename))
|
| 358 |
+
|
| 359 |
+
if rename:
|
| 360 |
+
seen = set()
|
| 361 |
+
for index, name in enumerate(field_names):
|
| 362 |
+
if (not name.isidentifier()
|
| 363 |
+
or _iskeyword(name)
|
| 364 |
+
or name.startswith('_')
|
| 365 |
+
or name in seen):
|
| 366 |
+
field_names[index] = f'_{index}'
|
| 367 |
+
seen.add(name)
|
| 368 |
+
|
| 369 |
+
for name in [typename] + field_names:
|
| 370 |
+
if type(name) is not str:
|
| 371 |
+
raise TypeError('Type names and field names must be strings')
|
| 372 |
+
if not name.isidentifier():
|
| 373 |
+
raise ValueError('Type names and field names must be valid '
|
| 374 |
+
f'identifiers: {name!r}')
|
| 375 |
+
if _iskeyword(name):
|
| 376 |
+
raise ValueError('Type names and field names cannot be a '
|
| 377 |
+
f'keyword: {name!r}')
|
| 378 |
+
|
| 379 |
+
seen = set()
|
| 380 |
+
for name in field_names:
|
| 381 |
+
if name.startswith('_') and not rename:
|
| 382 |
+
raise ValueError('Field names cannot start with an underscore: '
|
| 383 |
+
f'{name!r}')
|
| 384 |
+
if name in seen:
|
| 385 |
+
raise ValueError(f'Encountered duplicate field name: {name!r}')
|
| 386 |
+
seen.add(name)
|
| 387 |
+
|
| 388 |
+
field_defaults = {}
|
| 389 |
+
if defaults is not None:
|
| 390 |
+
defaults = tuple(defaults)
|
| 391 |
+
if len(defaults) > len(field_names):
|
| 392 |
+
raise TypeError('Got more default values than field names')
|
| 393 |
+
field_defaults = dict(reversed(list(zip(reversed(field_names),
|
| 394 |
+
reversed(defaults)))))
|
| 395 |
+
|
| 396 |
+
# Variables used in the methods and docstrings
|
| 397 |
+
field_names = tuple(map(_sys.intern, field_names))
|
| 398 |
+
num_fields = len(field_names)
|
| 399 |
+
arg_list = ', '.join(field_names)
|
| 400 |
+
if num_fields == 1:
|
| 401 |
+
arg_list += ','
|
| 402 |
+
repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
|
| 403 |
+
tuple_new = tuple.__new__
|
| 404 |
+
_dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
|
| 405 |
+
|
| 406 |
+
# Create all the named tuple methods to be added to the class namespace
|
| 407 |
+
|
| 408 |
+
namespace = {
|
| 409 |
+
'_tuple_new': tuple_new,
|
| 410 |
+
'__builtins__': {},
|
| 411 |
+
'__name__': f'namedtuple_{typename}',
|
| 412 |
+
}
|
| 413 |
+
code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
|
| 414 |
+
__new__ = eval(code, namespace)
|
| 415 |
+
__new__.__name__ = '__new__'
|
| 416 |
+
__new__.__doc__ = f'Create new instance of {typename}({arg_list})'
|
| 417 |
+
if defaults is not None:
|
| 418 |
+
__new__.__defaults__ = defaults
|
| 419 |
+
|
| 420 |
+
@classmethod
|
| 421 |
+
def _make(cls, iterable):
|
| 422 |
+
result = tuple_new(cls, iterable)
|
| 423 |
+
if _len(result) != num_fields:
|
| 424 |
+
raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
|
| 425 |
+
return result
|
| 426 |
+
|
| 427 |
+
_make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
|
| 428 |
+
'or iterable')
|
| 429 |
+
|
| 430 |
+
def _replace(self, /, **kwds):
|
| 431 |
+
result = self._make(_map(kwds.pop, field_names, self))
|
| 432 |
+
if kwds:
|
| 433 |
+
raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
|
| 434 |
+
return result
|
| 435 |
+
|
| 436 |
+
_replace.__doc__ = (f'Return a new {typename} object replacing specified '
|
| 437 |
+
'fields with new values')
|
| 438 |
+
|
| 439 |
+
def __repr__(self):
|
| 440 |
+
'Return a nicely formatted representation string'
|
| 441 |
+
return self.__class__.__name__ + repr_fmt % self
|
| 442 |
+
|
| 443 |
+
def _asdict(self):
|
| 444 |
+
'Return a new dict which maps field names to their values.'
|
| 445 |
+
return _dict(_zip(self._fields, self))
|
| 446 |
+
|
| 447 |
+
def __getnewargs__(self):
|
| 448 |
+
'Return self as a plain tuple. Used by copy and pickle.'
|
| 449 |
+
return _tuple(self)
|
| 450 |
+
|
| 451 |
+
# Modify function metadata to help with introspection and debugging
|
| 452 |
+
for method in (
|
| 453 |
+
__new__,
|
| 454 |
+
_make.__func__,
|
| 455 |
+
_replace,
|
| 456 |
+
__repr__,
|
| 457 |
+
_asdict,
|
| 458 |
+
__getnewargs__,
|
| 459 |
+
):
|
| 460 |
+
method.__qualname__ = f'{typename}.{method.__name__}'
|
| 461 |
+
|
| 462 |
+
# Build-up the class namespace dictionary
|
| 463 |
+
# and use type() to build the result class
|
| 464 |
+
class_namespace = {
|
| 465 |
+
'__doc__': f'{typename}({arg_list})',
|
| 466 |
+
'__slots__': (),
|
| 467 |
+
'_fields': field_names,
|
| 468 |
+
'_field_defaults': field_defaults,
|
| 469 |
+
'__new__': __new__,
|
| 470 |
+
'_make': _make,
|
| 471 |
+
'_replace': _replace,
|
| 472 |
+
'__repr__': __repr__,
|
| 473 |
+
'_asdict': _asdict,
|
| 474 |
+
'__getnewargs__': __getnewargs__,
|
| 475 |
+
'__match_args__': field_names,
|
| 476 |
+
}
|
| 477 |
+
for index, name in enumerate(field_names):
|
| 478 |
+
doc = _sys.intern(f'Alias for field number {index}')
|
| 479 |
+
class_namespace[name] = _tuplegetter(index, doc)
|
| 480 |
+
|
| 481 |
+
result = type(typename, (tuple,), class_namespace)
|
| 482 |
+
|
| 483 |
+
# For pickling to work, the __module__ variable needs to be set to the frame
|
| 484 |
+
# where the named tuple is created. Bypass this step in environments where
|
| 485 |
+
# sys._getframe is not defined (Jython for example) or sys._getframe is not
|
| 486 |
+
# defined for arguments greater than 0 (IronPython), or where the user has
|
| 487 |
+
# specified a particular module.
|
| 488 |
+
if module is None:
|
| 489 |
+
try:
|
| 490 |
+
module = _sys._getframe(1).f_globals.get('__name__', '__main__')
|
| 491 |
+
except (AttributeError, ValueError):
|
| 492 |
+
pass
|
| 493 |
+
if module is not None:
|
| 494 |
+
result.__module__ = module
|
| 495 |
+
|
| 496 |
+
return result
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
########################################################################
|
| 500 |
+
### Counter
|
| 501 |
+
########################################################################
|
| 502 |
+
|
| 503 |
+
def _count_elements(mapping, iterable):
|
| 504 |
+
'Tally elements from the iterable.'
|
| 505 |
+
mapping_get = mapping.get
|
| 506 |
+
for elem in iterable:
|
| 507 |
+
mapping[elem] = mapping_get(elem, 0) + 1
|
| 508 |
+
|
| 509 |
+
try:  # Load C helper function if available
    # When present, the C implementation shadows the pure-Python
    # _count_elements defined above; behavior is identical, only faster.
    from _collections import _count_elements
except ImportError:
    pass  # keep the pure-Python fallback defined above
|
| 513 |
+
|
| 514 |
+
class Counter(dict):
    '''Dict subclass for counting hashable items.  Sometimes called a bag
    or multiset.  Elements are stored as dictionary keys and their counts
    are stored as dictionary values.

    >>> c = Counter('abcdeabcdabcaba')  # count elements from a string

    >>> c.most_common(3)                # three most common elements
    [('a', 5), ('b', 4), ('c', 3)]
    >>> sorted(c)                       # list all unique elements
    ['a', 'b', 'c', 'd', 'e']
    >>> ''.join(sorted(c.elements()))   # list elements with repetitions
    'aaaaabbbbcccdde'
    >>> sum(c.values())                 # total of all counts
    15

    >>> c['a']                          # count of letter 'a'
    5
    >>> for elem in 'shazam':           # update counts from an iterable
    ...     c[elem] += 1                # by adding 1 to each element's count
    >>> c['a']                          # now there are seven 'a'
    7
    >>> del c['b']                      # remove all 'b'
    >>> c['b']                          # now there are zero 'b'
    0

    >>> d = Counter('simsalabim')       # make another counter
    >>> c.update(d)                     # add in the second counter
    >>> c['a']                          # now there are nine 'a'
    9

    >>> c.clear()                       # empty the counter
    >>> c
    Counter()

    Note:  If a count is set to zero or reduced to zero, it will remain
    in the counter until the entry is deleted or the counter is cleared:

    >>> c = Counter('aaabbc')
    >>> c['b'] -= 2                     # reduce the count of 'b' by two
    >>> c.most_common()                 # 'b' is still in, but its count is zero
    [('a', 3), ('c', 1), ('b', 0)]

    '''
    # References:
    #   http://en.wikipedia.org/wiki/Multiset
    #   http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
    #   http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
    #   http://code.activestate.com/recipes/259174/
    #   Knuth, TAOCP Vol. II section 4.6.3

    def __init__(self, iterable=None, /, **kwds):
        '''Create a new, empty Counter object.  And if given, count elements
        from an input iterable.  Or, initialize the count from another mapping
        of elements to their counts.

        >>> c = Counter()                           # a new, empty counter
        >>> c = Counter('gallahad')                 # a new counter from an iterable
        >>> c = Counter({'a': 4, 'b': 2})           # a new counter from a mapping
        >>> c = Counter(a=4, b=2)                   # a new counter from keyword args

        '''
        super().__init__()
        self.update(iterable, **kwds)

    def __missing__(self, key):
        'The count of elements not in the Counter is zero.'
        # Needed so that self[missing_item] does not raise KeyError
        return 0

    def total(self):
        'Sum of the counts'
        return sum(self.values())

    def most_common(self, n=None):
        '''List the n most common elements and their counts from the most
        common to the least.  If n is None, then list all element counts.

        >>> Counter('abracadabra').most_common(3)
        [('a', 5), ('b', 2), ('r', 2)]

        '''
        # Emulate Bag.sortedByCount from Smalltalk
        if n is None:
            return sorted(self.items(), key=_itemgetter(1), reverse=True)

        # Lazy import to speedup Python startup time
        import heapq
        return heapq.nlargest(n, self.items(), key=_itemgetter(1))

    def elements(self):
        '''Iterator over elements repeating each as many times as its count.

        >>> c = Counter('ABCABC')
        >>> sorted(c.elements())
        ['A', 'A', 'B', 'B', 'C', 'C']

        # Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
        >>> product = 1
        >>> for factor in prime_factors.elements():     # loop over factors
        ...     product *= factor                       # and multiply them
        >>> product
        1836

        Note, if an element's count has been set to zero or is a negative
        number, elements() will ignore it.

        '''
        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
        # _repeat(elem, count) yields nothing for count <= 0, which is how
        # zero/negative counts get skipped.
        return _chain.from_iterable(_starmap(_repeat, self.items()))

    # Override dict methods where necessary

    @classmethod
    def fromkeys(cls, iterable, v=None):
        # There is no equivalent method for counters because the semantics
        # would be ambiguous in cases such as Counter.fromkeys('aaabbc', v=2).
        # Initializing counters to zero values isn't necessary because zero
        # is already the default value for counter lookups.  Initializing
        # to one is easily accomplished with Counter(set(iterable)).  For
        # more exotic cases, create a dictionary first using a dictionary
        # comprehension or dict.fromkeys().
        raise NotImplementedError(
            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')

    def update(self, iterable=None, /, **kwds):
        '''Like dict.update() but add counts instead of replacing them.

        Source can be an iterable, a dictionary, or another Counter instance.

        >>> c = Counter('which')
        >>> c.update('witch')           # add elements from another iterable
        >>> d = Counter('watch')
        >>> c.update(d)                 # add elements from another counter
        >>> c['h']                      # four 'h' in which, witch, and watch
        4

        '''
        # The regular dict.update() operation makes no sense here because the
        # replace behavior results in the some of original untouched counts
        # being mixed-in with all of the other counts for a mismash that
        # doesn't have a straight-forward interpretation in most counting
        # contexts.  Instead, we implement straight-addition.  Both the inputs
        # and outputs are allowed to contain zero and negative counts.

        if iterable is not None:
            if isinstance(iterable, _collections_abc.Mapping):
                if self:
                    self_get = self.get
                    for elem, count in iterable.items():
                        self[elem] = count + self_get(elem, 0)
                else:
                    # fast path when counter is empty
                    super().update(iterable)
            else:
                _count_elements(self, iterable)
        if kwds:
            self.update(kwds)

    def subtract(self, iterable=None, /, **kwds):
        '''Like dict.update() but subtracts counts instead of replacing them.
        Counts can be reduced below zero.  Both the inputs and outputs are
        allowed to contain zero and negative counts.

        Source can be an iterable, a dictionary, or another Counter instance.

        >>> c = Counter('which')
        >>> c.subtract('witch')             # subtract elements from another iterable
        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
        0
        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
        -1

        '''
        if iterable is not None:
            self_get = self.get
            if isinstance(iterable, _collections_abc.Mapping):
                for elem, count in iterable.items():
                    self[elem] = self_get(elem, 0) - count
            else:
                for elem in iterable:
                    self[elem] = self_get(elem, 0) - 1
        if kwds:
            self.subtract(kwds)

    def copy(self):
        'Return a shallow copy.'
        return self.__class__(self)

    def __reduce__(self):
        # Pickle as (class, (plain-dict,)) so __init__ rebuilds the counts.
        return self.__class__, (dict(self),)

    def __delitem__(self, elem):
        'Like dict.__delitem__() but does not raise KeyError for missing values.'
        if elem in self:
            super().__delitem__(elem)

    def __eq__(self, other):
        'True if all counts agree. Missing counts are treated as zero.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] == other[e] for c in (self, other) for e in c)

    def __ne__(self, other):
        'True if any counts disagree. Missing counts are treated as zero.'
        if not isinstance(other, Counter):
            return NotImplemented
        return not self == other

    def __le__(self, other):
        'True if all counts in self are a subset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] <= other[e] for c in (self, other) for e in c)

    def __lt__(self, other):
        'True if all counts in self are a proper subset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return self <= other and self != other

    def __ge__(self, other):
        'True if all counts in self are a superset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] >= other[e] for c in (self, other) for e in c)

    def __gt__(self, other):
        'True if all counts in self are a proper superset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return self >= other and self != other

    def __repr__(self):
        if not self:
            return f'{self.__class__.__name__}()'
        try:
            # dict() preserves the ordering returned by most_common()
            d = dict(self.most_common())
        except TypeError:
            # handle case where values are not orderable
            d = dict(self)
        return f'{self.__class__.__name__}({d!r})'

    # Multiset-style mathematical operations discussed in:
    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
    #       and at http://en.wikipedia.org/wiki/Multiset
    #
    # Outputs guaranteed to only include positive counts.
    #
    # To strip negative and zero counts, add-in an empty counter:
    #       c += Counter()
    #
    # Results are ordered according to when an element is first
    # encountered in the left operand and then by the order
    # encountered in the right operand.
    #
    # When the multiplicities are all zero or one, multiset operations
    # are guaranteed to be equivalent to the corresponding operations
    # for regular sets.
    #     Given counter multisets such as:
    #         cp = Counter(a=1, b=0, c=1)
    #         cq = Counter(c=1, d=0, e=1)
    #     The corresponding regular sets would be:
    #         sp = {'a', 'c'}
    #         sq = {'c', 'e'}
    #     All of the following relations would hold:
    #         set(cp + cq) == sp | sq
    #         set(cp - cq) == sp - sq
    #         set(cp | cq) == sp | sq
    #         set(cp & cq) == sp & sq
    #         (cp == cq) == (sp == sq)
    #         (cp != cq) == (sp != sq)
    #         (cp <= cq) == (sp <= sq)
    #         (cp < cq) == (sp < sq)
    #         (cp >= cq) == (sp >= sq)
    #         (cp > cq) == (sp > sq)

    def __add__(self, other):
        '''Add counts from two counters.

        >>> Counter('abbb') + Counter('bcc')
        Counter({'b': 4, 'c': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count + other[elem]
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __sub__(self, other):
        ''' Subtract count, but keep only results with positive counts.

        >>> Counter('abbbc') - Counter('bccd')
        Counter({'b': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count - other[elem]
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            # A negative count on the right side becomes positive in the
            # result (0 - count).
            if elem not in self and count < 0:
                result[elem] = 0 - count
        return result

    def __or__(self, other):
        '''Union is the maximum of value in either of the input counters.

        >>> Counter('abbb') | Counter('bcc')
        Counter({'b': 3, 'c': 2, 'a': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = other_count if count < other_count else count
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __and__(self, other):
        ''' Intersection is the minimum of corresponding counts.

        >>> Counter('abbb') & Counter('bcc')
        Counter({'b': 1})

        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result

    def __pos__(self):
        'Adds an empty counter, effectively stripping negative and zero counts'
        result = Counter()
        for elem, count in self.items():
            if count > 0:
                result[elem] = count
        return result

    def __neg__(self):
        '''Subtracts from an empty counter.  Strips positive and zero counts,
        and flips the sign on negative counts.

        '''
        result = Counter()
        for elem, count in self.items():
            if count < 0:
                result[elem] = 0 - count
        return result

    def _keep_positive(self):
        '''Internal method to strip elements with a negative or zero count'''
        nonpositive = [elem for elem, count in self.items() if not count > 0]
        for elem in nonpositive:
            del self[elem]
        return self

    def __iadd__(self, other):
        '''Inplace add from another counter, keeping only positive counts.

        >>> c = Counter('abbb')
        >>> c += Counter('bcc')
        >>> c
        Counter({'b': 4, 'c': 2, 'a': 1})

        '''
        for elem, count in other.items():
            self[elem] += count
        return self._keep_positive()

    def __isub__(self, other):
        '''Inplace subtract counter, but keep only results with positive counts.

        >>> c = Counter('abbbc')
        >>> c -= Counter('bccd')
        >>> c
        Counter({'b': 2, 'a': 1})

        '''
        for elem, count in other.items():
            self[elem] -= count
        return self._keep_positive()

    def __ior__(self, other):
        '''Inplace union is the maximum of value from either counter.

        >>> c = Counter('abbb')
        >>> c |= Counter('bcc')
        >>> c
        Counter({'b': 3, 'c': 2, 'a': 1})

        '''
        for elem, other_count in other.items():
            count = self[elem]
            if other_count > count:
                self[elem] = other_count
        return self._keep_positive()

    def __iand__(self, other):
        '''Inplace intersection is the minimum of corresponding counts.

        >>> c = Counter('abbb')
        >>> c &= Counter('bcc')
        >>> c
        Counter({'b': 1})

        '''
        # Only existing keys are reassigned here (no inserts or deletes),
        # so iterating self.items() while writing is safe.
        for elem, count in self.items():
            other_count = other[elem]
            if other_count < count:
                self[elem] = other_count
        return self._keep_positive()
|
| 950 |
+
|
| 951 |
+
|
| 952 |
+
########################################################################
|
| 953 |
+
### ChainMap
|
| 954 |
+
########################################################################
|
| 955 |
+
|
| 956 |
+
class ChainMap(_collections_abc.MutableMapping):
    ''' A ChainMap groups multiple dicts (or other mappings) together
    to create a single, updateable view.

    The underlying mappings are stored in a list.  That list is public and can
    be accessed or updated using the *maps* attribute.  There is no other
    state.

    Lookups search the underlying mappings successively until a key is found.
    In contrast, writes, updates, and deletions only operate on the first
    mapping.

    '''

    def __init__(self, *maps):
        '''Initialize a ChainMap by setting *maps* to the given mappings.
        If no mappings are provided, a single empty dictionary is used.

        '''
        self.maps = list(maps) or [{}]          # always at least one map

    def __missing__(self, key):
        # Default behavior for a key absent from every map; subclasses may
        # override (see __getitem__).
        raise KeyError(key)

    def __getitem__(self, key):
        for mapping in self.maps:
            try:
                return mapping[key]             # can't use 'key in mapping' with defaultdict
            except KeyError:
                pass
        return self.__missing__(key)            # support subclasses that define __missing__

    def get(self, key, default=None):
        return self[key] if key in self else default

    def __len__(self):
        return len(set().union(*self.maps))     # reuses stored hash values if possible

    def __iter__(self):
        # Build a dict keyed by every map, back-to-front, so that iteration
        # order matches first-encounter order in the frontmost maps.
        d = {}
        for mapping in reversed(self.maps):
            d.update(dict.fromkeys(mapping))    # reuses stored hash values if possible
        return iter(d)

    def __contains__(self, key):
        return any(key in m for m in self.maps)

    def __bool__(self):
        return any(self.maps)

    @_recursive_repr()
    def __repr__(self):
        return f'{self.__class__.__name__}({", ".join(map(repr, self.maps))})'

    @classmethod
    def fromkeys(cls, iterable, *args):
        'Create a ChainMap with a single dict created from the iterable.'
        return cls(dict.fromkeys(iterable, *args))

    def copy(self):
        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
        return self.__class__(self.maps[0].copy(), *self.maps[1:])

    __copy__ = copy

    def new_child(self, m=None, **kwargs):      # like Django's Context.push()
        '''New ChainMap with a new map followed by all previous maps.
        If no map is provided, an empty dict is used.
        Keyword arguments update the map or new empty dict.
        '''
        if m is None:
            m = kwargs
        elif kwargs:
            m.update(kwargs)
        return self.__class__(m, *self.maps)

    @property
    def parents(self):                          # like Django's Context.pop()
        'New ChainMap from maps[1:].'
        return self.__class__(*self.maps[1:])

    def __setitem__(self, key, value):
        self.maps[0][key] = value

    def __delitem__(self, key):
        try:
            del self.maps[0][key]
        except KeyError:
            raise KeyError(f'Key not found in the first mapping: {key!r}')

    def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
        try:
            return self.maps[0].popitem()
        except KeyError:
            raise KeyError('No keys found in the first mapping.')

    def pop(self, key, *args):
        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
        try:
            return self.maps[0].pop(key, *args)
        except KeyError:
            raise KeyError(f'Key not found in the first mapping: {key!r}')

    def clear(self):
        'Clear maps[0], leaving maps[1:] intact.'
        self.maps[0].clear()

    def __ior__(self, other):
        self.maps[0].update(other)
        return self

    def __or__(self, other):
        if not isinstance(other, _collections_abc.Mapping):
            return NotImplemented
        m = self.copy()
        m.maps[0].update(other)
        return m

    def __ror__(self, other):
        if not isinstance(other, _collections_abc.Mapping):
            return NotImplemented
        # Flatten back-to-front so frontmost maps are applied last and win.
        m = dict(other)
        for child in reversed(self.maps):
            m.update(child)
        return self.__class__(m)
|
| 1082 |
+
|
| 1083 |
+
|
| 1084 |
+
################################################################################
|
| 1085 |
+
### UserDict
|
| 1086 |
+
################################################################################
|
| 1087 |
+
|
| 1088 |
+
class UserDict(_collections_abc.MutableMapping):
    """A more or less complete user-defined wrapper around dictionary objects.

    The wrapped dictionary is accessible via the ``data`` attribute; all
    mapping operations delegate to it.
    """

    # Start by filling-out the abstract methods
    def __init__(self, dict=None, /, **kwargs):
        self.data = {}
        if dict is not None:
            self.update(dict)
        if kwargs:
            self.update(kwargs)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key):
        if key in self.data:
            return self.data[key]
        # Honor a subclass-defined __missing__ hook, mirroring dict behavior.
        if hasattr(self.__class__, "__missing__"):
            return self.__class__.__missing__(self, key)
        raise KeyError(key)

    def __setitem__(self, key, item):
        self.data[key] = item

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    # Modify __contains__ to work correctly when __missing__ is present
    def __contains__(self, key):
        return key in self.data

    # Now, add the methods in dicts but not in MutableMapping
    def __repr__(self):
        return repr(self.data)

    def __or__(self, other):
        if isinstance(other, UserDict):
            return self.__class__(self.data | other.data)
        if isinstance(other, dict):
            return self.__class__(self.data | other)
        return NotImplemented

    def __ror__(self, other):
        if isinstance(other, UserDict):
            return self.__class__(other.data | self.data)
        if isinstance(other, dict):
            return self.__class__(other | self.data)
        return NotImplemented

    def __ior__(self, other):
        if isinstance(other, UserDict):
            self.data |= other.data
        else:
            self.data |= other
        return self

    def __copy__(self):
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors
        inst.__dict__["data"] = self.__dict__["data"].copy()
        return inst

    def copy(self):
        if self.__class__ is UserDict:
            return UserDict(self.data.copy())
        import copy
        data = self.data
        try:
            # Temporarily detach ``data`` so copy.copy() doesn't duplicate it;
            # the items are re-added via update() below.
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d
|
| 1172 |
+
|
| 1173 |
+
|
| 1174 |
+
################################################################################
|
| 1175 |
+
### UserList
|
| 1176 |
+
################################################################################
|
| 1177 |
+
|
| 1178 |
+
class UserList(_collections_abc.MutableSequence):
|
| 1179 |
+
"""A more or less complete user-defined wrapper around list objects."""
|
| 1180 |
+
|
| 1181 |
+
def __init__(self, initlist=None):
|
| 1182 |
+
self.data = []
|
| 1183 |
+
if initlist is not None:
|
| 1184 |
+
# XXX should this accept an arbitrary sequence?
|
| 1185 |
+
if type(initlist) == type(self.data):
|
| 1186 |
+
self.data[:] = initlist
|
| 1187 |
+
elif isinstance(initlist, UserList):
|
| 1188 |
+
self.data[:] = initlist.data[:]
|
| 1189 |
+
else:
|
| 1190 |
+
self.data = list(initlist)
|
| 1191 |
+
|
| 1192 |
+
def __repr__(self):
|
| 1193 |
+
return repr(self.data)
|
| 1194 |
+
|
| 1195 |
+
def __lt__(self, other):
|
| 1196 |
+
return self.data < self.__cast(other)
|
| 1197 |
+
|
| 1198 |
+
def __le__(self, other):
|
| 1199 |
+
return self.data <= self.__cast(other)
|
| 1200 |
+
|
| 1201 |
+
def __eq__(self, other):
|
| 1202 |
+
return self.data == self.__cast(other)
|
| 1203 |
+
|
| 1204 |
+
def __gt__(self, other):
|
| 1205 |
+
return self.data > self.__cast(other)
|
| 1206 |
+
|
| 1207 |
+
def __ge__(self, other):
|
| 1208 |
+
return self.data >= self.__cast(other)
|
| 1209 |
+
|
| 1210 |
+
def __cast(self, other):
|
| 1211 |
+
return other.data if isinstance(other, UserList) else other
|
| 1212 |
+
|
| 1213 |
+
def __contains__(self, item):
|
| 1214 |
+
return item in self.data
|
| 1215 |
+
|
| 1216 |
+
def __len__(self):
|
| 1217 |
+
return len(self.data)
|
| 1218 |
+
|
| 1219 |
+
def __getitem__(self, i):
|
| 1220 |
+
if isinstance(i, slice):
|
| 1221 |
+
return self.__class__(self.data[i])
|
| 1222 |
+
else:
|
| 1223 |
+
return self.data[i]
|
| 1224 |
+
|
| 1225 |
+
def __setitem__(self, i, item):
|
| 1226 |
+
self.data[i] = item
|
| 1227 |
+
|
| 1228 |
+
def __delitem__(self, i):
|
| 1229 |
+
del self.data[i]
|
| 1230 |
+
|
| 1231 |
+
def __add__(self, other):
|
| 1232 |
+
if isinstance(other, UserList):
|
| 1233 |
+
return self.__class__(self.data + other.data)
|
| 1234 |
+
elif isinstance(other, type(self.data)):
|
| 1235 |
+
return self.__class__(self.data + other)
|
| 1236 |
+
return self.__class__(self.data + list(other))
|
| 1237 |
+
|
| 1238 |
+
def __radd__(self, other):
|
| 1239 |
+
if isinstance(other, UserList):
|
| 1240 |
+
return self.__class__(other.data + self.data)
|
| 1241 |
+
elif isinstance(other, type(self.data)):
|
| 1242 |
+
return self.__class__(other + self.data)
|
| 1243 |
+
return self.__class__(list(other) + self.data)
|
| 1244 |
+
|
| 1245 |
+
def __iadd__(self, other):
|
| 1246 |
+
if isinstance(other, UserList):
|
| 1247 |
+
self.data += other.data
|
| 1248 |
+
elif isinstance(other, type(self.data)):
|
| 1249 |
+
self.data += other
|
| 1250 |
+
else:
|
| 1251 |
+
self.data += list(other)
|
| 1252 |
+
return self
|
| 1253 |
+
|
| 1254 |
+
def __mul__(self, n):
|
| 1255 |
+
return self.__class__(self.data * n)
|
| 1256 |
+
|
| 1257 |
+
__rmul__ = __mul__
|
| 1258 |
+
|
| 1259 |
+
def __imul__(self, n):
|
| 1260 |
+
self.data *= n
|
| 1261 |
+
return self
|
| 1262 |
+
|
| 1263 |
+
def __copy__(self):
    """Shallow copy: duplicate instance state with an independent data list."""
    clone = self.__class__.__new__(self.__class__)
    clone.__dict__.update(self.__dict__)
    # Write through __dict__ so no descriptor machinery runs; the slice
    # detaches the clone's list from ours.
    clone.__dict__["data"] = self.__dict__["data"][:]
    return clone
|
| 1269 |
+
|
| 1270 |
+
def append(self, item):
    """Append *item* at the end of the wrapped list."""
    self.data.append(item)
|
| 1272 |
+
|
| 1273 |
+
def insert(self, i, item):
    """Insert *item* before index *i* in the wrapped list."""
    self.data.insert(i, item)
|
| 1275 |
+
|
| 1276 |
+
def pop(self, i=-1):
    """Remove and return the item at index *i* (default: the last item)."""
    return self.data.pop(i)
|
| 1278 |
+
|
| 1279 |
+
def remove(self, item):
    """Remove the first occurrence of *item* from the wrapped list."""
    self.data.remove(item)
|
| 1281 |
+
|
| 1282 |
+
def clear(self):
    """Remove all items from the wrapped list."""
    self.data.clear()
|
| 1284 |
+
|
| 1285 |
+
def copy(self):
    """Return a new instance of this class built from self's items."""
    return self.__class__(self)
|
| 1287 |
+
|
| 1288 |
+
def count(self, item):
    """Number of occurrences of *item* in the wrapped list."""
    return self.data.count(item)
|
| 1290 |
+
|
| 1291 |
+
def index(self, item, *args):
    """Index of the first occurrence of *item*; extra args bound the search."""
    return self.data.index(item, *args)
|
| 1293 |
+
|
| 1294 |
+
def reverse(self):
    """Reverse the wrapped list in place."""
    self.data.reverse()
|
| 1296 |
+
|
| 1297 |
+
def sort(self, /, *args, **kwds):
    """Sort the wrapped list in place; args are forwarded to list.sort."""
    self.data.sort(*args, **kwds)
|
| 1299 |
+
|
| 1300 |
+
def extend(self, other):
    """Extend the wrapped list with the items of *other* (UserList-aware)."""
    items = other.data if isinstance(other, UserList) else other
    self.data.extend(items)
|
| 1305 |
+
|
| 1306 |
+
|
| 1307 |
+
################################################################################
|
| 1308 |
+
### UserString
|
| 1309 |
+
################################################################################
|
| 1310 |
+
|
| 1311 |
+
class UserString(_collections_abc.Sequence):
    """A wrapper around string objects; the real str lives in ``self.data``.

    Designed for subclassing: operations that produce a new string return
    ``self.__class__(...)`` so subclasses keep their own type.
    """

    def __init__(self, seq):
        # Accept a plain str, another UserString, or anything str() accepts.
        if isinstance(seq, str):
            self.data = seq
        elif isinstance(seq, UserString):
            self.data = seq.data[:]
        else:
            self.data = str(seq)

    # --- conversions ------------------------------------------------------
    def __str__(self):
        return str(self.data)

    def __repr__(self):
        return repr(self.data)

    def __int__(self):
        return int(self.data)

    def __float__(self):
        return float(self.data)

    def __complex__(self):
        return complex(self.data)

    def __hash__(self):
        return hash(self.data)

    def __getnewargs__(self):
        # Pickle support: rebuild from a copy of the underlying string.
        return (self.data[:],)

    # --- rich comparisons (unwrap a UserString operand first) -------------
    def __eq__(self, string):
        other = string.data if isinstance(string, UserString) else string
        return self.data == other

    def __lt__(self, string):
        other = string.data if isinstance(string, UserString) else string
        return self.data < other

    def __le__(self, string):
        other = string.data if isinstance(string, UserString) else string
        return self.data <= other

    def __gt__(self, string):
        other = string.data if isinstance(string, UserString) else string
        return self.data > other

    def __ge__(self, string):
        other = string.data if isinstance(string, UserString) else string
        return self.data >= other

    def __contains__(self, char):
        if isinstance(char, UserString):
            char = char.data
        return char in self.data

    # --- sequence protocol ------------------------------------------------
    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.__class__(self.data[index])

    # --- operators ---------------------------------------------------------
    def __add__(self, other):
        if isinstance(other, UserString):
            tail = other.data
        elif isinstance(other, str):
            tail = other
        else:
            tail = str(other)
        return self.__class__(self.data + tail)

    def __radd__(self, other):
        head = other if isinstance(other, str) else str(other)
        return self.__class__(head + self.data)

    def __mul__(self, n):
        return self.__class__(self.data * n)

    __rmul__ = __mul__

    def __mod__(self, args):
        return self.__class__(self.data % args)

    def __rmod__(self, template):
        return self.__class__(str(template) % self)

    # --- str API, alphabetical ---------------------------------------------
    def capitalize(self):
        return self.__class__(self.data.capitalize())

    def casefold(self):
        return self.__class__(self.data.casefold())

    def center(self, width, *args):
        return self.__class__(self.data.center(width, *args))

    def count(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.count(sub, start, end)

    def removeprefix(self, prefix, /):
        if isinstance(prefix, UserString):
            prefix = prefix.data
        return self.__class__(self.data.removeprefix(prefix))

    def removesuffix(self, suffix, /):
        if isinstance(suffix, UserString):
            suffix = suffix.data
        return self.__class__(self.data.removesuffix(suffix))

    def encode(self, encoding='utf-8', errors='strict'):
        # None is tolerated for either argument and mapped to the defaults.
        encoding = 'utf-8' if encoding is None else encoding
        errors = 'strict' if errors is None else errors
        return self.data.encode(encoding, errors)

    def endswith(self, suffix, start=0, end=_sys.maxsize):
        return self.data.endswith(suffix, start, end)

    def expandtabs(self, tabsize=8):
        return self.__class__(self.data.expandtabs(tabsize))

    def find(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.find(sub, start, end)

    def format(self, /, *args, **kwds):
        return self.data.format(*args, **kwds)

    def format_map(self, mapping):
        return self.data.format_map(mapping)

    def index(self, sub, start=0, end=_sys.maxsize):
        return self.data.index(sub, start, end)

    def isalpha(self):
        return self.data.isalpha()

    def isalnum(self):
        return self.data.isalnum()

    def isascii(self):
        return self.data.isascii()

    def isdecimal(self):
        return self.data.isdecimal()

    def isdigit(self):
        return self.data.isdigit()

    def isidentifier(self):
        return self.data.isidentifier()

    def islower(self):
        return self.data.islower()

    def isnumeric(self):
        return self.data.isnumeric()

    def isprintable(self):
        return self.data.isprintable()

    def isspace(self):
        return self.data.isspace()

    def istitle(self):
        return self.data.istitle()

    def isupper(self):
        return self.data.isupper()

    def join(self, seq):
        return self.data.join(seq)

    def ljust(self, width, *args):
        return self.__class__(self.data.ljust(width, *args))

    def lower(self):
        return self.__class__(self.data.lower())

    def lstrip(self, chars=None):
        return self.__class__(self.data.lstrip(chars))

    maketrans = str.maketrans

    def partition(self, sep):
        return self.data.partition(sep)

    def replace(self, old, new, maxsplit=-1):
        # NOTE(review): the parameter is named maxsplit but is forwarded as
        # str.replace's *count* argument; kept for interface compatibility.
        if isinstance(old, UserString):
            old = old.data
        if isinstance(new, UserString):
            new = new.data
        return self.__class__(self.data.replace(old, new, maxsplit))

    def rfind(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.rfind(sub, start, end)

    def rindex(self, sub, start=0, end=_sys.maxsize):
        return self.data.rindex(sub, start, end)

    def rjust(self, width, *args):
        return self.__class__(self.data.rjust(width, *args))

    def rpartition(self, sep):
        return self.data.rpartition(sep)

    def rstrip(self, chars=None):
        return self.__class__(self.data.rstrip(chars))

    def split(self, sep=None, maxsplit=-1):
        return self.data.split(sep, maxsplit)

    def rsplit(self, sep=None, maxsplit=-1):
        return self.data.rsplit(sep, maxsplit)

    def splitlines(self, keepends=False):
        return self.data.splitlines(keepends)

    def startswith(self, prefix, start=0, end=_sys.maxsize):
        return self.data.startswith(prefix, start, end)

    def strip(self, chars=None):
        return self.__class__(self.data.strip(chars))

    def swapcase(self):
        return self.__class__(self.data.swapcase())

    def title(self):
        return self.__class__(self.data.title())

    def translate(self, *args):
        return self.__class__(self.data.translate(*args))

    def upper(self):
        return self.__class__(self.data.upper())

    def zfill(self, width):
        return self.__class__(self.data.zfill(width))
|
deepseek/lib/python3.10/distutils/command/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (715 Bytes). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/build_scripts.cpython-310.pyc
ADDED
|
Binary file (4.32 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/clean.cpython-310.pyc
ADDED
|
Binary file (2.09 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/config.cpython-310.pyc
ADDED
|
Binary file (10.5 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/install.cpython-310.pyc
ADDED
|
Binary file (14.1 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/install_data.cpython-310.pyc
ADDED
|
Binary file (2.29 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/install_headers.cpython-310.pyc
ADDED
|
Binary file (1.71 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/__pycache__/upload.cpython-310.pyc
ADDED
|
Binary file (5.34 kB). View file
|
|
|
deepseek/lib/python3.10/distutils/command/bdist_dumb.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist_dumb
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
|
| 4 |
+
distribution -- i.e., just an archive to be unpacked under $prefix or
|
| 5 |
+
$exec_prefix)."""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from distutils.core import Command
|
| 9 |
+
from distutils.util import get_platform
|
| 10 |
+
from distutils.dir_util import remove_tree, ensure_relative
|
| 11 |
+
from distutils.errors import *
|
| 12 |
+
from distutils.sysconfig import get_python_version
|
| 13 |
+
from distutils import log
|
| 14 |
+
|
| 15 |
+
class bdist_dumb(Command):
    """distutils command: create a "dumb" built distribution — an archive
    of the installed tree, meant to be unpacked under $prefix or
    $exec_prefix."""

    description = "create a \"dumb\" built distribution"

    # (long option, short option, help text) triples understood by distutils.
    user_options = [('bdist-dir=', 'd',
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('format=', 'f',
                     "archive format to create (tar, gztar, bztar, xztar, "
                     "ztar, zip)"),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('relative', None,
                     "build the archive using relative paths "
                     "(default: false)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                   ]

    boolean_options = ['keep-temp', 'skip-build', 'relative']

    # Archive format used when --format is not given, keyed by os.name.
    default_format = {'posix': 'gztar',
                      'nt': 'zip'}

    def initialize_options(self):
        """Reset every option to its 'not yet set' marker."""
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = None
        self.relative = 0
        self.owner = None
        self.group = None

    def finalize_options(self):
        """Fill in defaults for anything initialize_options() left unset."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            fmt = self.default_format.get(os.name)
            if fmt is None:
                raise DistutilsPlatformError(
                    "don't know how to create dumb built distributions "
                    "on platform %s" % os.name)
            self.format = fmt

        # Inherit whatever the umbrella 'bdist' command decided.
        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                   ('skip_build', 'skip_build'))

    def run(self):
        """Build, pseudo-install into bdist_dir, then archive the tree."""
        if not self.skip_build:
            self.run_command('build')

        # Perform a throwaway installation rooted at bdist_dir.
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        log.info("installing to %s", self.bdist_dir)
        self.run_command('install')

        # Archive the pseudo-installation tree, rooted either at bdist_dir
        # itself or (with --relative) at the install base inside it.
        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
                                      self.plat_name)

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        elif (self.distribution.has_ext_modules() and
              (install.install_base != install.install_platbase)):
            raise DistutilsPlatformError(
                "can't make a dumb built distribution where "
                "base and platbase are different (%s, %s)"
                % (repr(install.install_base),
                   repr(install.install_platbase)))
        else:
            archive_root = os.path.join(self.bdist_dir,
                                        ensure_relative(install.install_base))

        # Make the archive
        filename = self.make_archive(pseudoinstall_root,
                                     self.format, root_dir=archive_root,
                                     owner=self.owner, group=self.group)
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion,
                                             filename))

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
deepseek/lib/python3.10/ensurepip/__init__.py
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import os
|
| 3 |
+
import os.path
|
| 4 |
+
import subprocess
|
| 5 |
+
import sys
|
| 6 |
+
import sysconfig
|
| 7 |
+
import tempfile
|
| 8 |
+
from importlib import resources
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
__all__ = ["version", "bootstrap"]

# Names and pinned versions of the packages ensurepip knows how to install.
_PACKAGE_NAMES = ('setuptools', 'pip')
_SETUPTOOLS_VERSION = "65.5.0"
_PIP_VERSION = "23.0.1"
_PROJECTS = [
    ("setuptools", _SETUPTOOLS_VERSION, "py3"),
    ("pip", _PIP_VERSION, "py3"),
]

# Packages bundled in ensurepip._bundled have wheel_name set;
# packages found in WHEEL_PKG_DIR have wheel_path set.
_Package = collections.namedtuple('Package',
                                  ('version', 'wheel_name', 'wheel_path'))

# Directory of system wheel packages. Some Linux distribution packaging
# policies recommend against bundling dependencies: e.g. Fedora installs
# wheel packages in /usr/share/python-wheels/ and does not ship the
# ensurepip._bundled package.
_WHEEL_PKG_DIR = sysconfig.get_config_var('WHEEL_PKG_DIR')
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _find_packages(path):
    """Scan *path* for setuptools/pip wheels; return ``{name: _Package}``."""
    packages = {}
    try:
        filenames = os.listdir(path)
    except OSError:
        # Path missing or unreadable: behave as if the directory were empty.
        filenames = ()
    # Sort so that, if several wheels of one package are present, the choice
    # is deterministic (no real version comparison is attempted).
    for filename in sorted(filenames):
        # filename is like 'pip-21.2.4-py3-none-any.whl'
        if not filename.endswith(".whl"):
            continue
        matched = None
        for name in _PACKAGE_NAMES:
            prefix = name + '-'
            if filename.startswith(prefix):
                matched = name
                break
        if matched is None:
            continue

        # Extract '21.2.4' from 'pip-21.2.4-py3-none-any.whl'
        version = filename.removeprefix(prefix).partition('-')[0]
        wheel_path = os.path.join(path, filename)
        packages[matched] = _Package(version, None, wheel_path)
    return packages
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _get_packages():
    """Return ``{name: _Package}``, preferring WHEEL_PKG_DIR over the
    bundled wheels; the result is cached in the module-global _PACKAGES."""
    global _PACKAGES, _WHEEL_PKG_DIR
    if _PACKAGES is not None:
        return _PACKAGES

    packages = {
        name: _Package(version, f"{name}-{version}-{py_tag}-none-any.whl", None)
        for name, version, py_tag in _PROJECTS
    }
    if _WHEEL_PKG_DIR:
        dir_packages = _find_packages(_WHEEL_PKG_DIR)
        # Use the system wheel directory only if every package is found there.
        if all(name in dir_packages for name in _PACKAGE_NAMES):
            packages = dir_packages
    _PACKAGES = packages
    return packages
_PACKAGES = None
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _run_pip(args, additional_paths=None):
    """Run the bundled pip in a child interpreter; return its exit status.

    A subprocess is used so pip cannot leak state back into this process —
    in particular it cannot keep handles on the wheels in
    *additional_paths*, which would prevent removing them afterwards.
    """
    code = f"""
import runpy
import sys
sys.path = {additional_paths or []} + sys.path
sys.argv[1:] = {args}
runpy.run_module("pip", run_name="__main__", alter_sys=True)
"""

    cmd = [sys.executable, '-W', 'ignore::DeprecationWarning', '-c', code]
    if sys.flags.isolated:
        # Propagate isolated mode to the child interpreter.
        cmd.insert(1, '-I')
    return subprocess.run(cmd, check=True).returncode
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def version():
    """
    Returns a string specifying the bundled version of pip.
    """
    pip_package = _get_packages()['pip']
    return pip_package.version
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def _disable_pip_configuration_settings():
    """Scrub the environment so pip runs with no user configuration.

    Every PIP_* environment variable is removed (see bpo-19734) and the
    default pip configuration file is redirected to os.devnull
    (see bpo-20053).
    """
    for key in [k for k in os.environ if k.startswith("PIP_")]:
        del os.environ[key]
    os.environ['PIP_CONFIG_FILE'] = os.devnull
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def bootstrap(*, root=None, upgrade=False, user=False,
              altinstall=False, default_pip=False,
              verbosity=0):
    """
    Bootstrap pip into the current Python installation (or the given root
    directory).

    Note that calling this function will alter both sys.path and os.environ.
    """
    # Public wrapper: identical to _bootstrap() but discards the status code.
    _bootstrap(root=root, upgrade=upgrade, user=user,
               altinstall=altinstall, default_pip=default_pip,
               verbosity=verbosity)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _bootstrap(*, root=None, upgrade=False, user=False,
               altinstall=False, default_pip=False,
               verbosity=0):
    """
    Bootstrap pip into the current Python installation (or the given root
    directory). Returns pip command status code.

    Note that calling this function will alter both sys.path and os.environ.
    """
    if altinstall and default_pip:
        raise ValueError("Cannot use altinstall and default_pip together")

    sys.audit("ensurepip.bootstrap", root)

    _disable_pip_configuration_settings()

    # Installing pip and setuptools normally creates pip, pipX, pipX.Y,
    # easy_install and easy_install-X.Y (X.Y == running Python version);
    # pip 1.5+ lets ensurepip ask for some of those to be left out.
    if altinstall:
        # Keep only the fully-versioned scripts (pipX.Y, easy_install-X.Y).
        os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
    elif not default_pip:
        # Drop only the unqualified pip and easy_install.
        os.environ["ENSUREPIP_OPTIONS"] = "install"

    with tempfile.TemporaryDirectory() as tmpdir:
        # Materialize each wheel inside tmpdir and remember its path so the
        # child pip process can import straight out of the wheels.
        additional_paths = []
        for name, package in _get_packages().items():
            if package.wheel_name:
                # Use bundled wheel package
                from ensurepip import _bundled
                wheel_name = package.wheel_name
                whl = resources.read_binary(_bundled, wheel_name)
            else:
                # Use the wheel package directory
                with open(package.wheel_path, "rb") as fp:
                    whl = fp.read()
                wheel_name = os.path.basename(package.wheel_path)

            filename = os.path.join(tmpdir, wheel_name)
            with open(filename, "wb") as fp:
                fp.write(whl)

            additional_paths.append(filename)

        # Assemble the pip command line.
        args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
        if root:
            args += ["--root", root]
        if upgrade:
            args += ["--upgrade"]
        if user:
            args += ["--user"]
        if verbosity:
            args += ["-" + "v" * verbosity]

        return _run_pip([*args, *_PACKAGE_NAMES], additional_paths)
|
| 204 |
+
|
| 205 |
+
def _uninstall_helper(*, verbosity=0):
    """Helper to support a clean default uninstall process on Windows

    Note that calling this function may alter os.environ.
    """
    # Nothing to do if pip was never installed, or has been removed
    try:
        import pip
    except ImportError:
        return

    # Refuse to touch a pip that is not the exact bundled version.
    available_version = version()
    if pip.__version__ != available_version:
        print(f"ensurepip will only uninstall a matching version "
              f"({pip.__version__!r} installed, "
              f"{available_version!r} available)",
              file=sys.stderr)
        return

    _disable_pip_configuration_settings()

    # Construct the arguments to be passed to the pip command
    args = ["uninstall", "-y", "--disable-pip-version-check"]
    if verbosity:
        args += ["-" + "v" * verbosity]

    # Uninstall in reverse install order: pip before setuptools.
    return _run_pip([*args, *reversed(_PACKAGE_NAMES)])
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def _main(argv=None):
    """Command-line entry point for ``python -m ensurepip``."""
    import argparse
    parser = argparse.ArgumentParser(prog="python -m ensurepip")
    parser.add_argument(
        "--version",
        action="version",
        version="pip {}".format(version()),
        help="Show the version of pip that is bundled with this Python.",
    )
    parser.add_argument(
        "-v", "--verbose",
        action="count",
        default=0,
        dest="verbosity",
        help=("Give more output. Option is additive, and can be used up to 3 "
              "times."),
    )
    parser.add_argument(
        "-U", "--upgrade",
        action="store_true",
        default=False,
        help="Upgrade pip and dependencies, even if already installed.",
    )
    parser.add_argument(
        "--user",
        action="store_true",
        default=False,
        help="Install using the user scheme.",
    )
    parser.add_argument(
        "--root",
        default=None,
        help="Install everything relative to this alternate root directory.",
    )
    parser.add_argument(
        "--altinstall",
        action="store_true",
        default=False,
        help=("Make an alternate install, installing only the X.Y versioned "
              "scripts (Default: pipX, pipX.Y, easy_install-X.Y)."),
    )
    parser.add_argument(
        "--default-pip",
        action="store_true",
        default=False,
        help=("Make a default pip install, installing the unqualified pip "
              "and easy_install in addition to the versioned scripts."),
    )

    args = parser.parse_args(argv)

    # Forward the parsed options to the real worker and propagate its status.
    return _bootstrap(
        root=args.root,
        upgrade=args.upgrade,
        user=args.user,
        verbosity=args.verbosity,
        altinstall=args.altinstall,
        default_pip=args.default_pip,
    )
|
deepseek/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f
|
| 3 |
+
size 2055563
|
deepseek/lib/python3.10/json/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
deepseek/lib/python3.10/json/__pycache__/decoder.cpython-310.pyc
ADDED
|
Binary file (10 kB). View file
|
|
|
deepseek/lib/python3.10/json/tool.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Command-line tool to validate and pretty-print JSON
|
| 2 |
+
|
| 3 |
+
Usage::
|
| 4 |
+
|
| 5 |
+
$ echo '{"json":"obj"}' | python -m json.tool
|
| 6 |
+
{
|
| 7 |
+
"json": "obj"
|
| 8 |
+
}
|
| 9 |
+
$ echo '{ 1.2:3.4}' | python -m json.tool
|
| 10 |
+
Expecting property name enclosed in double quotes: line 1 column 3 (char 2)
|
| 11 |
+
|
| 12 |
+
"""
|
| 13 |
+
import argparse
|
| 14 |
+
import json
|
| 15 |
+
import sys
|
| 16 |
+
from pathlib import Path
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def main():
|
| 20 |
+
prog = 'python -m json.tool'
|
| 21 |
+
description = ('A simple command line interface for json module '
|
| 22 |
+
'to validate and pretty-print JSON objects.')
|
| 23 |
+
parser = argparse.ArgumentParser(prog=prog, description=description)
|
| 24 |
+
parser.add_argument('infile', nargs='?',
|
| 25 |
+
type=argparse.FileType(encoding="utf-8"),
|
| 26 |
+
help='a JSON file to be validated or pretty-printed',
|
| 27 |
+
default=sys.stdin)
|
| 28 |
+
parser.add_argument('outfile', nargs='?',
|
| 29 |
+
type=Path,
|
| 30 |
+
help='write the output of infile to outfile',
|
| 31 |
+
default=None)
|
| 32 |
+
parser.add_argument('--sort-keys', action='store_true', default=False,
|
| 33 |
+
help='sort the output of dictionaries alphabetically by key')
|
| 34 |
+
parser.add_argument('--no-ensure-ascii', dest='ensure_ascii', action='store_false',
|
| 35 |
+
help='disable escaping of non-ASCII characters')
|
| 36 |
+
parser.add_argument('--json-lines', action='store_true', default=False,
|
| 37 |
+
help='parse input using the JSON Lines format. '
|
| 38 |
+
'Use with --no-indent or --compact to produce valid JSON Lines output.')
|
| 39 |
+
group = parser.add_mutually_exclusive_group()
|
| 40 |
+
group.add_argument('--indent', default=4, type=int,
|
| 41 |
+
help='separate items with newlines and use this number '
|
| 42 |
+
'of spaces for indentation')
|
| 43 |
+
group.add_argument('--tab', action='store_const', dest='indent',
|
| 44 |
+
const='\t', help='separate items with newlines and use '
|
| 45 |
+
'tabs for indentation')
|
| 46 |
+
group.add_argument('--no-indent', action='store_const', dest='indent',
|
| 47 |
+
const=None,
|
| 48 |
+
help='separate items with spaces rather than newlines')
|
| 49 |
+
group.add_argument('--compact', action='store_true',
|
| 50 |
+
help='suppress all whitespace separation (most compact)')
|
| 51 |
+
options = parser.parse_args()
|
| 52 |
+
|
| 53 |
+
dump_args = {
|
| 54 |
+
'sort_keys': options.sort_keys,
|
| 55 |
+
'indent': options.indent,
|
| 56 |
+
'ensure_ascii': options.ensure_ascii,
|
| 57 |
+
}
|
| 58 |
+
if options.compact:
|
| 59 |
+
dump_args['indent'] = None
|
| 60 |
+
dump_args['separators'] = ',', ':'
|
| 61 |
+
|
| 62 |
+
with options.infile as infile:
|
| 63 |
+
try:
|
| 64 |
+
if options.json_lines:
|
| 65 |
+
objs = (json.loads(line) for line in infile)
|
| 66 |
+
else:
|
| 67 |
+
objs = (json.load(infile),)
|
| 68 |
+
|
| 69 |
+
if options.outfile is None:
|
| 70 |
+
out = sys.stdout
|
| 71 |
+
else:
|
| 72 |
+
out = options.outfile.open('w', encoding='utf-8')
|
| 73 |
+
with out as outfile:
|
| 74 |
+
for obj in objs:
|
| 75 |
+
json.dump(obj, outfile, **dump_args)
|
| 76 |
+
outfile.write('\n')
|
| 77 |
+
except ValueError as e:
|
| 78 |
+
raise SystemExit(e)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
if __name__ == '__main__':
|
| 82 |
+
try:
|
| 83 |
+
main()
|
| 84 |
+
except BrokenPipeError as exc:
|
| 85 |
+
sys.exit(exc.errno)
|
deepseek/lib/python3.10/lib2to3/PatternGrammar.txt
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
# A grammar to describe tree matching patterns.
|
| 5 |
+
# Not shown here:
|
| 6 |
+
# - 'TOKEN' stands for any token (leaf node)
|
| 7 |
+
# - 'any' stands for any node (leaf or interior)
|
| 8 |
+
# With 'any' we can still specify the sub-structure.
|
| 9 |
+
|
| 10 |
+
# The start symbol is 'Matcher'.
|
| 11 |
+
|
| 12 |
+
Matcher: Alternatives ENDMARKER
|
| 13 |
+
|
| 14 |
+
Alternatives: Alternative ('|' Alternative)*
|
| 15 |
+
|
| 16 |
+
Alternative: (Unit | NegatedUnit)+
|
| 17 |
+
|
| 18 |
+
Unit: [NAME '='] ( STRING [Repeater]
|
| 19 |
+
| NAME [Details] [Repeater]
|
| 20 |
+
| '(' Alternatives ')' [Repeater]
|
| 21 |
+
| '[' Alternatives ']'
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')')
|
| 25 |
+
|
| 26 |
+
Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}'
|
| 27 |
+
|
| 28 |
+
Details: '<' Alternatives '>'
|
deepseek/lib/python3.10/lib2to3/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import warnings
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
warnings.warn(
|
| 5 |
+
"lib2to3 package is deprecated and may not be able to parse Python 3.10+",
|
| 6 |
+
PendingDeprecationWarning,
|
| 7 |
+
stacklevel=2,
|
| 8 |
+
)
|
deepseek/lib/python3.10/lib2to3/__main__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from .main import main
|
| 3 |
+
|
| 4 |
+
sys.exit(main("lib2to3.fixes"))
|
deepseek/lib/python3.10/lib2to3/btm_matcher.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A bottom-up tree matching algorithm implementation meant to speed
|
| 2 |
+
up 2to3's matching process. After the tree patterns are reduced to
|
| 3 |
+
their rarest linear path, a linear Aho-Corasick automaton is
|
| 4 |
+
created. The linear automaton traverses the linear paths from the
|
| 5 |
+
leaves to the root of the AST and returns a set of nodes for further
|
| 6 |
+
matching. This reduces significantly the number of candidate nodes."""
|
| 7 |
+
|
| 8 |
+
__author__ = "George Boutsioukis <gboutsioukis@gmail.com>"
|
| 9 |
+
|
| 10 |
+
import logging
|
| 11 |
+
import itertools
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
|
| 14 |
+
from . import pytree
|
| 15 |
+
from .btm_utils import reduce_tree
|
| 16 |
+
|
| 17 |
+
class BMNode(object):
|
| 18 |
+
"""Class for a node of the Aho-Corasick automaton used in matching"""
|
| 19 |
+
count = itertools.count()
|
| 20 |
+
def __init__(self):
|
| 21 |
+
self.transition_table = {}
|
| 22 |
+
self.fixers = []
|
| 23 |
+
self.id = next(BMNode.count)
|
| 24 |
+
self.content = ''
|
| 25 |
+
|
| 26 |
+
class BottomMatcher(object):
|
| 27 |
+
"""The main matcher class. After instantiating the patterns should
|
| 28 |
+
be added using the add_fixer method"""
|
| 29 |
+
|
| 30 |
+
def __init__(self):
|
| 31 |
+
self.match = set()
|
| 32 |
+
self.root = BMNode()
|
| 33 |
+
self.nodes = [self.root]
|
| 34 |
+
self.fixers = []
|
| 35 |
+
self.logger = logging.getLogger("RefactoringTool")
|
| 36 |
+
|
| 37 |
+
def add_fixer(self, fixer):
|
| 38 |
+
"""Reduces a fixer's pattern tree to a linear path and adds it
|
| 39 |
+
to the matcher(a common Aho-Corasick automaton). The fixer is
|
| 40 |
+
appended on the matching states and called when they are
|
| 41 |
+
reached"""
|
| 42 |
+
self.fixers.append(fixer)
|
| 43 |
+
tree = reduce_tree(fixer.pattern_tree)
|
| 44 |
+
linear = tree.get_linear_subpattern()
|
| 45 |
+
match_nodes = self.add(linear, start=self.root)
|
| 46 |
+
for match_node in match_nodes:
|
| 47 |
+
match_node.fixers.append(fixer)
|
| 48 |
+
|
| 49 |
+
def add(self, pattern, start):
|
| 50 |
+
"Recursively adds a linear pattern to the AC automaton"
|
| 51 |
+
#print("adding pattern", pattern, "to", start)
|
| 52 |
+
if not pattern:
|
| 53 |
+
#print("empty pattern")
|
| 54 |
+
return [start]
|
| 55 |
+
if isinstance(pattern[0], tuple):
|
| 56 |
+
#alternatives
|
| 57 |
+
#print("alternatives")
|
| 58 |
+
match_nodes = []
|
| 59 |
+
for alternative in pattern[0]:
|
| 60 |
+
#add all alternatives, and add the rest of the pattern
|
| 61 |
+
#to each end node
|
| 62 |
+
end_nodes = self.add(alternative, start=start)
|
| 63 |
+
for end in end_nodes:
|
| 64 |
+
match_nodes.extend(self.add(pattern[1:], end))
|
| 65 |
+
return match_nodes
|
| 66 |
+
else:
|
| 67 |
+
#single token
|
| 68 |
+
#not last
|
| 69 |
+
if pattern[0] not in start.transition_table:
|
| 70 |
+
#transition did not exist, create new
|
| 71 |
+
next_node = BMNode()
|
| 72 |
+
start.transition_table[pattern[0]] = next_node
|
| 73 |
+
else:
|
| 74 |
+
#transition exists already, follow
|
| 75 |
+
next_node = start.transition_table[pattern[0]]
|
| 76 |
+
|
| 77 |
+
if pattern[1:]:
|
| 78 |
+
end_nodes = self.add(pattern[1:], start=next_node)
|
| 79 |
+
else:
|
| 80 |
+
end_nodes = [next_node]
|
| 81 |
+
return end_nodes
|
| 82 |
+
|
| 83 |
+
def run(self, leaves):
|
| 84 |
+
"""The main interface with the bottom matcher. The tree is
|
| 85 |
+
traversed from the bottom using the constructed
|
| 86 |
+
automaton. Nodes are only checked once as the tree is
|
| 87 |
+
retraversed. When the automaton fails, we give it one more
|
| 88 |
+
shot(in case the above tree matches as a whole with the
|
| 89 |
+
rejected leaf), then we break for the next leaf. There is the
|
| 90 |
+
special case of multiple arguments(see code comments) where we
|
| 91 |
+
recheck the nodes
|
| 92 |
+
|
| 93 |
+
Args:
|
| 94 |
+
The leaves of the AST tree to be matched
|
| 95 |
+
|
| 96 |
+
Returns:
|
| 97 |
+
A dictionary of node matches with fixers as the keys
|
| 98 |
+
"""
|
| 99 |
+
current_ac_node = self.root
|
| 100 |
+
results = defaultdict(list)
|
| 101 |
+
for leaf in leaves:
|
| 102 |
+
current_ast_node = leaf
|
| 103 |
+
while current_ast_node:
|
| 104 |
+
current_ast_node.was_checked = True
|
| 105 |
+
for child in current_ast_node.children:
|
| 106 |
+
# multiple statements, recheck
|
| 107 |
+
if isinstance(child, pytree.Leaf) and child.value == ";":
|
| 108 |
+
current_ast_node.was_checked = False
|
| 109 |
+
break
|
| 110 |
+
if current_ast_node.type == 1:
|
| 111 |
+
#name
|
| 112 |
+
node_token = current_ast_node.value
|
| 113 |
+
else:
|
| 114 |
+
node_token = current_ast_node.type
|
| 115 |
+
|
| 116 |
+
if node_token in current_ac_node.transition_table:
|
| 117 |
+
#token matches
|
| 118 |
+
current_ac_node = current_ac_node.transition_table[node_token]
|
| 119 |
+
for fixer in current_ac_node.fixers:
|
| 120 |
+
results[fixer].append(current_ast_node)
|
| 121 |
+
else:
|
| 122 |
+
#matching failed, reset automaton
|
| 123 |
+
current_ac_node = self.root
|
| 124 |
+
if (current_ast_node.parent is not None
|
| 125 |
+
and current_ast_node.parent.was_checked):
|
| 126 |
+
#the rest of the tree upwards has been checked, next leaf
|
| 127 |
+
break
|
| 128 |
+
|
| 129 |
+
#recheck the rejected node once from the root
|
| 130 |
+
if node_token in current_ac_node.transition_table:
|
| 131 |
+
#token matches
|
| 132 |
+
current_ac_node = current_ac_node.transition_table[node_token]
|
| 133 |
+
for fixer in current_ac_node.fixers:
|
| 134 |
+
results[fixer].append(current_ast_node)
|
| 135 |
+
|
| 136 |
+
current_ast_node = current_ast_node.parent
|
| 137 |
+
return results
|
| 138 |
+
|
| 139 |
+
def print_ac(self):
|
| 140 |
+
"Prints a graphviz diagram of the BM automaton(for debugging)"
|
| 141 |
+
print("digraph g{")
|
| 142 |
+
def print_node(node):
|
| 143 |
+
for subnode_key in node.transition_table.keys():
|
| 144 |
+
subnode = node.transition_table[subnode_key]
|
| 145 |
+
print("%d -> %d [label=%s] //%s" %
|
| 146 |
+
(node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))
|
| 147 |
+
if subnode_key == 1:
|
| 148 |
+
print(subnode.content)
|
| 149 |
+
print_node(subnode)
|
| 150 |
+
print_node(self.root)
|
| 151 |
+
print("}")
|
| 152 |
+
|
| 153 |
+
# taken from pytree.py for debugging; only used by print_ac
|
| 154 |
+
_type_reprs = {}
|
| 155 |
+
def type_repr(type_num):
|
| 156 |
+
global _type_reprs
|
| 157 |
+
if not _type_reprs:
|
| 158 |
+
from .pygram import python_symbols
|
| 159 |
+
# printing tokens is possible but not as useful
|
| 160 |
+
# from .pgen2 import token // token.__dict__.items():
|
| 161 |
+
for name, val in python_symbols.__dict__.items():
|
| 162 |
+
if type(val) == int: _type_reprs[val] = name
|
| 163 |
+
return _type_reprs.setdefault(type_num, type_num)
|
deepseek/lib/python3.10/lib2to3/fixer_base.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Base class for fixers (optional, but recommended)."""
|
| 5 |
+
|
| 6 |
+
# Python imports
|
| 7 |
+
import itertools
|
| 8 |
+
|
| 9 |
+
# Local imports
|
| 10 |
+
from .patcomp import PatternCompiler
|
| 11 |
+
from . import pygram
|
| 12 |
+
from .fixer_util import does_tree_import
|
| 13 |
+
|
| 14 |
+
class BaseFix(object):
|
| 15 |
+
|
| 16 |
+
"""Optional base class for fixers.
|
| 17 |
+
|
| 18 |
+
The subclass name must be FixFooBar where FooBar is the result of
|
| 19 |
+
removing underscores and capitalizing the words of the fix name.
|
| 20 |
+
For example, the class name for a fixer named 'has_key' should be
|
| 21 |
+
FixHasKey.
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
PATTERN = None # Most subclasses should override with a string literal
|
| 25 |
+
pattern = None # Compiled pattern, set by compile_pattern()
|
| 26 |
+
pattern_tree = None # Tree representation of the pattern
|
| 27 |
+
options = None # Options object passed to initializer
|
| 28 |
+
filename = None # The filename (set by set_filename)
|
| 29 |
+
numbers = itertools.count(1) # For new_name()
|
| 30 |
+
used_names = set() # A set of all used NAMEs
|
| 31 |
+
order = "post" # Does the fixer prefer pre- or post-order traversal
|
| 32 |
+
explicit = False # Is this ignored by refactor.py -f all?
|
| 33 |
+
run_order = 5 # Fixers will be sorted by run order before execution
|
| 34 |
+
# Lower numbers will be run first.
|
| 35 |
+
_accept_type = None # [Advanced and not public] This tells RefactoringTool
|
| 36 |
+
# which node type to accept when there's not a pattern.
|
| 37 |
+
|
| 38 |
+
keep_line_order = False # For the bottom matcher: match with the
|
| 39 |
+
# original line order
|
| 40 |
+
BM_compatible = False # Compatibility with the bottom matching
|
| 41 |
+
# module; every fixer should set this
|
| 42 |
+
# manually
|
| 43 |
+
|
| 44 |
+
# Shortcut for access to Python grammar symbols
|
| 45 |
+
syms = pygram.python_symbols
|
| 46 |
+
|
| 47 |
+
def __init__(self, options, log):
|
| 48 |
+
"""Initializer. Subclass may override.
|
| 49 |
+
|
| 50 |
+
Args:
|
| 51 |
+
options: a dict containing the options passed to RefactoringTool
|
| 52 |
+
that could be used to customize the fixer through the command line.
|
| 53 |
+
log: a list to append warnings and other messages to.
|
| 54 |
+
"""
|
| 55 |
+
self.options = options
|
| 56 |
+
self.log = log
|
| 57 |
+
self.compile_pattern()
|
| 58 |
+
|
| 59 |
+
def compile_pattern(self):
|
| 60 |
+
"""Compiles self.PATTERN into self.pattern.
|
| 61 |
+
|
| 62 |
+
Subclass may override if it doesn't want to use
|
| 63 |
+
self.{pattern,PATTERN} in .match().
|
| 64 |
+
"""
|
| 65 |
+
if self.PATTERN is not None:
|
| 66 |
+
PC = PatternCompiler()
|
| 67 |
+
self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
|
| 68 |
+
with_tree=True)
|
| 69 |
+
|
| 70 |
+
def set_filename(self, filename):
|
| 71 |
+
"""Set the filename.
|
| 72 |
+
|
| 73 |
+
The main refactoring tool should call this.
|
| 74 |
+
"""
|
| 75 |
+
self.filename = filename
|
| 76 |
+
|
| 77 |
+
def match(self, node):
|
| 78 |
+
"""Returns match for a given parse tree node.
|
| 79 |
+
|
| 80 |
+
Should return a true or false object (not necessarily a bool).
|
| 81 |
+
It may return a non-empty dict of matching sub-nodes as
|
| 82 |
+
returned by a matching pattern.
|
| 83 |
+
|
| 84 |
+
Subclass may override.
|
| 85 |
+
"""
|
| 86 |
+
results = {"node": node}
|
| 87 |
+
return self.pattern.match(node, results) and results
|
| 88 |
+
|
| 89 |
+
def transform(self, node, results):
|
| 90 |
+
"""Returns the transformation for a given parse tree node.
|
| 91 |
+
|
| 92 |
+
Args:
|
| 93 |
+
node: the root of the parse tree that matched the fixer.
|
| 94 |
+
results: a dict mapping symbolic names to part of the match.
|
| 95 |
+
|
| 96 |
+
Returns:
|
| 97 |
+
None, or a node that is a modified copy of the
|
| 98 |
+
argument node. The node argument may also be modified in-place to
|
| 99 |
+
effect the same change.
|
| 100 |
+
|
| 101 |
+
Subclass *must* override.
|
| 102 |
+
"""
|
| 103 |
+
raise NotImplementedError()
|
| 104 |
+
|
| 105 |
+
def new_name(self, template="xxx_todo_changeme"):
|
| 106 |
+
"""Return a string suitable for use as an identifier
|
| 107 |
+
|
| 108 |
+
The new name is guaranteed not to conflict with other identifiers.
|
| 109 |
+
"""
|
| 110 |
+
name = template
|
| 111 |
+
while name in self.used_names:
|
| 112 |
+
name = template + str(next(self.numbers))
|
| 113 |
+
self.used_names.add(name)
|
| 114 |
+
return name
|
| 115 |
+
|
| 116 |
+
def log_message(self, message):
|
| 117 |
+
if self.first_log:
|
| 118 |
+
self.first_log = False
|
| 119 |
+
self.log.append("### In file %s ###" % self.filename)
|
| 120 |
+
self.log.append(message)
|
| 121 |
+
|
| 122 |
+
def cannot_convert(self, node, reason=None):
|
| 123 |
+
"""Warn the user that a given chunk of code is not valid Python 3,
|
| 124 |
+
but that it cannot be converted automatically.
|
| 125 |
+
|
| 126 |
+
First argument is the top-level node for the code in question.
|
| 127 |
+
Optional second argument is why it can't be converted.
|
| 128 |
+
"""
|
| 129 |
+
lineno = node.get_lineno()
|
| 130 |
+
for_output = node.clone()
|
| 131 |
+
for_output.prefix = ""
|
| 132 |
+
msg = "Line %d: could not convert: %s"
|
| 133 |
+
self.log_message(msg % (lineno, for_output))
|
| 134 |
+
if reason:
|
| 135 |
+
self.log_message(reason)
|
| 136 |
+
|
| 137 |
+
def warning(self, node, reason):
|
| 138 |
+
"""Used for warning the user about possible uncertainty in the
|
| 139 |
+
translation.
|
| 140 |
+
|
| 141 |
+
First argument is the top-level node for the code in question.
|
| 142 |
+
Optional second argument is why it can't be converted.
|
| 143 |
+
"""
|
| 144 |
+
lineno = node.get_lineno()
|
| 145 |
+
self.log_message("Line %d: %s" % (lineno, reason))
|
| 146 |
+
|
| 147 |
+
def start_tree(self, tree, filename):
|
| 148 |
+
"""Some fixers need to maintain tree-wide state.
|
| 149 |
+
This method is called once, at the start of tree fix-up.
|
| 150 |
+
|
| 151 |
+
tree - the root node of the tree to be processed.
|
| 152 |
+
filename - the name of the file the tree came from.
|
| 153 |
+
"""
|
| 154 |
+
self.used_names = tree.used_names
|
| 155 |
+
self.set_filename(filename)
|
| 156 |
+
self.numbers = itertools.count(1)
|
| 157 |
+
self.first_log = True
|
| 158 |
+
|
| 159 |
+
def finish_tree(self, tree, filename):
|
| 160 |
+
"""Some fixers need to maintain tree-wide state.
|
| 161 |
+
This method is called once, at the conclusion of tree fix-up.
|
| 162 |
+
|
| 163 |
+
tree - the root node of the tree to be processed.
|
| 164 |
+
filename - the name of the file the tree came from.
|
| 165 |
+
"""
|
| 166 |
+
pass
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
class ConditionalFix(BaseFix):
|
| 170 |
+
""" Base class for fixers which not execute if an import is found. """
|
| 171 |
+
|
| 172 |
+
# This is the name of the import which, if found, will cause the test to be skipped
|
| 173 |
+
skip_on = None
|
| 174 |
+
|
| 175 |
+
def start_tree(self, *args):
|
| 176 |
+
super(ConditionalFix, self).start_tree(*args)
|
| 177 |
+
self._should_skip = None
|
| 178 |
+
|
| 179 |
+
def should_skip(self, node):
|
| 180 |
+
if self._should_skip is not None:
|
| 181 |
+
return self._should_skip
|
| 182 |
+
pkg = self.skip_on.split(".")
|
| 183 |
+
name = pkg[-1]
|
| 184 |
+
pkg = ".".join(pkg[:-1])
|
| 185 |
+
self._should_skip = does_tree_import(pkg, name, node)
|
| 186 |
+
return self._should_skip
|
deepseek/lib/python3.10/lib2to3/fixer_util.py
ADDED
|
@@ -0,0 +1,453 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions, node construction macros, etc."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .pgen2 import token
|
| 6 |
+
from .pytree import Leaf, Node
|
| 7 |
+
from .pygram import python_symbols as syms
|
| 8 |
+
from . import patcomp
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
###########################################################
|
| 12 |
+
### Common node-construction "macros"
|
| 13 |
+
###########################################################
|
| 14 |
+
|
| 15 |
+
def KeywordArg(keyword, value):
|
| 16 |
+
return Node(syms.argument,
|
| 17 |
+
[keyword, Leaf(token.EQUAL, "="), value])
|
| 18 |
+
|
| 19 |
+
def LParen():
|
| 20 |
+
return Leaf(token.LPAR, "(")
|
| 21 |
+
|
| 22 |
+
def RParen():
|
| 23 |
+
return Leaf(token.RPAR, ")")
|
| 24 |
+
|
| 25 |
+
def Assign(target, source):
|
| 26 |
+
"""Build an assignment statement"""
|
| 27 |
+
if not isinstance(target, list):
|
| 28 |
+
target = [target]
|
| 29 |
+
if not isinstance(source, list):
|
| 30 |
+
source.prefix = " "
|
| 31 |
+
source = [source]
|
| 32 |
+
|
| 33 |
+
return Node(syms.atom,
|
| 34 |
+
target + [Leaf(token.EQUAL, "=", prefix=" ")] + source)
|
| 35 |
+
|
| 36 |
+
def Name(name, prefix=None):
|
| 37 |
+
"""Return a NAME leaf"""
|
| 38 |
+
return Leaf(token.NAME, name, prefix=prefix)
|
| 39 |
+
|
| 40 |
+
def Attr(obj, attr):
|
| 41 |
+
"""A node tuple for obj.attr"""
|
| 42 |
+
return [obj, Node(syms.trailer, [Dot(), attr])]
|
| 43 |
+
|
| 44 |
+
def Comma():
|
| 45 |
+
"""A comma leaf"""
|
| 46 |
+
return Leaf(token.COMMA, ",")
|
| 47 |
+
|
| 48 |
+
def Dot():
|
| 49 |
+
"""A period (.) leaf"""
|
| 50 |
+
return Leaf(token.DOT, ".")
|
| 51 |
+
|
| 52 |
+
def ArgList(args, lparen=LParen(), rparen=RParen()):
|
| 53 |
+
"""A parenthesised argument list, used by Call()"""
|
| 54 |
+
node = Node(syms.trailer, [lparen.clone(), rparen.clone()])
|
| 55 |
+
if args:
|
| 56 |
+
node.insert_child(1, Node(syms.arglist, args))
|
| 57 |
+
return node
|
| 58 |
+
|
| 59 |
+
def Call(func_name, args=None, prefix=None):
|
| 60 |
+
"""A function call"""
|
| 61 |
+
node = Node(syms.power, [func_name, ArgList(args)])
|
| 62 |
+
if prefix is not None:
|
| 63 |
+
node.prefix = prefix
|
| 64 |
+
return node
|
| 65 |
+
|
| 66 |
+
def Newline():
|
| 67 |
+
"""A newline literal"""
|
| 68 |
+
return Leaf(token.NEWLINE, "\n")
|
| 69 |
+
|
| 70 |
+
def BlankLine():
|
| 71 |
+
"""A blank line"""
|
| 72 |
+
return Leaf(token.NEWLINE, "")
|
| 73 |
+
|
| 74 |
+
def Number(n, prefix=None):
|
| 75 |
+
return Leaf(token.NUMBER, n, prefix=prefix)
|
| 76 |
+
|
| 77 |
+
def Subscript(index_node):
|
| 78 |
+
"""A numeric or string subscript"""
|
| 79 |
+
return Node(syms.trailer, [Leaf(token.LBRACE, "["),
|
| 80 |
+
index_node,
|
| 81 |
+
Leaf(token.RBRACE, "]")])
|
| 82 |
+
|
| 83 |
+
def String(string, prefix=None):
|
| 84 |
+
"""A string leaf"""
|
| 85 |
+
return Leaf(token.STRING, string, prefix=prefix)
|
| 86 |
+
|
| 87 |
+
def ListComp(xp, fp, it, test=None):
|
| 88 |
+
"""A list comprehension of the form [xp for fp in it if test].
|
| 89 |
+
|
| 90 |
+
If test is None, the "if test" part is omitted.
|
| 91 |
+
"""
|
| 92 |
+
xp.prefix = ""
|
| 93 |
+
fp.prefix = " "
|
| 94 |
+
it.prefix = " "
|
| 95 |
+
for_leaf = Leaf(token.NAME, "for")
|
| 96 |
+
for_leaf.prefix = " "
|
| 97 |
+
in_leaf = Leaf(token.NAME, "in")
|
| 98 |
+
in_leaf.prefix = " "
|
| 99 |
+
inner_args = [for_leaf, fp, in_leaf, it]
|
| 100 |
+
if test:
|
| 101 |
+
test.prefix = " "
|
| 102 |
+
if_leaf = Leaf(token.NAME, "if")
|
| 103 |
+
if_leaf.prefix = " "
|
| 104 |
+
inner_args.append(Node(syms.comp_if, [if_leaf, test]))
|
| 105 |
+
inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)])
|
| 106 |
+
return Node(syms.atom,
|
| 107 |
+
[Leaf(token.LBRACE, "["),
|
| 108 |
+
inner,
|
| 109 |
+
Leaf(token.RBRACE, "]")])
|
| 110 |
+
|
| 111 |
+
def FromImport(package_name, name_leafs):
|
| 112 |
+
""" Return an import statement in the form:
|
| 113 |
+
from package import name_leafs"""
|
| 114 |
+
# XXX: May not handle dotted imports properly (eg, package_name='foo.bar')
|
| 115 |
+
#assert package_name == '.' or '.' not in package_name, "FromImport has "\
|
| 116 |
+
# "not been tested with dotted package names -- use at your own "\
|
| 117 |
+
# "peril!"
|
| 118 |
+
|
| 119 |
+
for leaf in name_leafs:
|
| 120 |
+
# Pull the leaves out of their old tree
|
| 121 |
+
leaf.remove()
|
| 122 |
+
|
| 123 |
+
children = [Leaf(token.NAME, "from"),
|
| 124 |
+
Leaf(token.NAME, package_name, prefix=" "),
|
| 125 |
+
Leaf(token.NAME, "import", prefix=" "),
|
| 126 |
+
Node(syms.import_as_names, name_leafs)]
|
| 127 |
+
imp = Node(syms.import_from, children)
|
| 128 |
+
return imp
|
| 129 |
+
|
| 130 |
+
def ImportAndCall(node, results, names):
|
| 131 |
+
"""Returns an import statement and calls a method
|
| 132 |
+
of the module:
|
| 133 |
+
|
| 134 |
+
import module
|
| 135 |
+
module.name()"""
|
| 136 |
+
obj = results["obj"].clone()
|
| 137 |
+
if obj.type == syms.arglist:
|
| 138 |
+
newarglist = obj.clone()
|
| 139 |
+
else:
|
| 140 |
+
newarglist = Node(syms.arglist, [obj.clone()])
|
| 141 |
+
after = results["after"]
|
| 142 |
+
if after:
|
| 143 |
+
after = [n.clone() for n in after]
|
| 144 |
+
new = Node(syms.power,
|
| 145 |
+
Attr(Name(names[0]), Name(names[1])) +
|
| 146 |
+
[Node(syms.trailer,
|
| 147 |
+
[results["lpar"].clone(),
|
| 148 |
+
newarglist,
|
| 149 |
+
results["rpar"].clone()])] + after)
|
| 150 |
+
new.prefix = node.prefix
|
| 151 |
+
return new
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
###########################################################
|
| 155 |
+
### Determine whether a node represents a given literal
|
| 156 |
+
###########################################################
|
| 157 |
+
|
| 158 |
+
def is_tuple(node):
|
| 159 |
+
"""Does the node represent a tuple literal?"""
|
| 160 |
+
if isinstance(node, Node) and node.children == [LParen(), RParen()]:
|
| 161 |
+
return True
|
| 162 |
+
return (isinstance(node, Node)
|
| 163 |
+
and len(node.children) == 3
|
| 164 |
+
and isinstance(node.children[0], Leaf)
|
| 165 |
+
and isinstance(node.children[1], Node)
|
| 166 |
+
and isinstance(node.children[2], Leaf)
|
| 167 |
+
and node.children[0].value == "("
|
| 168 |
+
and node.children[2].value == ")")
|
| 169 |
+
|
| 170 |
+
def is_list(node):
|
| 171 |
+
"""Does the node represent a list literal?"""
|
| 172 |
+
return (isinstance(node, Node)
|
| 173 |
+
and len(node.children) > 1
|
| 174 |
+
and isinstance(node.children[0], Leaf)
|
| 175 |
+
and isinstance(node.children[-1], Leaf)
|
| 176 |
+
and node.children[0].value == "["
|
| 177 |
+
and node.children[-1].value == "]")
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
###########################################################
|
| 181 |
+
### Misc
|
| 182 |
+
###########################################################
|
| 183 |
+
|
| 184 |
+
def parenthesize(node):
|
| 185 |
+
return Node(syms.atom, [LParen(), node, RParen()])
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum",
|
| 189 |
+
"min", "max", "enumerate"}
|
| 190 |
+
|
| 191 |
+
def attr_chain(obj, attr):
|
| 192 |
+
"""Follow an attribute chain.
|
| 193 |
+
|
| 194 |
+
If you have a chain of objects where a.foo -> b, b.foo-> c, etc,
|
| 195 |
+
use this to iterate over all objects in the chain. Iteration is
|
| 196 |
+
terminated by getattr(x, attr) is None.
|
| 197 |
+
|
| 198 |
+
Args:
|
| 199 |
+
obj: the starting object
|
| 200 |
+
attr: the name of the chaining attribute
|
| 201 |
+
|
| 202 |
+
Yields:
|
| 203 |
+
Each successive object in the chain.
|
| 204 |
+
"""
|
| 205 |
+
next = getattr(obj, attr)
|
| 206 |
+
while next:
|
| 207 |
+
yield next
|
| 208 |
+
next = getattr(next, attr)
|
| 209 |
+
|
| 210 |
+
p0 = """for_stmt< 'for' any 'in' node=any ':' any* >
|
| 211 |
+
| comp_for< 'for' any 'in' node=any any* >
|
| 212 |
+
"""
|
| 213 |
+
p1 = """
|
| 214 |
+
power<
|
| 215 |
+
( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
|
| 216 |
+
'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) )
|
| 217 |
+
trailer< '(' node=any ')' >
|
| 218 |
+
any*
|
| 219 |
+
>
|
| 220 |
+
"""
|
| 221 |
+
p2 = """
|
| 222 |
+
power<
|
| 223 |
+
( 'sorted' | 'enumerate' )
|
| 224 |
+
trailer< '(' arglist<node=any any*> ')' >
|
| 225 |
+
any*
|
| 226 |
+
>
|
| 227 |
+
"""
|
| 228 |
+
pats_built = False
|
| 229 |
+
def in_special_context(node):
|
| 230 |
+
""" Returns true if node is in an environment where all that is required
|
| 231 |
+
of it is being iterable (ie, it doesn't matter if it returns a list
|
| 232 |
+
or an iterator).
|
| 233 |
+
See test_map_nochange in test_fixers.py for some examples and tests.
|
| 234 |
+
"""
|
| 235 |
+
global p0, p1, p2, pats_built
|
| 236 |
+
if not pats_built:
|
| 237 |
+
p0 = patcomp.compile_pattern(p0)
|
| 238 |
+
p1 = patcomp.compile_pattern(p1)
|
| 239 |
+
p2 = patcomp.compile_pattern(p2)
|
| 240 |
+
pats_built = True
|
| 241 |
+
patterns = [p0, p1, p2]
|
| 242 |
+
for pattern, parent in zip(patterns, attr_chain(node, "parent")):
|
| 243 |
+
results = {}
|
| 244 |
+
if pattern.match(parent, results) and results["node"] is node:
|
| 245 |
+
return True
|
| 246 |
+
return False
|
| 247 |
+
|
| 248 |
+
def is_probably_builtin(node):
|
| 249 |
+
"""
|
| 250 |
+
Check that something isn't an attribute or function name etc.
|
| 251 |
+
"""
|
| 252 |
+
prev = node.prev_sibling
|
| 253 |
+
if prev is not None and prev.type == token.DOT:
|
| 254 |
+
# Attribute lookup.
|
| 255 |
+
return False
|
| 256 |
+
parent = node.parent
|
| 257 |
+
if parent.type in (syms.funcdef, syms.classdef):
|
| 258 |
+
return False
|
| 259 |
+
if parent.type == syms.expr_stmt and parent.children[0] is node:
|
| 260 |
+
# Assignment.
|
| 261 |
+
return False
|
| 262 |
+
if parent.type == syms.parameters or \
|
| 263 |
+
(parent.type == syms.typedargslist and (
|
| 264 |
+
(prev is not None and prev.type == token.COMMA) or
|
| 265 |
+
parent.children[0] is node
|
| 266 |
+
)):
|
| 267 |
+
# The name of an argument.
|
| 268 |
+
return False
|
| 269 |
+
return True
|
| 270 |
+
|
| 271 |
+
def find_indentation(node):
|
| 272 |
+
"""Find the indentation of *node*."""
|
| 273 |
+
while node is not None:
|
| 274 |
+
if node.type == syms.suite and len(node.children) > 2:
|
| 275 |
+
indent = node.children[1]
|
| 276 |
+
if indent.type == token.INDENT:
|
| 277 |
+
return indent.value
|
| 278 |
+
node = node.parent
|
| 279 |
+
return ""
|
| 280 |
+
|
| 281 |
+
###########################################################
|
| 282 |
+
### The following functions are to find bindings in a suite
|
| 283 |
+
###########################################################
|
| 284 |
+
|
| 285 |
+
def make_suite(node):
|
| 286 |
+
if node.type == syms.suite:
|
| 287 |
+
return node
|
| 288 |
+
node = node.clone()
|
| 289 |
+
parent, node.parent = node.parent, None
|
| 290 |
+
suite = Node(syms.suite, [node])
|
| 291 |
+
suite.parent = parent
|
| 292 |
+
return suite
|
| 293 |
+
|
| 294 |
+
def find_root(node):
|
| 295 |
+
"""Find the top level namespace."""
|
| 296 |
+
# Scamper up to the top level namespace
|
| 297 |
+
while node.type != syms.file_input:
|
| 298 |
+
node = node.parent
|
| 299 |
+
if not node:
|
| 300 |
+
raise ValueError("root found before file_input node was found.")
|
| 301 |
+
return node
|
| 302 |
+
|
| 303 |
+
def does_tree_import(package, name, node):
|
| 304 |
+
""" Returns true if name is imported from package at the
|
| 305 |
+
top level of the tree which node belongs to.
|
| 306 |
+
To cover the case of an import like 'import foo', use
|
| 307 |
+
None for the package and 'foo' for the name. """
|
| 308 |
+
binding = find_binding(name, find_root(node), package)
|
| 309 |
+
return bool(binding)
|
| 310 |
+
|
| 311 |
+
def is_import(node):
|
| 312 |
+
"""Returns true if the node is an import statement."""
|
| 313 |
+
return node.type in (syms.import_name, syms.import_from)
|
| 314 |
+
|
| 315 |
+
def touch_import(package, name, node):
|
| 316 |
+
""" Works like `does_tree_import` but adds an import statement
|
| 317 |
+
if it was not imported. """
|
| 318 |
+
def is_import_stmt(node):
|
| 319 |
+
return (node.type == syms.simple_stmt and node.children and
|
| 320 |
+
is_import(node.children[0]))
|
| 321 |
+
|
| 322 |
+
root = find_root(node)
|
| 323 |
+
|
| 324 |
+
if does_tree_import(package, name, root):
|
| 325 |
+
return
|
| 326 |
+
|
| 327 |
+
# figure out where to insert the new import. First try to find
|
| 328 |
+
# the first import and then skip to the last one.
|
| 329 |
+
insert_pos = offset = 0
|
| 330 |
+
for idx, node in enumerate(root.children):
|
| 331 |
+
if not is_import_stmt(node):
|
| 332 |
+
continue
|
| 333 |
+
for offset, node2 in enumerate(root.children[idx:]):
|
| 334 |
+
if not is_import_stmt(node2):
|
| 335 |
+
break
|
| 336 |
+
insert_pos = idx + offset
|
| 337 |
+
break
|
| 338 |
+
|
| 339 |
+
# if there are no imports where we can insert, find the docstring.
|
| 340 |
+
# if that also fails, we stick to the beginning of the file
|
| 341 |
+
if insert_pos == 0:
|
| 342 |
+
for idx, node in enumerate(root.children):
|
| 343 |
+
if (node.type == syms.simple_stmt and node.children and
|
| 344 |
+
node.children[0].type == token.STRING):
|
| 345 |
+
insert_pos = idx + 1
|
| 346 |
+
break
|
| 347 |
+
|
| 348 |
+
if package is None:
|
| 349 |
+
import_ = Node(syms.import_name, [
|
| 350 |
+
Leaf(token.NAME, "import"),
|
| 351 |
+
Leaf(token.NAME, name, prefix=" ")
|
| 352 |
+
])
|
| 353 |
+
else:
|
| 354 |
+
import_ = FromImport(package, [Leaf(token.NAME, name, prefix=" ")])
|
| 355 |
+
|
| 356 |
+
children = [import_, Newline()]
|
| 357 |
+
root.insert_child(insert_pos, Node(syms.simple_stmt, children))
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
_def_syms = {syms.classdef, syms.funcdef}
|
| 361 |
+
def find_binding(name, node, package=None):
|
| 362 |
+
""" Returns the node which binds variable name, otherwise None.
|
| 363 |
+
If optional argument package is supplied, only imports will
|
| 364 |
+
be returned.
|
| 365 |
+
See test cases for examples."""
|
| 366 |
+
for child in node.children:
|
| 367 |
+
ret = None
|
| 368 |
+
if child.type == syms.for_stmt:
|
| 369 |
+
if _find(name, child.children[1]):
|
| 370 |
+
return child
|
| 371 |
+
n = find_binding(name, make_suite(child.children[-1]), package)
|
| 372 |
+
if n: ret = n
|
| 373 |
+
elif child.type in (syms.if_stmt, syms.while_stmt):
|
| 374 |
+
n = find_binding(name, make_suite(child.children[-1]), package)
|
| 375 |
+
if n: ret = n
|
| 376 |
+
elif child.type == syms.try_stmt:
|
| 377 |
+
n = find_binding(name, make_suite(child.children[2]), package)
|
| 378 |
+
if n:
|
| 379 |
+
ret = n
|
| 380 |
+
else:
|
| 381 |
+
for i, kid in enumerate(child.children[3:]):
|
| 382 |
+
if kid.type == token.COLON and kid.value == ":":
|
| 383 |
+
# i+3 is the colon, i+4 is the suite
|
| 384 |
+
n = find_binding(name, make_suite(child.children[i+4]), package)
|
| 385 |
+
if n: ret = n
|
| 386 |
+
elif child.type in _def_syms and child.children[1].value == name:
|
| 387 |
+
ret = child
|
| 388 |
+
elif _is_import_binding(child, name, package):
|
| 389 |
+
ret = child
|
| 390 |
+
elif child.type == syms.simple_stmt:
|
| 391 |
+
ret = find_binding(name, child, package)
|
| 392 |
+
elif child.type == syms.expr_stmt:
|
| 393 |
+
if _find(name, child.children[0]):
|
| 394 |
+
ret = child
|
| 395 |
+
|
| 396 |
+
if ret:
|
| 397 |
+
if not package:
|
| 398 |
+
return ret
|
| 399 |
+
if is_import(ret):
|
| 400 |
+
return ret
|
| 401 |
+
return None
|
| 402 |
+
|
| 403 |
+
_block_syms = {syms.funcdef, syms.classdef, syms.trailer}
|
| 404 |
+
def _find(name, node):
|
| 405 |
+
nodes = [node]
|
| 406 |
+
while nodes:
|
| 407 |
+
node = nodes.pop()
|
| 408 |
+
if node.type > 256 and node.type not in _block_syms:
|
| 409 |
+
nodes.extend(node.children)
|
| 410 |
+
elif node.type == token.NAME and node.value == name:
|
| 411 |
+
return node
|
| 412 |
+
return None
|
| 413 |
+
|
| 414 |
+
def _is_import_binding(node, name, package=None):
|
| 415 |
+
""" Will return node if node will import name, or node
|
| 416 |
+
will import * from package. None is returned otherwise.
|
| 417 |
+
See test cases for examples. """
|
| 418 |
+
|
| 419 |
+
if node.type == syms.import_name and not package:
|
| 420 |
+
imp = node.children[1]
|
| 421 |
+
if imp.type == syms.dotted_as_names:
|
| 422 |
+
for child in imp.children:
|
| 423 |
+
if child.type == syms.dotted_as_name:
|
| 424 |
+
if child.children[2].value == name:
|
| 425 |
+
return node
|
| 426 |
+
elif child.type == token.NAME and child.value == name:
|
| 427 |
+
return node
|
| 428 |
+
elif imp.type == syms.dotted_as_name:
|
| 429 |
+
last = imp.children[-1]
|
| 430 |
+
if last.type == token.NAME and last.value == name:
|
| 431 |
+
return node
|
| 432 |
+
elif imp.type == token.NAME and imp.value == name:
|
| 433 |
+
return node
|
| 434 |
+
elif node.type == syms.import_from:
|
| 435 |
+
# str(...) is used to make life easier here, because
|
| 436 |
+
# from a.b import parses to ['import', ['a', '.', 'b'], ...]
|
| 437 |
+
if package and str(node.children[1]).strip() != package:
|
| 438 |
+
return None
|
| 439 |
+
n = node.children[3]
|
| 440 |
+
if package and _find("as", n):
|
| 441 |
+
# See test_from_import_as for explanation
|
| 442 |
+
return None
|
| 443 |
+
elif n.type == syms.import_as_names and _find(name, n):
|
| 444 |
+
return node
|
| 445 |
+
elif n.type == syms.import_as_name:
|
| 446 |
+
child = n.children[2]
|
| 447 |
+
if child.type == token.NAME and child.value == name:
|
| 448 |
+
return node
|
| 449 |
+
elif n.type == token.NAME and n.value == name:
|
| 450 |
+
return node
|
| 451 |
+
elif package and n.type == token.STAR:
|
| 452 |
+
return node
|
| 453 |
+
return None
|
deepseek/lib/python3.10/lib2to3/fixes/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Dummy file to make this directory a package.
|
deepseek/lib/python3.10/lib2to3/fixes/fix_execfile.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for execfile.
|
| 5 |
+
|
| 6 |
+
This converts usages of the execfile function into calls to the built-in
|
| 7 |
+
exec() function.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
|
| 12 |
+
ArgList, String, syms)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FixExecfile(fixer_base.BaseFix):
|
| 16 |
+
BM_compatible = True
|
| 17 |
+
|
| 18 |
+
PATTERN = """
|
| 19 |
+
power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
|
| 20 |
+
|
|
| 21 |
+
power< 'execfile' trailer< '(' filename=any ')' > >
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
def transform(self, node, results):
|
| 25 |
+
assert results
|
| 26 |
+
filename = results["filename"]
|
| 27 |
+
globals = results.get("globals")
|
| 28 |
+
locals = results.get("locals")
|
| 29 |
+
|
| 30 |
+
# Copy over the prefix from the right parentheses end of the execfile
|
| 31 |
+
# call.
|
| 32 |
+
execfile_paren = node.children[-1].children[-1].clone()
|
| 33 |
+
# Construct open().read().
|
| 34 |
+
open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')],
|
| 35 |
+
rparen=execfile_paren)
|
| 36 |
+
open_call = Node(syms.power, [Name("open"), open_args])
|
| 37 |
+
read = [Node(syms.trailer, [Dot(), Name('read')]),
|
| 38 |
+
Node(syms.trailer, [LParen(), RParen()])]
|
| 39 |
+
open_expr = [open_call] + read
|
| 40 |
+
# Wrap the open call in a compile call. This is so the filename will be
|
| 41 |
+
# preserved in the execed code.
|
| 42 |
+
filename_arg = filename.clone()
|
| 43 |
+
filename_arg.prefix = " "
|
| 44 |
+
exec_str = String("'exec'", " ")
|
| 45 |
+
compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
|
| 46 |
+
compile_call = Call(Name("compile"), compile_args, "")
|
| 47 |
+
# Finally, replace the execfile call with an exec call.
|
| 48 |
+
args = [compile_call]
|
| 49 |
+
if globals is not None:
|
| 50 |
+
args.extend([Comma(), globals.clone()])
|
| 51 |
+
if locals is not None:
|
| 52 |
+
args.extend([Comma(), locals.clone()])
|
| 53 |
+
return Call(Name("exec"), args, prefix=node.prefix)
|
deepseek/lib/python3.10/lib2to3/fixes/fix_exitfunc.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Convert use of sys.exitfunc to use the atexit module.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Author: Benjamin Peterson
|
| 6 |
+
|
| 7 |
+
from lib2to3 import pytree, fixer_base
|
| 8 |
+
from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixExitfunc(fixer_base.BaseFix):
|
| 12 |
+
keep_line_order = True
|
| 13 |
+
BM_compatible = True
|
| 14 |
+
|
| 15 |
+
PATTERN = """
|
| 16 |
+
(
|
| 17 |
+
sys_import=import_name<'import'
|
| 18 |
+
('sys'
|
| 19 |
+
|
|
| 20 |
+
dotted_as_names< (any ',')* 'sys' (',' any)* >
|
| 21 |
+
)
|
| 22 |
+
>
|
| 23 |
+
|
|
| 24 |
+
expr_stmt<
|
| 25 |
+
power< 'sys' trailer< '.' 'exitfunc' > >
|
| 26 |
+
'=' func=any >
|
| 27 |
+
)
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
def __init__(self, *args):
|
| 31 |
+
super(FixExitfunc, self).__init__(*args)
|
| 32 |
+
|
| 33 |
+
def start_tree(self, tree, filename):
|
| 34 |
+
super(FixExitfunc, self).start_tree(tree, filename)
|
| 35 |
+
self.sys_import = None
|
| 36 |
+
|
| 37 |
+
def transform(self, node, results):
|
| 38 |
+
# First, find the sys import. We'll just hope it's global scope.
|
| 39 |
+
if "sys_import" in results:
|
| 40 |
+
if self.sys_import is None:
|
| 41 |
+
self.sys_import = results["sys_import"]
|
| 42 |
+
return
|
| 43 |
+
|
| 44 |
+
func = results["func"].clone()
|
| 45 |
+
func.prefix = ""
|
| 46 |
+
register = pytree.Node(syms.power,
|
| 47 |
+
Attr(Name("atexit"), Name("register"))
|
| 48 |
+
)
|
| 49 |
+
call = Call(register, [func], node.prefix)
|
| 50 |
+
node.replace(call)
|
| 51 |
+
|
| 52 |
+
if self.sys_import is None:
|
| 53 |
+
# That's interesting.
|
| 54 |
+
self.warning(node, "Can't find sys import; Please add an atexit "
|
| 55 |
+
"import at the top of your file.")
|
| 56 |
+
return
|
| 57 |
+
|
| 58 |
+
# Now add an atexit import after the sys import.
|
| 59 |
+
names = self.sys_import.children[1]
|
| 60 |
+
if names.type == syms.dotted_as_names:
|
| 61 |
+
names.append_child(Comma())
|
| 62 |
+
names.append_child(Name("atexit", " "))
|
| 63 |
+
else:
|
| 64 |
+
containing_stmt = self.sys_import.parent
|
| 65 |
+
position = containing_stmt.children.index(self.sys_import)
|
| 66 |
+
stmt_container = containing_stmt.parent
|
| 67 |
+
new_import = pytree.Node(syms.import_name,
|
| 68 |
+
[Name("import"), Name("atexit", " ")]
|
| 69 |
+
)
|
| 70 |
+
new = pytree.Node(syms.simple_stmt, [new_import])
|
| 71 |
+
containing_stmt.insert_child(position + 1, Newline())
|
| 72 |
+
containing_stmt.insert_child(position + 2, new)
|
deepseek/lib/python3.10/lib2to3/fixes/fix_idioms.py
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Adjust some old Python 2 idioms to their modern counterparts.
|
| 2 |
+
|
| 3 |
+
* Change some type comparisons to isinstance() calls:
|
| 4 |
+
type(x) == T -> isinstance(x, T)
|
| 5 |
+
type(x) is T -> isinstance(x, T)
|
| 6 |
+
type(x) != T -> not isinstance(x, T)
|
| 7 |
+
type(x) is not T -> not isinstance(x, T)
|
| 8 |
+
|
| 9 |
+
* Change "while 1:" into "while True:".
|
| 10 |
+
|
| 11 |
+
* Change both
|
| 12 |
+
|
| 13 |
+
v = list(EXPR)
|
| 14 |
+
v.sort()
|
| 15 |
+
foo(v)
|
| 16 |
+
|
| 17 |
+
and the more general
|
| 18 |
+
|
| 19 |
+
v = EXPR
|
| 20 |
+
v.sort()
|
| 21 |
+
foo(v)
|
| 22 |
+
|
| 23 |
+
into
|
| 24 |
+
|
| 25 |
+
v = sorted(EXPR)
|
| 26 |
+
foo(v)
|
| 27 |
+
"""
|
| 28 |
+
# Author: Jacques Frechet, Collin Winter
|
| 29 |
+
|
| 30 |
+
# Local imports
|
| 31 |
+
from .. import fixer_base
|
| 32 |
+
from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms
|
| 33 |
+
|
| 34 |
+
CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
|
| 35 |
+
TYPE = "power< 'type' trailer< '(' x=any ')' > >"
|
| 36 |
+
|
| 37 |
+
class FixIdioms(fixer_base.BaseFix):
|
| 38 |
+
explicit = True # The user must ask for this fixer
|
| 39 |
+
|
| 40 |
+
PATTERN = r"""
|
| 41 |
+
isinstance=comparison< %s %s T=any >
|
| 42 |
+
|
|
| 43 |
+
isinstance=comparison< T=any %s %s >
|
| 44 |
+
|
|
| 45 |
+
while_stmt< 'while' while='1' ':' any+ >
|
| 46 |
+
|
|
| 47 |
+
sorted=any<
|
| 48 |
+
any*
|
| 49 |
+
simple_stmt<
|
| 50 |
+
expr_stmt< id1=any '='
|
| 51 |
+
power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
|
| 52 |
+
>
|
| 53 |
+
'\n'
|
| 54 |
+
>
|
| 55 |
+
sort=
|
| 56 |
+
simple_stmt<
|
| 57 |
+
power< id2=any
|
| 58 |
+
trailer< '.' 'sort' > trailer< '(' ')' >
|
| 59 |
+
>
|
| 60 |
+
'\n'
|
| 61 |
+
>
|
| 62 |
+
next=any*
|
| 63 |
+
>
|
| 64 |
+
|
|
| 65 |
+
sorted=any<
|
| 66 |
+
any*
|
| 67 |
+
simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
|
| 68 |
+
sort=
|
| 69 |
+
simple_stmt<
|
| 70 |
+
power< id2=any
|
| 71 |
+
trailer< '.' 'sort' > trailer< '(' ')' >
|
| 72 |
+
>
|
| 73 |
+
'\n'
|
| 74 |
+
>
|
| 75 |
+
next=any*
|
| 76 |
+
>
|
| 77 |
+
""" % (TYPE, CMP, CMP, TYPE)
|
| 78 |
+
|
| 79 |
+
def match(self, node):
|
| 80 |
+
r = super(FixIdioms, self).match(node)
|
| 81 |
+
# If we've matched one of the sort/sorted subpatterns above, we
|
| 82 |
+
# want to reject matches where the initial assignment and the
|
| 83 |
+
# subsequent .sort() call involve different identifiers.
|
| 84 |
+
if r and "sorted" in r:
|
| 85 |
+
if r["id1"] == r["id2"]:
|
| 86 |
+
return r
|
| 87 |
+
return None
|
| 88 |
+
return r
|
| 89 |
+
|
| 90 |
+
def transform(self, node, results):
|
| 91 |
+
if "isinstance" in results:
|
| 92 |
+
return self.transform_isinstance(node, results)
|
| 93 |
+
elif "while" in results:
|
| 94 |
+
return self.transform_while(node, results)
|
| 95 |
+
elif "sorted" in results:
|
| 96 |
+
return self.transform_sort(node, results)
|
| 97 |
+
else:
|
| 98 |
+
raise RuntimeError("Invalid match")
|
| 99 |
+
|
| 100 |
+
def transform_isinstance(self, node, results):
|
| 101 |
+
x = results["x"].clone() # The thing inside of type()
|
| 102 |
+
T = results["T"].clone() # The type being compared against
|
| 103 |
+
x.prefix = ""
|
| 104 |
+
T.prefix = " "
|
| 105 |
+
test = Call(Name("isinstance"), [x, Comma(), T])
|
| 106 |
+
if "n" in results:
|
| 107 |
+
test.prefix = " "
|
| 108 |
+
test = Node(syms.not_test, [Name("not"), test])
|
| 109 |
+
test.prefix = node.prefix
|
| 110 |
+
return test
|
| 111 |
+
|
| 112 |
+
def transform_while(self, node, results):
|
| 113 |
+
one = results["while"]
|
| 114 |
+
one.replace(Name("True", prefix=one.prefix))
|
| 115 |
+
|
| 116 |
+
def transform_sort(self, node, results):
|
| 117 |
+
sort_stmt = results["sort"]
|
| 118 |
+
next_stmt = results["next"]
|
| 119 |
+
list_call = results.get("list")
|
| 120 |
+
simple_expr = results.get("expr")
|
| 121 |
+
|
| 122 |
+
if list_call:
|
| 123 |
+
list_call.replace(Name("sorted", prefix=list_call.prefix))
|
| 124 |
+
elif simple_expr:
|
| 125 |
+
new = simple_expr.clone()
|
| 126 |
+
new.prefix = ""
|
| 127 |
+
simple_expr.replace(Call(Name("sorted"), [new],
|
| 128 |
+
prefix=simple_expr.prefix))
|
| 129 |
+
else:
|
| 130 |
+
raise RuntimeError("should not have reached here")
|
| 131 |
+
sort_stmt.remove()
|
| 132 |
+
|
| 133 |
+
btwn = sort_stmt.prefix
|
| 134 |
+
# Keep any prefix lines between the sort_stmt and the list_call and
|
| 135 |
+
# shove them right after the sorted() call.
|
| 136 |
+
if "\n" in btwn:
|
| 137 |
+
if next_stmt:
|
| 138 |
+
# The new prefix should be everything from the sort_stmt's
|
| 139 |
+
# prefix up to the last newline, then the old prefix after a new
|
| 140 |
+
# line.
|
| 141 |
+
prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix)
|
| 142 |
+
next_stmt[0].prefix = "\n".join(prefix_lines)
|
| 143 |
+
else:
|
| 144 |
+
assert list_call.parent
|
| 145 |
+
assert list_call.next_sibling is None
|
| 146 |
+
# Put a blank line after list_call and set its prefix.
|
| 147 |
+
end_line = BlankLine()
|
| 148 |
+
list_call.parent.append_child(end_line)
|
| 149 |
+
assert list_call.next_sibling is end_line
|
| 150 |
+
# The new prefix should be everything up to the first new line
|
| 151 |
+
# of sort_stmt's prefix.
|
| 152 |
+
end_line.prefix = btwn.rpartition("\n")[0]
|
deepseek/lib/python3.10/lib2to3/fixes/fix_imports.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible imports and module references."""
|
| 2 |
+
# Authors: Collin Winter, Nick Edds
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name, attr_chain
|
| 7 |
+
|
| 8 |
+
MAPPING = {'StringIO': 'io',
|
| 9 |
+
'cStringIO': 'io',
|
| 10 |
+
'cPickle': 'pickle',
|
| 11 |
+
'__builtin__' : 'builtins',
|
| 12 |
+
'copy_reg': 'copyreg',
|
| 13 |
+
'Queue': 'queue',
|
| 14 |
+
'SocketServer': 'socketserver',
|
| 15 |
+
'ConfigParser': 'configparser',
|
| 16 |
+
'repr': 'reprlib',
|
| 17 |
+
'FileDialog': 'tkinter.filedialog',
|
| 18 |
+
'tkFileDialog': 'tkinter.filedialog',
|
| 19 |
+
'SimpleDialog': 'tkinter.simpledialog',
|
| 20 |
+
'tkSimpleDialog': 'tkinter.simpledialog',
|
| 21 |
+
'tkColorChooser': 'tkinter.colorchooser',
|
| 22 |
+
'tkCommonDialog': 'tkinter.commondialog',
|
| 23 |
+
'Dialog': 'tkinter.dialog',
|
| 24 |
+
'Tkdnd': 'tkinter.dnd',
|
| 25 |
+
'tkFont': 'tkinter.font',
|
| 26 |
+
'tkMessageBox': 'tkinter.messagebox',
|
| 27 |
+
'ScrolledText': 'tkinter.scrolledtext',
|
| 28 |
+
'Tkconstants': 'tkinter.constants',
|
| 29 |
+
'Tix': 'tkinter.tix',
|
| 30 |
+
'ttk': 'tkinter.ttk',
|
| 31 |
+
'Tkinter': 'tkinter',
|
| 32 |
+
'markupbase': '_markupbase',
|
| 33 |
+
'_winreg': 'winreg',
|
| 34 |
+
'thread': '_thread',
|
| 35 |
+
'dummy_thread': '_dummy_thread',
|
| 36 |
+
# anydbm and whichdb are handled by fix_imports2
|
| 37 |
+
'dbhash': 'dbm.bsd',
|
| 38 |
+
'dumbdbm': 'dbm.dumb',
|
| 39 |
+
'dbm': 'dbm.ndbm',
|
| 40 |
+
'gdbm': 'dbm.gnu',
|
| 41 |
+
'xmlrpclib': 'xmlrpc.client',
|
| 42 |
+
'DocXMLRPCServer': 'xmlrpc.server',
|
| 43 |
+
'SimpleXMLRPCServer': 'xmlrpc.server',
|
| 44 |
+
'httplib': 'http.client',
|
| 45 |
+
'htmlentitydefs' : 'html.entities',
|
| 46 |
+
'HTMLParser' : 'html.parser',
|
| 47 |
+
'Cookie': 'http.cookies',
|
| 48 |
+
'cookielib': 'http.cookiejar',
|
| 49 |
+
'BaseHTTPServer': 'http.server',
|
| 50 |
+
'SimpleHTTPServer': 'http.server',
|
| 51 |
+
'CGIHTTPServer': 'http.server',
|
| 52 |
+
#'test.test_support': 'test.support',
|
| 53 |
+
'commands': 'subprocess',
|
| 54 |
+
'UserString' : 'collections',
|
| 55 |
+
'UserList' : 'collections',
|
| 56 |
+
'urlparse' : 'urllib.parse',
|
| 57 |
+
'robotparser' : 'urllib.robotparser',
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def alternates(members):
|
| 62 |
+
return "(" + "|".join(map(repr, members)) + ")"
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def build_pattern(mapping=MAPPING):
|
| 66 |
+
mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
|
| 67 |
+
bare_names = alternates(mapping.keys())
|
| 68 |
+
|
| 69 |
+
yield """name_import=import_name< 'import' ((%s) |
|
| 70 |
+
multiple_imports=dotted_as_names< any* (%s) any* >) >
|
| 71 |
+
""" % (mod_list, mod_list)
|
| 72 |
+
yield """import_from< 'from' (%s) 'import' ['(']
|
| 73 |
+
( any | import_as_name< any 'as' any > |
|
| 74 |
+
import_as_names< any* >) [')'] >
|
| 75 |
+
""" % mod_list
|
| 76 |
+
yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
|
| 77 |
+
multiple_imports=dotted_as_names<
|
| 78 |
+
any* dotted_as_name< (%s) 'as' any > any* >) >
|
| 79 |
+
""" % (mod_list, mod_list)
|
| 80 |
+
|
| 81 |
+
# Find usages of module members in code e.g. thread.foo(bar)
|
| 82 |
+
yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class FixImports(fixer_base.BaseFix):
|
| 86 |
+
|
| 87 |
+
BM_compatible = True
|
| 88 |
+
keep_line_order = True
|
| 89 |
+
# This is overridden in fix_imports2.
|
| 90 |
+
mapping = MAPPING
|
| 91 |
+
|
| 92 |
+
# We want to run this fixer late, so fix_import doesn't try to make stdlib
|
| 93 |
+
# renames into relative imports.
|
| 94 |
+
run_order = 6
|
| 95 |
+
|
| 96 |
+
def build_pattern(self):
|
| 97 |
+
return "|".join(build_pattern(self.mapping))
|
| 98 |
+
|
| 99 |
+
def compile_pattern(self):
|
| 100 |
+
# We override this, so MAPPING can be pragmatically altered and the
|
| 101 |
+
# changes will be reflected in PATTERN.
|
| 102 |
+
self.PATTERN = self.build_pattern()
|
| 103 |
+
super(FixImports, self).compile_pattern()
|
| 104 |
+
|
| 105 |
+
# Don't match the node if it's within another match.
|
| 106 |
+
def match(self, node):
|
| 107 |
+
match = super(FixImports, self).match
|
| 108 |
+
results = match(node)
|
| 109 |
+
if results:
|
| 110 |
+
# Module usage could be in the trailer of an attribute lookup, so we
|
| 111 |
+
# might have nested matches when "bare_with_attr" is present.
|
| 112 |
+
if "bare_with_attr" not in results and \
|
| 113 |
+
any(match(obj) for obj in attr_chain(node, "parent")):
|
| 114 |
+
return False
|
| 115 |
+
return results
|
| 116 |
+
return False
|
| 117 |
+
|
| 118 |
+
def start_tree(self, tree, filename):
|
| 119 |
+
super(FixImports, self).start_tree(tree, filename)
|
| 120 |
+
self.replace = {}
|
| 121 |
+
|
| 122 |
+
def transform(self, node, results):
|
| 123 |
+
import_mod = results.get("module_name")
|
| 124 |
+
if import_mod:
|
| 125 |
+
mod_name = import_mod.value
|
| 126 |
+
new_name = self.mapping[mod_name]
|
| 127 |
+
import_mod.replace(Name(new_name, prefix=import_mod.prefix))
|
| 128 |
+
if "name_import" in results:
|
| 129 |
+
# If it's not a "from x import x, y" or "import x as y" import,
|
| 130 |
+
# marked its usage to be replaced.
|
| 131 |
+
self.replace[mod_name] = new_name
|
| 132 |
+
if "multiple_imports" in results:
|
| 133 |
+
# This is a nasty hack to fix multiple imports on a line (e.g.,
|
| 134 |
+
# "import StringIO, urlparse"). The problem is that I can't
|
| 135 |
+
# figure out an easy way to make a pattern recognize the keys of
|
| 136 |
+
# MAPPING randomly sprinkled in an import statement.
|
| 137 |
+
results = self.match(node)
|
| 138 |
+
if results:
|
| 139 |
+
self.transform(node, results)
|
| 140 |
+
else:
|
| 141 |
+
# Replace usage of the module.
|
| 142 |
+
bare_name = results["bare_with_attr"][0]
|
| 143 |
+
new_name = self.replace.get(bare_name.value)
|
| 144 |
+
if new_name:
|
| 145 |
+
bare_name.replace(Name(new_name, prefix=bare_name.prefix))
|
deepseek/lib/python3.10/lib2to3/fixes/fix_imports2.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible imports and module references that must be fixed after
|
| 2 |
+
fix_imports."""
|
| 3 |
+
from . import fix_imports
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
MAPPING = {
|
| 7 |
+
'whichdb': 'dbm',
|
| 8 |
+
'anydbm': 'dbm',
|
| 9 |
+
}
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixImports2(fix_imports.FixImports):
|
| 13 |
+
|
| 14 |
+
run_order = 7
|
| 15 |
+
|
| 16 |
+
mapping = MAPPING
|
deepseek/lib/python3.10/lib2to3/fixes/fix_intern.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Georg Brandl.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for intern().
|
| 5 |
+
|
| 6 |
+
intern(s) -> sys.intern(s)"""
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
from ..fixer_util import ImportAndCall, touch_import
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class FixIntern(fixer_base.BaseFix):
|
| 14 |
+
BM_compatible = True
|
| 15 |
+
order = "pre"
|
| 16 |
+
|
| 17 |
+
PATTERN = """
|
| 18 |
+
power< 'intern'
|
| 19 |
+
trailer< lpar='('
|
| 20 |
+
( not(arglist | argument<any '=' any>) obj=any
|
| 21 |
+
| obj=arglist<(not argument<any '=' any>) any ','> )
|
| 22 |
+
rpar=')' >
|
| 23 |
+
after=any*
|
| 24 |
+
>
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
def transform(self, node, results):
|
| 28 |
+
if results:
|
| 29 |
+
# I feel like we should be able to express this logic in the
|
| 30 |
+
# PATTERN above but I don't know how to do it so...
|
| 31 |
+
obj = results['obj']
|
| 32 |
+
if obj:
|
| 33 |
+
if (obj.type == self.syms.argument and
|
| 34 |
+
obj.children[0].value in {'**', '*'}):
|
| 35 |
+
return # Make no change.
|
| 36 |
+
names = ('sys', 'intern')
|
| 37 |
+
new = ImportAndCall(node, results, names)
|
| 38 |
+
touch_import(None, 'sys', node)
|
| 39 |
+
return new
|
deepseek/lib/python3.10/lib2to3/fixes/fix_itertools.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
|
| 2 |
+
itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
|
| 3 |
+
|
| 4 |
+
imports from itertools are fixed in fix_itertools_import.py
|
| 5 |
+
|
| 6 |
+
If itertools is imported as something else (ie: import itertools as it;
|
| 7 |
+
it.izip(spam, eggs)) method calls will not get fixed.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Local imports
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name
|
| 13 |
+
|
| 14 |
+
class FixItertools(fixer_base.BaseFix):
    """Strip the leading 'i' from itertools.i* calls, dropping the
    'itertools.' qualifier where the plain builtin (map/filter/zip) exists."""
    BM_compatible = True
    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    # First alternative: qualified call `itertools.ifunc(...)`.
    # Second alternative: bare call `ifunc(...)` (e.g. after a
    # `from itertools import imap`).
    PATTERN = """
              power< it='itertools'
                  trailer<
                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
              |
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())

    # Needs to be run after fix_(map|zip|filter)
    run_order = 6

    def transform(self, node, results):
        prefix = None
        func = results['func'][0]
        # ifilterfalse/izip_longest stay inside itertools (as filterfalse/
        # zip_longest), so the 'itertools.' qualifier is kept for them.
        if ('it' in results and
            func.value not in ('ifilterfalse', 'izip_longest')):
            dot, it = (results['dot'], results['it'])
            # Remove the 'itertools'
            prefix = it.prefix
            it.remove()
            # Replace the node which contains ('.', 'function') with the
            # function (to be consistent with the second part of the pattern)
            dot.remove()
            func.parent.replace(func)

        # Preserve whatever whitespace preceded the original expression.
        prefix = prefix or func.prefix
        func.replace(Name(func.value[1:], prefix=prefix))
|
deepseek/lib/python3.10/lib2to3/fixes/fix_next.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for it.next() -> next(it), per PEP 3114."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Things that currently aren't covered:
|
| 5 |
+
# - listcomp "next" names aren't warned
|
| 6 |
+
# - "with" statement targets aren't checked
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from ..pgen2 import token
|
| 10 |
+
from ..pygram import python_symbols as syms
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name, Call, find_binding
|
| 13 |
+
|
| 14 |
+
bind_warning = "Calls to builtin next() possibly shadowed by global binding"
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class FixNext(fixer_base.BaseFix):
    """Rewrite it.next() as next(it) and `def next` methods as __next__,
    warning when a global binding of `next` could shadow the builtin."""
    BM_compatible = True
    # Four alternatives, in order: a call `x.next()`; a non-call reference
    # `x.next`; a class defining a one-argument `next` method; and a
    # `global ... next ...` statement.
    PATTERN = """
    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
    |
    power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
    |
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def'
                              name='next'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    |
    global=global_stmt< 'global' any* 'next' any* >
    """

    order = "pre" # Pre-order tree traversal

    def start_tree(self, tree, filename):
        """Record, per tree, whether `next` is bound at module level."""
        super(FixNext, self).start_tree(tree, filename)

        n = find_binding('next', tree)
        if n:
            self.warning(n, bind_warning)
            self.shadowed_next = True
        else:
            self.shadowed_next = False

    def transform(self, node, results):
        assert results

        base = results.get("base")
        attr = results.get("attr")
        name = results.get("name")

        if base:
            # Matched a call `x.next()`.
            if self.shadowed_next:
                # Builtin next() may be shadowed: keep method-call form.
                attr.replace(Name("__next__", prefix=attr.prefix))
            else:
                base = [n.clone() for n in base]
                base[0].prefix = ""
                node.replace(Call(Name("next", prefix=node.prefix), base))
        elif name:
            # Matched a `def next(self)` method definition.
            n = Name("__next__", prefix=name.prefix)
            name.replace(n)
        elif attr:
            # We don't do this transformation if we're assigning to "x.next".
            # Unfortunately, it doesn't seem possible to do this in PATTERN,
            # so it's being done here.
            if is_assign_target(node):
                head = results["head"]
                if "".join([str(n) for n in head]).strip() == '__builtin__':
                    self.warning(node, bind_warning)
                return
            attr.replace(Name("__next__"))
        elif "global" in results:
            # `global next` makes any later bare next() call suspect.
            self.warning(node, bind_warning)
            self.shadowed_next = True
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
### The following functions help test if node is part of an assignment
|
| 79 |
+
### target.
|
| 80 |
+
|
| 81 |
+
def is_assign_target(node):
    """Return True if *node* appears on the left-hand side of the nearest
    enclosing assignment statement, False otherwise."""
    assign = find_assign(node)
    if assign is None:
        return False

    for sibling in assign.children:
        # Children before the first '=' are assignment targets; once an
        # '=' is seen, everything after it is the assigned value.
        if sibling.type == token.EQUAL:
            return False
        if is_subtree(sibling, node):
            return True
    return False
|
| 92 |
+
|
| 93 |
+
def find_assign(node):
    """Walk up from *node* and return the nearest enclosing expr_stmt
    (a candidate assignment), or None if a simple_stmt or the tree root
    is reached first."""
    current = node
    while True:
        if current.type == syms.expr_stmt:
            return current
        if current.type == syms.simple_stmt or current.parent is None:
            return None
        current = current.parent
|
| 99 |
+
|
| 100 |
+
def is_subtree(root, node):
    """Return True if *node* compares equal to *root* or to any node in
    root's (recursive) children."""
    if root == node:
        return True
    for child in root.children:
        if is_subtree(child, node):
            return True
    return False
|
deepseek/lib/python3.10/lib2to3/fixes/fix_numliterals.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that turns 1L into 1, 0755 into 0o755.
|
| 2 |
+
"""
|
| 3 |
+
# Copyright 2007 Georg Brandl.
|
| 4 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from ..pgen2 import token
|
| 8 |
+
from .. import fixer_base
|
| 9 |
+
from ..fixer_util import Number
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixNumliterals(fixer_base.BaseFix):
    """Rewrite Python 2 numeric literals: drop the L/l suffix and turn
    leading-zero octals (0755) into the 0o form (0o755)."""
    # Simple enough that we match raw NUMBER tokens directly instead of
    # going through the pattern compiler.

    _accept_type = token.NUMBER

    def match(self, node):
        # Override: candidates are literals that start with '0' (possible
        # old-style octal) or end in L/l (long literal).
        value = node.value
        return value.startswith("0") or value[-1] in "Ll"

    def transform(self, node, results):
        text = node.value
        if text[-1] in 'Ll':
            # 123L / 123l  ->  123
            text = text[:-1]
        elif text.startswith('0') and text.isdigit() and len(set(text)) > 1:
            # 0755 -> 0o755; a literal of all zeros (e.g. "0", "00") is
            # left untouched by the len(set(...)) > 1 guard.
            text = "0o" + text[1:]

        return Number(text, prefix=node.prefix)
|
deepseek/lib/python3.10/lib2to3/fixes/fix_reduce.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2008 Armin Ronacher.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for reduce().
|
| 5 |
+
|
| 6 |
+
Makes sure reduce() is imported from the functools module if reduce is
|
| 7 |
+
used in that module.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from lib2to3 import fixer_base
|
| 11 |
+
from lib2to3.fixer_util import touch_import
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FixReduce(fixer_base.BaseFix):
    """Ensure `from functools import reduce` exists wherever reduce() is
    called; the call site itself is left unchanged."""

    BM_compatible = True
    order = "pre"

    # Matches reduce(f, seq) or reduce(f, seq, init) with purely
    # positional arguments; keyword arguments are excluded.
    PATTERN = """
    power< 'reduce'
        trailer< '('
            arglist< (
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any) |
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any)
            ) >
        ')' >
    >
    """

    def transform(self, node, results):
        # Only the import is added; touch_import is a no-op if present.
        touch_import('functools', 'reduce', node)
|
deepseek/lib/python3.10/lib2to3/fixes/fix_types.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for removing uses of the types module.
|
| 5 |
+
|
| 6 |
+
These work for only the known names in the types module. The forms above
|
| 7 |
+
can include types. or not. ie, It is assumed the module is imported either as:
|
| 8 |
+
|
| 9 |
+
import types
|
| 10 |
+
from types import ... # either * or specific types
|
| 11 |
+
|
| 12 |
+
The import statements are not modified.
|
| 13 |
+
|
| 14 |
+
There should be another fixer that handles at least the following constants:
|
| 15 |
+
|
| 16 |
+
type([]) -> list
|
| 17 |
+
type(()) -> tuple
|
| 18 |
+
type('') -> str
|
| 19 |
+
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
# Local imports
|
| 23 |
+
from .. import fixer_base
|
| 24 |
+
from ..fixer_util import Name
|
| 25 |
+
|
| 26 |
+
# Mapping from `types` module attribute names to their Python 3
# replacement expressions. NOTE(review): StringType -> 'bytes' and
# StringTypes -> '(str,)' carry the original "XXX ?" uncertainty.
_TYPE_MAPPING = {
    'BooleanType': 'bool',
    'BufferType': 'memoryview',
    'ClassType': 'type',
    'ComplexType': 'complex',
    'DictType': 'dict',
    'DictionaryType': 'dict',
    'EllipsisType': 'type(Ellipsis)',
    #'FileType' : 'io.IOBase',
    'FloatType': 'float',
    'IntType': 'int',
    'ListType': 'list',
    'LongType': 'int',
    'ObjectType': 'object',
    'NoneType': 'type(None)',
    'NotImplementedType': 'type(NotImplemented)',
    'SliceType': 'slice',
    'StringType': 'bytes',  # XXX ?
    'StringTypes': '(str,)',  # XXX ?
    'TupleType': 'tuple',
    'TypeType': 'type',
    'UnicodeType': 'str',
    'XRangeType': 'range',
}

# One pattern alternative per known attribute, matching `types.<name>`.
_pats = ["power< 'types' trailer< '.' name='%s' > >" % type_name
         for type_name in _TYPE_MAPPING]


class FixTypes(fixer_base.BaseFix):
    """Replace `types.SomeType` references with their builtin equivalent."""
    BM_compatible = True
    PATTERN = '|'.join(_pats)

    def transform(self, node, results):
        replacement = _TYPE_MAPPING.get(results["name"].value)
        if replacement is None:
            return None
        return Name(replacement, prefix=node.prefix)
|
deepseek/lib/python3.10/lib2to3/fixes/fix_unicode.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Fixer for unicode.
|
| 2 |
+
|
| 3 |
+
* Changes unicode to str and unichr to chr.
|
| 4 |
+
|
| 5 |
+
* If "...\u..." is not unicode literal change it into "...\\u...".
|
| 6 |
+
|
| 7 |
+
* Change u"..." into "...".
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from ..pgen2 import token
|
| 12 |
+
from .. import fixer_base
|
| 13 |
+
|
| 14 |
+
# Name rewrites handled by this fixer.
_mapping = {"unichr" : "chr", "unicode" : "str"}

class FixUnicode(fixer_base.BaseFix):
    """Rename unicode/unichr and strip u prefixes from string literals,
    escaping \\u / \\U sequences that are no longer unicode escapes."""
    BM_compatible = True
    PATTERN = "STRING | 'unicode' | 'unichr'"

    def start_tree(self, tree, filename):
        """Remember whether this module used `from __future__ import
        unicode_literals` (if so, \\u escapes keep their meaning)."""
        super(FixUnicode, self).start_tree(tree, filename)
        self.unicode_literals = 'unicode_literals' in tree.future_features

    def transform(self, node, results):
        if node.type == token.NAME:
            # unicode -> str, unichr -> chr.
            new = node.clone()
            new.value = _mapping[node.value]
            return new
        elif node.type == token.STRING:
            val = node.value
            # In a plain (non-u, non-raw) py2 literal, \u is just a
            # backslash followed by 'u'; in py3 it becomes a unicode
            # escape, so it must be doubled. Splitting on an existing
            # r'\\' keeps already-escaped backslashes intact.
            if not self.unicode_literals and val[0] in '\'"' and '\\' in val:
                val = r'\\'.join([
                    v.replace('\\u', r'\\u').replace('\\U', r'\\U')
                    for v in val.split(r'\\')
                ])
            if val[0] in 'uU':
                # Drop the now-redundant u/U prefix.
                val = val[1:]
            if val == node.value:
                # Nothing changed; return the original node untouched.
                return node
            new = node.clone()
            new.value = val
            return new
|
deepseek/lib/python3.10/lib2to3/pytree.py
ADDED
|
@@ -0,0 +1,853 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""
|
| 5 |
+
Python parse tree definitions.
|
| 6 |
+
|
| 7 |
+
This is a very concrete parse tree; we need to keep every token and
|
| 8 |
+
even the comments and whitespace between tokens.
|
| 9 |
+
|
| 10 |
+
There's also a pattern matching implementation here.
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
__author__ = "Guido van Rossum <guido@python.org>"
|
| 14 |
+
|
| 15 |
+
import sys
|
| 16 |
+
from io import StringIO
|
| 17 |
+
|
| 18 |
+
HUGE = 0x7FFFFFFF # maximum repeat count, default max
|
| 19 |
+
|
| 20 |
+
# Cache mapping symbol numbers to their grammar names, filled lazily on
# the first type_repr() call (pygram import is deferred to avoid an
# import cycle at module load time).
_type_reprs = {}
def type_repr(type_num):
    """Return a human-readable name for *type_num*, or the number itself
    if it has no known symbol name."""
    global _type_reprs
    if not _type_reprs:
        from .pygram import python_symbols
        # printing tokens is possible but not as useful
        # from .pgen2 import token // token.__dict__.items():
        for name, val in python_symbols.__dict__.items():
            if type(val) == int: _type_reprs[val] = name
    return _type_reprs.setdefault(type_num, type_num)
|
| 30 |
+
|
| 31 |
+
class Base(object):

    """
    Abstract base class for Node and Leaf.

    This provides some default functionality and boilerplate using the
    template pattern.

    A node may be a subnode of at most one parent.
    """

    # Default values for instance variables
    type = None           # int: token number (< 256) or symbol number (>= 256)
    parent = None         # Parent node pointer, or None
    children = ()         # Tuple of subnodes
    was_changed = False   # Set by changed() when this subtree is modified
    was_checked = False

    def __new__(cls, *args, **kwds):
        """Constructor that prevents Base from being instantiated."""
        assert cls is not Base, "Cannot instantiate Base"
        return object.__new__(cls)

    def __eq__(self, other):
        """
        Compare two nodes for equality.

        This calls the method _eq().
        """
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self._eq(other)

    __hash__ = None # For Py3 compatibility.

    def _eq(self, other):
        """
        Compare two nodes for equality.

        This is called by __eq__ and __ne__. It is only called if the two nodes
        have the same type. This must be implemented by the concrete subclass.
        Nodes should be considered equal if they have the same structure,
        ignoring the prefix string and other context information.
        """
        raise NotImplementedError

    def clone(self):
        """
        Return a cloned (deep) copy of self.

        This must be implemented by the concrete subclass.
        """
        raise NotImplementedError

    def post_order(self):
        """
        Return a post-order iterator for the tree.

        This must be implemented by the concrete subclass.
        """
        raise NotImplementedError

    def pre_order(self):
        """
        Return a pre-order iterator for the tree.

        This must be implemented by the concrete subclass.
        """
        raise NotImplementedError

    def replace(self, new):
        """Replace this node with a new one in the parent."""
        assert self.parent is not None, str(self)
        assert new is not None
        if not isinstance(new, list):
            new = [new]
        l_children = []
        found = False
        for ch in self.parent.children:
            if ch is self:
                assert not found, (self.parent.children, self, new)
                # NOTE(review): `new` is asserted non-None above, so this
                # guard can never be False; kept for byte-compatibility.
                if new is not None:
                    l_children.extend(new)
                found = True
            else:
                l_children.append(ch)
        assert found, (self.children, self, new)
        self.parent.changed()
        self.parent.children = l_children
        for x in new:
            x.parent = self.parent
        self.parent = None

    def get_lineno(self):
        """Return the line number which generated the invocant node."""
        # Descend to the leftmost leaf; only leaves carry line numbers.
        node = self
        while not isinstance(node, Leaf):
            if not node.children:
                return
            node = node.children[0]
        return node.lineno

    def changed(self):
        # Propagate the dirty flag up to the root so the driver knows the
        # tree was modified.
        if self.parent:
            self.parent.changed()
        self.was_changed = True

    def remove(self):
        """
        Remove the node from the tree. Returns the position of the node in its
        parent's children before it was removed.
        """
        if self.parent:
            for i, node in enumerate(self.parent.children):
                if node is self:
                    self.parent.changed()
                    del self.parent.children[i]
                    self.parent = None
                    return i

    @property
    def next_sibling(self):
        """
        The node immediately following the invocant in their parent's children
        list. If the invocant does not have a next sibling, it is None
        """
        if self.parent is None:
            return None

        # Can't use index(); we need to test by identity
        for i, child in enumerate(self.parent.children):
            if child is self:
                try:
                    return self.parent.children[i+1]
                except IndexError:
                    return None

    @property
    def prev_sibling(self):
        """
        The node immediately preceding the invocant in their parent's children
        list. If the invocant does not have a previous sibling, it is None.
        """
        if self.parent is None:
            return None

        # Can't use index(); we need to test by identity
        for i, child in enumerate(self.parent.children):
            if child is self:
                if i == 0:
                    return None
                return self.parent.children[i-1]

    def leaves(self):
        # Generator over all leaves below (overridden by Leaf to yield self).
        for child in self.children:
            yield from child.leaves()

    def depth(self):
        # Number of ancestors between self and the tree root.
        if self.parent is None:
            return 0
        return 1 + self.parent.depth()

    def get_suffix(self):
        """
        Return the string immediately following the invocant node. This is
        effectively equivalent to node.next_sibling.prefix
        """
        next_sib = self.next_sibling
        if next_sib is None:
            return ""
        return next_sib.prefix

    # Dead code on Python 3; retained from the Python 2 era.
    if sys.version_info < (3, 0):
        def __str__(self):
            return str(self).encode("ascii")
|
| 206 |
+
|
| 207 |
+
class Node(Base):

    """Concrete implementation for interior nodes."""

    def __init__(self,type, children,
                 context=None,
                 prefix=None,
                 fixers_applied=None):
        """
        Initializer.

        Takes a type constant (a symbol number >= 256), a sequence of
        child nodes, and an optional context keyword argument.

        As a side effect, the parent pointers of the children are updated.
        """
        assert type >= 256, type
        self.type = type
        self.children = list(children)
        for ch in self.children:
            # Each child may belong to at most one parent (see Base docstring).
            assert ch.parent is None, repr(ch)
            ch.parent = self
        if prefix is not None:
            self.prefix = prefix
        if fixers_applied:
            # Copy so later mutation of the caller's list can't leak in.
            self.fixers_applied = fixers_applied[:]
        else:
            self.fixers_applied = None

    def __repr__(self):
        """Return a canonical string representation."""
        return "%s(%s, %r)" % (self.__class__.__name__,
                               type_repr(self.type),
                               self.children)

    def __unicode__(self):
        """
        Return a pretty string representation.

        This reproduces the input source exactly.
        """
        return "".join(map(str, self.children))

    if sys.version_info > (3, 0):
        __str__ = __unicode__

    def _eq(self, other):
        """Compare two nodes for equality."""
        return (self.type, self.children) == (other.type, other.children)

    def clone(self):
        """Return a cloned (deep) copy of self."""
        return Node(self.type, [ch.clone() for ch in self.children],
                    fixers_applied=self.fixers_applied)

    def post_order(self):
        """Return a post-order iterator for the tree."""
        for child in self.children:
            yield from child.post_order()
        yield self

    def pre_order(self):
        """Return a pre-order iterator for the tree."""
        yield self
        for child in self.children:
            yield from child.pre_order()

    @property
    def prefix(self):
        """
        The whitespace and comments preceding this node in the input.
        """
        # An interior node's prefix is the prefix of its first leaf.
        if not self.children:
            return ""
        return self.children[0].prefix

    @prefix.setter
    def prefix(self, prefix):
        if self.children:
            self.children[0].prefix = prefix

    def set_child(self, i, child):
        """
        Equivalent to 'node.children[i] = child'. This method also sets the
        child's parent attribute appropriately.
        """
        child.parent = self
        self.children[i].parent = None
        self.children[i] = child
        self.changed()

    def insert_child(self, i, child):
        """
        Equivalent to 'node.children.insert(i, child)'. This method also sets
        the child's parent attribute appropriately.
        """
        child.parent = self
        self.children.insert(i, child)
        self.changed()

    def append_child(self, child):
        """
        Equivalent to 'node.children.append(child)'. This method also sets the
        child's parent attribute appropriately.
        """
        child.parent = self
        self.children.append(child)
        self.changed()
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
class Leaf(Base):

    """Concrete implementation for leaf nodes."""

    # Default values for instance variables
    _prefix = ""  # Whitespace and comments preceding this token in the input
    lineno = 0    # Line where this token starts in the input
    column = 0    # Column where this token starts in the input

    def __init__(self, type, value,
                 context=None,
                 prefix=None,
                 fixers_applied=[]):
        """
        Initializer.

        Takes a type constant (a token number < 256), a string value, and an
        optional context keyword argument.
        """
        # NOTE(review): the mutable default fixers_applied=[] is safe only
        # because it is copied below, never mutated in place.
        assert 0 <= type < 256, type
        if context is not None:
            # context is (prefix, (lineno, column)) as produced by the driver.
            self._prefix, (self.lineno, self.column) = context
        self.type = type
        self.value = value
        if prefix is not None:
            # Explicit prefix overrides the one carried in context.
            self._prefix = prefix
        self.fixers_applied = fixers_applied[:]

    def __repr__(self):
        """Return a canonical string representation."""
        return "%s(%r, %r)" % (self.__class__.__name__,
                               self.type,
                               self.value)

    def __unicode__(self):
        """
        Return a pretty string representation.

        This reproduces the input source exactly.
        """
        return self.prefix + str(self.value)

    if sys.version_info > (3, 0):
        __str__ = __unicode__

    def _eq(self, other):
        """Compare two nodes for equality."""
        return (self.type, self.value) == (other.type, other.value)

    def clone(self):
        """Return a cloned (deep) copy of self."""
        return Leaf(self.type, self.value,
                    (self.prefix, (self.lineno, self.column)),
                    fixers_applied=self.fixers_applied)

    def leaves(self):
        yield self

    def post_order(self):
        """Return a post-order iterator for the tree."""
        yield self

    def pre_order(self):
        """Return a pre-order iterator for the tree."""
        yield self

    @property
    def prefix(self):
        """
        The whitespace and comments preceding this token in the input.
        """
        return self._prefix

    @prefix.setter
    def prefix(self, prefix):
        # Changing a prefix counts as a tree modification.
        self.changed()
        self._prefix = prefix
| 394 |
+
|
| 395 |
+
def convert(gr, raw_node):
    """
    Convert raw node information to a Node or Leaf instance.

    This is passed to the parser driver which calls it whenever a reduction of a
    grammar rule produces a new complete node, so that the tree is build
    strictly bottom-up.
    """
    type, value, context, children = raw_node
    is_internal = bool(children) or type in gr.number2symbol
    if not is_internal:
        # A plain token reduction becomes a Leaf.
        return Leaf(type, value, context=context)
    # Collapse trivial single-child reductions: hand back the child
    # directly instead of wrapping it in a new Node.
    if len(children) == 1:
        return children[0]
    return Node(type, children, context=context)
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
class BasePattern(object):

    """
    A pattern is a tree matching pattern.

    It looks for a specific node type (token or symbol), and
    optionally for a specific content.

    This is an abstract base class. There are three concrete
    subclasses:

    - LeafPattern matches a single leaf node;
    - NodePattern matches a single node (usually non-leaf);
    - WildcardPattern matches a sequence of nodes of variable length.
    """

    # Defaults for instance variables
    type = None     # Node type (token if < 256, symbol if >= 256)
    content = None  # Optional content matching pattern
    name = None     # Optional name used to store match in results dict

    def __new__(cls, *args, **kwds):
        """Constructor that prevents BasePattern from being instantiated."""
        assert cls is not BasePattern, "Cannot instantiate BasePattern"
        return object.__new__(cls)

    def __repr__(self):
        # Trim trailing None defaults so the repr stays compact.
        args = [type_repr(self.type), self.content, self.name]
        while args and args[-1] is None:
            args.pop()
        return "%s(%s)" % (self.__class__.__name__,
                           ", ".join(repr(a) for a in args))

    def optimize(self):
        """
        A subclass can define this as a hook for optimizations.

        Returns either self or another node with the same effect.
        """
        return self

    def match(self, node, results=None):
        """
        Does this pattern exactly match a node?

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be
        updated with the nodes matching named subpatterns.

        Default implementation for non-wildcard patterns.
        """
        if self.type is not None and node.type != self.type:
            return False
        if self.content is not None:
            # Collect submatches into a scratch dict so a failed
            # submatch does not pollute the caller's results.
            r = {} if results is not None else None
            if not self._submatch(node, r):
                return False
            if r:
                results.update(r)
        if self.name and results is not None:
            results[self.name] = node
        return True

    def match_seq(self, nodes, results=None):
        """
        Does this pattern exactly match a sequence of nodes?

        Default implementation for non-wildcard patterns.
        """
        if len(nodes) != 1:
            return False
        return self.match(nodes[0], results)

    def generate_matches(self, nodes):
        """
        Generator yielding all matches for this pattern.

        Default implementation for non-wildcard patterns.
        """
        if nodes:
            r = {}
            if self.match(nodes[0], r):
                yield 1, r
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
class LeafPattern(BasePattern):

    def __init__(self, type=None, content=None, name=None):
        """
        Initializer.  Takes optional type, content, and name.

        The type, if given must be a token type (< 256).  If not given,
        this matches any *leaf* node; the content may still be required.

        The content, if given, must be a string.

        If a name is given, the matching node is stored in the results
        dict under that key.
        """
        if type is not None:
            assert 0 <= type < 256, type
        if content is not None:
            assert isinstance(content, str), repr(content)
        self.type = type
        self.content = content
        self.name = name

    def match(self, node, results=None):
        """Override match() to insist on a leaf node."""
        if isinstance(node, Leaf):
            return BasePattern.match(self, node, results)
        return False

    def _submatch(self, node, results=None):
        """
        Match the pattern's content to the node's children.

        This assumes the node type matches and self.content is not None.

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be
        updated with the nodes matching named subpatterns.

        When returning False, the results dict may still be updated.
        """
        # For leaves, "content" is simply the token's string value.
        return node.value == self.content
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
class NodePattern(BasePattern):

    # Set to True in __init__ when any subpattern is a WildcardPattern.
    wildcards = False

    def __init__(self, type=None, content=None, name=None):
        """
        Initializer.  Takes optional type, content, and name.

        The type, if given, must be a symbol type (>= 256).  If the
        type is None this matches *any* single node (leaf or not),
        except if content is not None, in which it only matches
        non-leaf nodes that also match the content pattern.

        The content, if not None, must be a sequence of Patterns that
        must match the node's children exactly.  If the content is
        given, the type must not be None.

        If a name is given, the matching node is stored in the results
        dict under that key.
        """
        if type is not None:
            assert type >= 256, type
        if content is not None:
            assert not isinstance(content, str), repr(content)
            content = list(content)
            for index, subpattern in enumerate(content):
                assert isinstance(subpattern, BasePattern), (index, subpattern)
                if isinstance(subpattern, WildcardPattern):
                    self.wildcards = True
        self.type = type
        self.content = content
        self.name = name

    def _submatch(self, node, results=None):
        """
        Match the pattern's content to the node's children.

        This assumes the node type matches and self.content is not None.

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be
        updated with the nodes matching named subpatterns.

        When returning False, the results dict may still be updated.
        """
        children = node.children
        if self.wildcards:
            # Wildcards force full variable-length sequence matching.
            for count, found in generate_matches(self.content, children):
                if count == len(children):
                    if results is not None:
                        results.update(found)
                    return True
            return False
        # Fixed-arity case: children must line up one-to-one.
        if len(self.content) != len(children):
            return False
        return all(subpattern.match(child, results)
                   for subpattern, child in zip(self.content, children))
|
| 603 |
+
|
| 604 |
+
|
| 605 |
+
class WildcardPattern(BasePattern):

    """
    A wildcard pattern can match zero or more nodes.

    This has all the flexibility needed to implement patterns like:

    .* .+ .? .{m,n}
    (a b c | d e | f)
    (...)* (...)+ (...)? (...){m,n}

    except it always uses non-greedy matching.
    """

    def __init__(self, content=None, min=0, max=HUGE, name=None):
        """
        Initializer.

        Args:
            content: optional sequence of subsequences of patterns;
                     if absent, matches one node;
                     if present, each subsequence is an alternative [*]
            min: optional minimum number of times to match, default 0
            max: optional maximum number of times to match, default HUGE
            name: optional name assigned to this match

        [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
            equivalent to (a b c | d e | f g h); if content is None,
            this is equivalent to '.' in regular expression terms.
            The min and max parameters work as follows:
                min=0, max=maxint: .*
                min=1, max=maxint: .+
                min=0, max=1: .?
                min=1, max=1: .
            If content is not None, replace the dot with the parenthesized
            list of alternatives, e.g. (a b c | d e | f g h)*
        """
        assert 0 <= min <= max <= HUGE, (min, max)
        if content is not None:
            content = tuple(map(tuple, content))  # Protect against alterations
            # Check sanity of alternatives
            assert len(content), repr(content)  # Can't have zero alternatives
            for alt in content:
                assert len(alt), repr(alt)  # Can't have empty alternatives
        self.content = content
        self.min = min
        self.max = max
        self.name = name

    def optimize(self):
        """Optimize certain stacked wildcard patterns."""
        subpattern = None
        # A content of exactly one alternative with exactly one pattern
        # is a candidate for collapsing into that inner pattern.
        if (self.content is not None and
            len(self.content) == 1 and len(self.content[0]) == 1):
            subpattern = self.content[0][0]
        if self.min == 1 and self.max == 1:
            if self.content is None:
                # '.' with {1,1} is just "any single node".
                return NodePattern(name=self.name)
            if subpattern is not None and self.name == subpattern.name:
                return subpattern.optimize()
        # (p?)? / (p*)* style stacks collapse into one wildcard with
        # multiplied min/max bounds.
        if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
            subpattern.min <= 1 and self.name == subpattern.name):
            return WildcardPattern(subpattern.content,
                                   self.min*subpattern.min,
                                   self.max*subpattern.max,
                                   subpattern.name)
        return self

    def match(self, node, results=None):
        """Does this pattern exactly match a node?"""
        return self.match_seq([node], results)

    def match_seq(self, nodes, results=None):
        """Does this pattern exactly match a sequence of nodes?"""
        for c, r in self.generate_matches(nodes):
            if c == len(nodes):
                if results is not None:
                    results.update(r)
                    if self.name:
                        results[self.name] = list(nodes)
                return True
        return False

    def generate_matches(self, nodes):
        """
        Generator yielding matches for a sequence of nodes.

        Args:
            nodes: sequence of nodes

        Yields:
            (count, results) tuples where:
            count: the match comprises nodes[:count];
            results: dict containing named submatches.
        """
        if self.content is None:
            # Shortcut for special case (see __init__.__doc__):
            # '.' repeated -- every count in [min, min(len, max)] matches.
            for count in range(self.min, 1 + min(len(nodes), self.max)):
                r = {}
                if self.name:
                    r[self.name] = nodes[:count]
                yield count, r
        elif self.name == "bare_name":
            yield self._bare_name_matches(nodes)
        else:
            # The reason for this is that hitting the recursion limit usually
            # results in some ugly messages about how RuntimeErrors are being
            # ignored. We only have to do this on CPython, though, because other
            # implementations don't have this nasty bug in the first place.
            if hasattr(sys, "getrefcount"):
                save_stderr = sys.stderr
                sys.stderr = StringIO()
            try:
                for count, r in self._recursive_matches(nodes, 0):
                    if self.name:
                        r[self.name] = nodes[:count]
                    yield count, r
            except RuntimeError:
                # Fall back to the iterative pattern matching scheme if the
                # recursive scheme hits the recursion limit (RecursionError).
                for count, r in self._iterative_matches(nodes):
                    if self.name:
                        r[self.name] = nodes[:count]
                    yield count, r
            finally:
                if hasattr(sys, "getrefcount"):
                    sys.stderr = save_stderr

    def _iterative_matches(self, nodes):
        """Helper to iteratively yield the matches."""
        nodelen = len(nodes)
        if 0 >= self.min:
            # The empty match is allowed when min is zero.
            yield 0, {}

        results = []
        # generate matches that use just one alt from self.content
        for alt in self.content:
            for c, r in generate_matches(alt, nodes):
                yield c, r
                results.append((c, r))

        # for each match, iterate down the nodes
        while results:
            new_results = []
            for c0, r0 in results:
                # stop if the entire set of nodes has been matched
                if c0 < nodelen and c0 <= self.max:
                    for alt in self.content:
                        for c1, r1 in generate_matches(alt, nodes[c0:]):
                            if c1 > 0:
                                r = {}
                                r.update(r0)
                                r.update(r1)
                                yield c0 + c1, r
                                new_results.append((c0 + c1, r))
            results = new_results

    def _bare_name_matches(self, nodes):
        """Special optimized matcher for bare_name."""
        count = 0
        r = {}
        done = False
        max = len(nodes)
        # Greedily consume nodes as long as some leaf alternative matches.
        while not done and count < max:
            done = True
            for leaf in self.content:
                if leaf[0].match(nodes[count], r):
                    count += 1
                    done = False
                    break
        r[self.name] = nodes[:count]
        return count, r

    def _recursive_matches(self, nodes, count):
        """Helper to recursively yield the matches."""
        assert self.content is not None
        if count >= self.min:
            # A zero-length match is acceptable once the minimum
            # repetition count has been reached.
            yield 0, {}
        if count < self.max:
            for alt in self.content:
                for c0, r0 in generate_matches(alt, nodes):
                    for c1, r1 in self._recursive_matches(nodes[c0:], count+1):
                        r = {}
                        r.update(r0)
                        r.update(r1)
                        yield c0 + c1, r
|
| 791 |
+
|
| 792 |
+
|
| 793 |
+
class NegatedPattern(BasePattern):

    def __init__(self, content=None):
        """
        Initializer.

        The argument is either a pattern or None.  If it is None, this
        only matches an empty sequence (effectively '$' in regex
        lingo).  If it is not None, this matches whenever the argument
        pattern doesn't have any matches.
        """
        if content is not None:
            assert isinstance(content, BasePattern), repr(content)
        self.content = content

    def match(self, node):
        # We never match a node in its entirety
        return False

    def match_seq(self, nodes):
        # We only match an empty sequence of nodes in its entirety
        return not nodes

    def generate_matches(self, nodes):
        if self.content is None:
            # Anchor behavior: succeed only at the end of the node list.
            if not nodes:
                yield 0, {}
            return
        # Succeed (with an empty match) only when the wrapped pattern
        # produces no matches at all.
        for _ in self.content.generate_matches(nodes):
            return
        yield 0, {}
|
| 826 |
+
|
| 827 |
+
|
| 828 |
+
def generate_matches(patterns, nodes):
    """
    Generator yielding matches for a sequence of patterns and nodes.

    Args:
        patterns: a sequence of patterns
        nodes: a sequence of nodes

    Yields:
        (count, results) tuples where:
        count: the entire sequence of patterns matches nodes[:count];
        results: dict containing named submatches.
    """
    if not patterns:
        yield 0, {}
        return
    first, remaining = patterns[0], patterns[1:]
    for c0, r0 in first.generate_matches(nodes):
        if not remaining:
            yield c0, r0
        else:
            # Recurse on the rest of the patterns against the rest of
            # the nodes, merging the named submatches of both halves.
            for c1, r1 in generate_matches(remaining, nodes[c0:]):
                combined = dict(r0)
                combined.update(r1)
                yield c0 + c1, combined
|
deepseek/lib/python3.10/lib2to3/refactor.py
ADDED
|
@@ -0,0 +1,732 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Refactoring framework.
|
| 5 |
+
|
| 6 |
+
Used as a main program, this can refactor any number of files and/or
|
| 7 |
+
recursively descend down directories. Imported as a module, this
|
| 8 |
+
provides infrastructure to write your own refactoring tool.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__author__ = "Guido van Rossum <guido@python.org>"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Python imports
|
| 15 |
+
import io
|
| 16 |
+
import os
|
| 17 |
+
import pkgutil
|
| 18 |
+
import sys
|
| 19 |
+
import logging
|
| 20 |
+
import operator
|
| 21 |
+
import collections
|
| 22 |
+
from itertools import chain
|
| 23 |
+
|
| 24 |
+
# Local imports
|
| 25 |
+
from .pgen2 import driver, tokenize, token
|
| 26 |
+
from .fixer_util import find_root
|
| 27 |
+
from . import pytree, pygram
|
| 28 |
+
from . import btm_matcher as bm
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def get_all_fix_names(fixer_pkg, remove_prefix=True):
    """Return the names of the fixer modules found in the given package.

    Only modules whose name starts with "fix_" are considered; the
    prefix is stripped unless remove_prefix is false.
    """
    pkg = __import__(fixer_pkg, [], [], ["*"])
    prefix = "fix_"
    fix_names = []
    for _finder, name, _ispkg in pkgutil.iter_modules(pkg.__path__):
        if not name.startswith(prefix):
            continue
        fix_names.append(name[len(prefix):] if remove_prefix else name)
    return fix_names
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class _EveryNode(Exception):
    # Internal control-flow signal raised by _get_head_types when a
    # pattern has no identifiable head type, i.e. it could match any
    # node, so the fixer must be registered for every node type.
    pass
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _get_head_types(pat):
    """ Accepts a pytree Pattern Node and returns a set
    of the pattern types which will match first. """

    if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
        # Node/Leaf patterns carry their own type; a missing type means
        # the pattern can start with anything.
        if pat.type is None:
            raise _EveryNode
        return {pat.type}

    if isinstance(pat, pytree.NegatedPattern):
        # A negated pattern has no type of its own; defer to its content.
        if pat.content:
            return _get_head_types(pat.content)
        raise _EveryNode  # Negated Patterns don't have a type

    if isinstance(pat, pytree.WildcardPattern):
        # Union of the head types of every subpattern of every alternative.
        result = set()
        for alternative in pat.content:
            for subpattern in alternative:
                result.update(_get_head_types(subpattern))
        return result

    raise Exception("Oh no! I don't understand pattern %s" %(pat))
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def _get_headnode_dict(fixer_list):
    """ Accepts a list of fixers and returns a dictionary
    of head node type --> fixer list. """
    head_nodes = collections.defaultdict(list)
    every = []  # fixers that must be tried against every node type
    for fixer in fixer_list:
        if not fixer.pattern:
            # No pattern: fall back to the fixer's declared accept
            # type, or run it everywhere.
            if fixer._accept_type is not None:
                head_nodes[fixer._accept_type].append(fixer)
            else:
                every.append(fixer)
            continue
        try:
            heads = _get_head_types(fixer.pattern)
        except _EveryNode:
            every.append(fixer)
        else:
            for node_type in heads:
                head_nodes[node_type].append(fixer)
    # Fixers with no usable head type are appended under every known
    # symbol and token type.
    for node_type in chain(pygram.python_grammar.symbol2number.values(),
                           pygram.python_grammar.tokens):
        head_nodes[node_type].extend(every)
    return dict(head_nodes)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def get_fixers_from_package(pkg_name):
    """
    Return the fully qualified names for fixers in the package pkg_name.
    """
    prefix = pkg_name + "."
    return [prefix + fix_name
            for fix_name in get_all_fix_names(pkg_name, False)]
|
| 106 |
+
|
| 107 |
+
def _identity(obj):
|
| 108 |
+
return obj
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def _detect_future_features(source):
    """Return the frozenset of __future__ feature names imported by *source*.

    Tokenizes the source and scans from the top, stopping at the first
    construct that is neither a docstring nor a
    ``from __future__ import ...`` statement.
    """
    have_docstring = False
    gen = tokenize.generate_tokens(io.StringIO(source).readline)
    def advance():
        # Return (token_type, token_string) for the next token.
        tok = next(gen)
        return tok[0], tok[1]
    # Tokens that may appear between __future__ imports without ending the scan.
    ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT})
    features = set()
    try:
        while True:
            tp, value = advance()
            if tp in ignore:
                continue
            elif tp == token.STRING:
                # Only one leading string (the module docstring) is
                # tolerated; a second string ends the scan.
                if have_docstring:
                    break
                have_docstring = True
            elif tp == token.NAME and value == "from":
                tp, value = advance()
                if tp != token.NAME or value != "__future__":
                    break
                tp, value = advance()
                if tp != token.NAME or value != "import":
                    break
                tp, value = advance()
                if tp == token.OP and value == "(":
                    # Parenthesized import list: skip the open paren.
                    tp, value = advance()
                # Collect comma-separated feature names.
                while tp == token.NAME:
                    features.add(value)
                    tp, value = advance()
                    if tp != token.OP or value != ",":
                        break
                    tp, value = advance()
            else:
                # Any other statement ends the __future__ import section.
                break
    except StopIteration:
        pass
    return frozenset(features)
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class FixerError(Exception):
    """A fixer could not be loaded."""
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class RefactoringTool(object):

    # Option defaults; a copy is taken in __init__ and then updated
    # with the caller-supplied options dict.
    _default_options = {"print_function" : False,
                        "exec_function": False,
                        "write_unchanged_files" : False}

    CLASS_PREFIX = "Fix" # The prefix for fixer classes
    FILE_PREFIX = "fix_" # The prefix for modules with a fixer within
|
| 163 |
+
|
| 164 |
+
    def __init__(self, fixer_names, options=None, explicit=None):
        """Initializer.

        Args:
            fixer_names: a list of fixers to import
            options: a dict with configuration.
            explicit: a list of fixers to run even if they are explicit.
        """
        self.fixers = fixer_names
        self.explicit = explicit or []
        # Start from the class-level defaults, then overlay caller options.
        self.options = self._default_options.copy()
        if options is not None:
            self.options.update(options)
        self.grammar = pygram.python_grammar.copy()

        # Dropping "print"/"exec" from the keyword table lets the parser
        # treat them as ordinary names (function-call syntax).
        if self.options['print_function']:
            del self.grammar.keywords["print"]
        elif self.options['exec_function']:
            del self.grammar.keywords["exec"]

        # When this is True, the refactor*() methods will call write_file() for
        # files processed even if they were not changed during refactoring. If
        # and only if the refactor method's write parameter was True.
        self.write_unchanged_files = self.options.get("write_unchanged_files")
        self.errors = []
        self.logger = logging.getLogger("RefactoringTool")
        self.fixer_log = []
        self.wrote = False
        self.driver = driver.Driver(self.grammar,
                                    convert=pytree.convert,
                                    logger=self.logger)
        self.pre_order, self.post_order = self.get_fixers()


        self.files = []  # List of files that were or should be modified

        self.BM = bm.BottomMatcher()
        self.bmi_pre_order = []  # Bottom Matcher incompatible fixers
        self.bmi_post_order = []

        # Partition fixers: BM-compatible ones go to the bottom matcher,
        # the rest keep their traversal-order lists.
        for fixer in chain(self.post_order, self.pre_order):
            if fixer.BM_compatible:
                self.BM.add_fixer(fixer)
                # remove fixers that will be handled by the bottom-up
                # matcher
            elif fixer in self.pre_order:
                self.bmi_pre_order.append(fixer)
            elif fixer in self.post_order:
                self.bmi_post_order.append(fixer)

        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
    def get_fixers(self):
        """Inspects the options to load the requested patterns and handlers.

        Returns:
          (pre_order, post_order), where pre_order is the list of fixers that
          want a pre-order AST traversal, and post_order is the list that want
          post-order traversal.
        """
        pre_order_fixers = []
        post_order_fixers = []
        for fix_mod_path in self.fixers:
            mod = __import__(fix_mod_path, {}, {}, ["*"])
            fix_name = fix_mod_path.rsplit(".", 1)[-1]
            if fix_name.startswith(self.FILE_PREFIX):
                fix_name = fix_name[len(self.FILE_PREFIX):]
            # Derive the class name from the module name,
            # e.g. "fix_apply" -> "FixApply".
            parts = fix_name.split("_")
            class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts])
            try:
                fix_class = getattr(mod, class_name)
            except AttributeError:
                raise FixerError("Can't find %s.%s" % (fix_name, class_name)) from None
            fixer = fix_class(self.options, self.fixer_log)
            # Explicit fixers run only when requested by name (or when
            # self.explicit is True, meaning "run everything").
            if fixer.explicit and self.explicit is not True and \
                    fix_mod_path not in self.explicit:
                self.log_message("Skipping optional fixer: %s", fix_name)
                continue

            self.log_debug("Adding transformation: %s", fix_name)
            if fixer.order == "pre":
                pre_order_fixers.append(fixer)
            elif fixer.order == "post":
                post_order_fixers.append(fixer)
            else:
                raise FixerError("Illegal fixer order: %r" % fixer.order)

        # Stable-sort each list by the fixer's declared run_order.
        key_func = operator.attrgetter("run_order")
        pre_order_fixers.sort(key=key_func)
        post_order_fixers.sort(key=key_func)
        return (pre_order_fixers, post_order_fixers)
|
| 258 |
+
|
| 259 |
+
def log_error(self, msg, *args, **kwds):
    """Called when an error occurs."""
    # Base-class behavior: re-raise the exception currently being handled
    # (callers invoke this from inside an `except` block).  Subclasses
    # may override this to record the error and continue instead.
    raise
def log_message(self, msg, *args):
    """Hook to log a message at INFO level.

    %-style formatting is applied only when positional args are given,
    so a literal message containing '%' still works when passed alone.
    """
    text = msg % args if args else msg
    self.logger.info(text)
def log_debug(self, msg, *args):
    """Debug-level counterpart of log_message(); formats lazily."""
    text = msg % args if args else msg
    self.logger.debug(text)
def print_output(self, old_text, new_text, filename, equal):
    """Called with the old version, new version, and filename of a
    refactored file.  The base implementation is a no-op hook for
    subclasses (e.g. to show a diff)."""
    return None
def refactor(self, items, write=False, doctests_only=False):
    """Refactor a list of files and directories."""
    for dir_or_file in items:
        # Directories are walked recursively; anything else is treated
        # as a single file.
        if os.path.isdir(dir_or_file):
            handler = self.refactor_dir
        else:
            handler = self.refactor_file
        handler(dir_or_file, write, doctests_only)
def refactor_dir(self, dir_name, write=False, doctests_only=False):
    """Descend into a directory and refactor every Python file found.

    Python files are assumed to have a .py extension.  Files and
    subdirectories whose names start with '.' are skipped.
    """
    py_ext = os.extsep + "py"
    for dirpath, dirnames, filenames in os.walk(dir_name):
        self.log_debug("Descending into %s", dirpath)
        # Sort both lists for a deterministic traversal order.
        dirnames.sort()
        filenames.sort()
        for name in filenames:
            hidden = name.startswith(".")
            if not hidden and os.path.splitext(name)[1] == py_ext:
                self.refactor_file(os.path.join(dirpath, name),
                                   write, doctests_only)
        # Prune hidden subdirectories in place so os.walk skips them.
        dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")]
def _read_python_source(self, filename):
|
| 309 |
+
"""
|
| 310 |
+
Do our best to decode a Python source file correctly.
|
| 311 |
+
"""
|
| 312 |
+
try:
|
| 313 |
+
f = open(filename, "rb")
|
| 314 |
+
except OSError as err:
|
| 315 |
+
self.log_error("Can't open %s: %s", filename, err)
|
| 316 |
+
return None, None
|
| 317 |
+
try:
|
| 318 |
+
encoding = tokenize.detect_encoding(f.readline)[0]
|
| 319 |
+
finally:
|
| 320 |
+
f.close()
|
| 321 |
+
with io.open(filename, "r", encoding=encoding, newline='') as f:
|
| 322 |
+
return f.read(), encoding
|
| 323 |
+
|
| 324 |
+
def refactor_file(self, filename, write=False, doctests_only=False):
    """Refactors a single file (doctest pass or full parse-tree pass)."""
    input, encoding = self._read_python_source(filename)
    if input is None:
        # Reading the file failed; the error was already logged.
        return
    input += "\n"  # Silence certain parse errors
    if doctests_only:
        self.log_debug("Refactoring doctests in %s", filename)
        output = self.refactor_docstring(input, filename)
        changed = output != input
        if self.write_unchanged_files or changed:
            self.processed_file(output, filename, input, write, encoding)
        else:
            self.log_debug("No doctest changes in %s", filename)
    else:
        tree = self.refactor_string(input, filename)
        if self.write_unchanged_files or (tree and tree.was_changed):
            # The [:-1] strips the newline appended above.
            self.processed_file(str(tree)[:-1], filename,
                                write=write, encoding=encoding)
        else:
            self.log_debug("No changes in %s", filename)
def refactor_string(self, data, name):
    """Refactor a given input string.

    Args:
        data: a string holding the code to be refactored.
        name: a human-readable name for use in error/log messages.

    Returns:
        An AST corresponding to the refactored input stream; None if
        there were errors during the parse.
    """
    features = _detect_future_features(data)
    if "print_function" in features:
        # `from __future__ import print_function` means `print` must be
        # parsed as a function call, not a statement.
        self.driver.grammar = pygram.python_grammar_no_print_statement
    try:
        tree = self.driver.parse_string(data)
    except Exception as err:
        self.log_error("Can't parse %s: %s: %s",
                       name, err.__class__.__name__, err)
        return
    finally:
        # Always restore the default grammar, even after a parse failure.
        self.driver.grammar = self.grammar
    tree.future_features = features
    self.log_debug("Refactoring %s", name)
    self.refactor_tree(tree, name)
    return tree
def refactor_stdin(self, doctests_only=False):
    """Refactor source read from stdin (results are never written back)."""
    input = sys.stdin.read()
    if doctests_only:
        self.log_debug("Refactoring doctests in stdin")
        output = self.refactor_docstring(input, "<stdin>")
        if self.write_unchanged_files or output != input:
            self.processed_file(output, "<stdin>", input)
        else:
            self.log_debug("No doctest changes in stdin")
        return
    tree = self.refactor_string(input, "<stdin>")
    if self.write_unchanged_files or (tree and tree.was_changed):
        self.processed_file(str(tree), "<stdin>", input)
    else:
        self.log_debug("No changes in stdin")
def refactor_tree(self, tree, name):
    """Refactors a parse tree (modifying the tree in place).

    For compatible patterns the bottom matcher module is
    used. Otherwise the tree is traversed node-to-node for
    matches.

    Args:
        tree: a pytree.Node instance representing the root of the tree
            to be refactored.
        name: a human-readable name for this tree.

    Returns:
        True if the tree was modified, False otherwise.
    """

    for fixer in chain(self.pre_order, self.post_order):
        fixer.start_tree(tree, name)

    # use traditional matching for the incompatible fixers
    self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
    self.traverse_by(self.bmi_post_order_heads, tree.post_order())

    # obtain a set of candidate nodes (fixer -> list of matched nodes)
    match_set = self.BM.run(tree.leaves())

    # Iterate to a fixed point: transformations may introduce new code
    # that itself matches other fixers.
    while any(match_set.values()):
        for fixer in self.BM.fixers:
            if fixer in match_set and match_set[fixer]:
                # sort by depth; apply fixers from bottom (of the AST) to top
                match_set[fixer].sort(key=pytree.Base.depth, reverse=True)

                if fixer.keep_line_order:
                    # some fixers (eg fix_imports) must be applied
                    # with the original file's line order
                    match_set[fixer].sort(key=pytree.Base.get_lineno)

                for node in list(match_set[fixer]):
                    if node in match_set[fixer]:
                        match_set[fixer].remove(node)

                    try:
                        find_root(node)
                    except ValueError:
                        # this node has been cut off from a
                        # previous transformation; skip
                        continue

                    if node.fixers_applied and fixer in node.fixers_applied:
                        # do not apply the same fixer again
                        continue

                    results = fixer.match(node)

                    if results:
                        new = fixer.transform(node, results)
                        if new is not None:
                            node.replace(new)
                            #new.fixers_applied.append(fixer)
                            for node in new.post_order():
                                # do not apply the fixer again to
                                # this or any subnode
                                if not node.fixers_applied:
                                    node.fixers_applied = []
                                node.fixers_applied.append(fixer)

                            # update the original match set for
                            # the added code
                            new_matches = self.BM.run(new.leaves())
                            for fxr in new_matches:
                                if not fxr in match_set:
                                    match_set[fxr]=[]

                                match_set[fxr].extend(new_matches[fxr])

    for fixer in chain(self.pre_order, self.post_order):
        fixer.finish_tree(tree, name)
    return tree.was_changed
def traverse_by(self, fixers, traversal):
    """Traverse an AST, applying a set of fixers to each node.

    This is a helper method for refactor_tree().

    Args:
        fixers: a mapping from node type to a list of fixer instances.
        traversal: a generator that yields AST nodes.

    Returns:
        None
    """
    if not fixers:
        return
    for node in traversal:
        for fixer in fixers[node.type]:
            match = fixer.match(node)
            if not match:
                continue
            replacement = fixer.transform(node, match)
            if replacement is not None:
                # Splice the new subtree in and keep matching against it.
                node.replace(replacement)
                node = replacement
def processed_file(self, new_text, filename, old_text=None, write=False,
                   encoding=None):
    """Called when a file has been refactored and there may be changes.

    Records the filename, reports a diff via print_output(), and writes
    the new text back if requested.
    """
    self.files.append(filename)
    if old_text is None:
        # Re-read the original so we have something to compare against.
        old_text = self._read_python_source(filename)[0]
        if old_text is None:
            return
    equal = old_text == new_text
    self.print_output(old_text, new_text, filename, equal)
    if equal:
        self.log_debug("No changes to %s", filename)
        if not self.write_unchanged_files:
            return
    if not write:
        self.log_debug("Not writing changes to %s", filename)
        return
    self.write_file(new_text, filename, old_text, encoding)
def write_file(self, new_text, filename, old_text, encoding=None):
    """Writes a string to a file.

    It first shows a unified diff between the old text and the new text, and
    then rewrites the file; the latter is only done if the write option is
    set.
    """
    try:
        # newline='' avoids translating line endings on write.
        fp = io.open(filename, "w", encoding=encoding, newline='')
    except OSError as err:
        self.log_error("Can't create %s: %s", filename, err)
        return

    with fp:
        try:
            fp.write(new_text)
        except OSError as err:
            self.log_error("Can't write %s: %s", filename, err)
    # NOTE(review): "Wrote changes" is logged and self.wrote set even when
    # the write above raised — matches historical behavior; confirm intended.
    self.log_debug("Wrote changes to %s", filename)
    self.wrote = True
# Doctest prompt prefixes recognized by refactor_docstring()/gen_lines().
PS1 = ">>> "
PS2 = "... "
def refactor_docstring(self, input, filename):
    """Refactors a docstring, looking for doctests.

    This returns a modified version of the input string.  It looks
    for doctests, which start with a ">>>" prompt, and may be
    continued with "..." prompts, as long as the "..." is indented
    the same as the ">>>".

    (Unfortunately we can't use the doctest module's parser,
    since, like most parsers, it is not geared towards preserving
    the original source.)
    """
    result = []
    block = None          # lines of the doctest currently being collected
    block_lineno = None   # 1-based line number where that doctest starts
    indent = None         # leading whitespace before the PS1 prompt
    lineno = 0
    for line in input.splitlines(keepends=True):
        lineno += 1
        if line.lstrip().startswith(self.PS1):
            # A new doctest starts; flush any doctest in progress first.
            if block is not None:
                result.extend(self.refactor_doctest(block, block_lineno,
                                                    indent, filename))
            block_lineno = lineno
            block = [line]
            i = line.find(self.PS1)
            indent = line[:i]
        elif (indent is not None and
              (line.startswith(indent + self.PS2) or
               line == indent + self.PS2.rstrip() + "\n")):
            # Continuation line of the current doctest.
            block.append(line)
        else:
            # Ordinary text; flush any doctest in progress.
            if block is not None:
                result.extend(self.refactor_doctest(block, block_lineno,
                                                    indent, filename))
            block = None
            indent = None
            result.append(line)
    if block is not None:
        # The input ended while still inside a doctest.
        result.extend(self.refactor_doctest(block, block_lineno,
                                            indent, filename))
    return "".join(result)
def refactor_doctest(self, block, lineno, indent, filename):
    """Refactors one doctest.

    A doctest is given as a block of lines, the first of which starts
    with ">>>" (possibly indented), while the remaining lines start
    with "..." (identically indented).

    Returns the (possibly rewritten) list of lines, prompts re-attached.
    """
    try:
        tree = self.parse_block(block, lineno, indent)
    except Exception as err:
        # Unparseable doctest: log it and return the block unchanged.
        if self.logger.isEnabledFor(logging.DEBUG):
            for line in block:
                self.log_debug("Source: %s", line.rstrip("\n"))
        self.log_error("Can't parse docstring in %s line %s: %s: %s",
                       filename, lineno, err.__class__.__name__, err)
        return block
    if self.refactor_tree(tree, filename):
        new = str(tree).splitlines(keepends=True)
        # Undo the adjustment of the line numbers in wrap_toks() below:
        # the first lineno-1 lines are padding newlines.
        clipped, new = new[:lineno-1], new[lineno-1:]
        assert clipped == ["\n"] * (lineno-1), clipped
        if not new[-1].endswith("\n"):
            new[-1] += "\n"
        # Re-attach the doctest prompts: PS1 on the first line, PS2 after.
        block = [indent + self.PS1 + new.pop(0)]
        if new:
            block += [indent + self.PS2 + line for line in new]
    return block
def summarize(self):
    """Log a summary: files touched, fixer warnings, and errors."""
    were = "were" if self.wrote else "need to be"
    if not self.files:
        self.log_message("No files %s modified.", were)
    else:
        self.log_message("Files that %s modified:", were)
        for file in self.files:
            self.log_message(file)
    if self.fixer_log:
        self.log_message("Warnings/messages while refactoring:")
        for message in self.fixer_log:
            self.log_message(message)
    if self.errors:
        count = len(self.errors)
        if count == 1:
            self.log_message("There was 1 error:")
        else:
            self.log_message("There were %d errors:", count)
        for msg, args, kwds in self.errors:
            self.log_message(msg, *args, **kwds)
def parse_block(self, block, lineno, indent):
    """Parses a block into a tree.

    This is necessary to get correct line number / offset information
    in the parser diagnostics and embedded into the parse tree.
    """
    tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
    # A doctest block cannot carry __future__ imports of its own.
    tree.future_features = frozenset()
    return tree
def wrap_toks(self, block, lineno, indent):
    """Wraps a tokenize stream to systematically modify start/end."""
    readline = self.gen_lines(block, indent).__next__
    for type, value, (line0, col0), (line1, col1), line_text in \
            tokenize.generate_tokens(readline):
        # Shift line numbers so they are relative to the enclosing file.
        line0 += lineno - 1
        line1 += lineno - 1
        # Don't bother updating the columns; this is too complicated
        # since line_text would also have to be updated and it would
        # still break for tokens spanning lines.  Let the user guess
        # that the column numbers for doctests are relative to the
        # end of the prompt string (PS1 or PS2).
        yield type, value, (line0, col0), (line1, col1), line_text
def gen_lines(self, block, indent):
    """Generates lines as expected by tokenize from a list of lines.

    This strips the first len(indent + self.PS1) characters off each line.
    """
    prefixes = (indent + self.PS1, indent + self.PS2)
    expected = prefixes[0]
    for line in block:
        if line.startswith(expected):
            yield line[len(expected):]
        elif line == expected.rstrip() + "\n":
            # A bare prompt line whose trailing space was stripped.
            yield "\n"
        else:
            raise AssertionError("line=%r, prefix=%r" % (line, expected))
        # Every line after the first must carry the PS2 prefix.
        expected = prefixes[1]
    # tokenize expects an endless stream of empty lines at EOF.
    while True:
        yield ""
class MultiprocessingUnsupported(Exception):
    """Raised when multi-process refactoring is requested but the
    multiprocessing module cannot be imported on this platform."""
    pass
class MultiprocessRefactoringTool(RefactoringTool):
    """RefactoringTool variant that can spread work over several processes.

    The parent enqueues (args, kwargs) tasks for refactor_file() onto a
    JoinableQueue; worker processes consume them until they receive a
    None sentinel.
    """

    def __init__(self, *args, **kwargs):
        super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
        # Created lazily in refactor(); a non-None queue also serves as
        # the "multi-process run in progress" flag.
        self.queue = None
        self.output_lock = None

    def refactor(self, items, write=False, doctests_only=False,
                 num_processes=1):
        """Refactor items, optionally fanning out to num_processes workers.

        Raises:
            MultiprocessingUnsupported: if multiprocessing is unavailable.
            RuntimeError: if a multi-process run is already in progress.
        """
        if num_processes == 1:
            # Single process: use the plain sequential implementation.
            return super(MultiprocessRefactoringTool, self).refactor(
                items, write, doctests_only)
        try:
            import multiprocessing
        except ImportError:
            raise MultiprocessingUnsupported
        if self.queue is not None:
            raise RuntimeError("already doing multiple processes")
        self.queue = multiprocessing.JoinableQueue()
        self.output_lock = multiprocessing.Lock()
        processes = [multiprocessing.Process(target=self._child)
                     for i in range(num_processes)]
        try:
            for p in processes:
                p.start()
            # With self.queue set, refactor_file() below enqueues tasks
            # instead of refactoring inline.
            super(MultiprocessRefactoringTool, self).refactor(items, write,
                                                              doctests_only)
        finally:
            self.queue.join()
            # One None sentinel per worker tells it to exit.
            for i in range(num_processes):
                self.queue.put(None)
            for p in processes:
                if p.is_alive():
                    p.join()
            self.queue = None

    def _child(self):
        # Worker loop: process queued refactor_file tasks until sentinel.
        task = self.queue.get()
        while task is not None:
            args, kwargs = task
            try:
                super(MultiprocessRefactoringTool, self).refactor_file(
                    *args, **kwargs)
            finally:
                # Always mark the task done so queue.join() can't hang.
                self.queue.task_done()
            task = self.queue.get()

    def refactor_file(self, *args, **kwargs):
        # During a multi-process run, defer to a worker via the queue;
        # otherwise refactor synchronously.
        if self.queue is not None:
            self.queue.put((args, kwargs))
        else:
            return super(MultiprocessRefactoringTool, self).refactor_file(
                *args, **kwargs)
deepseek/lib/python3.10/multiprocessing/__init__.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
# Package analogous to 'threading.py' but using processes
#
# multiprocessing/__init__.py
#
# This package is intended to duplicate the functionality (and much of
# the API) of threading.py but uses processes instead of threads.  A
# subpackage 'multiprocessing.dummy' has the same API but is a simple
# wrapper for 'threading'.
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#

import sys
from . import context

#
# Copy stuff from default context
#

__all__ = [x for x in dir(context._default_context) if not x.startswith('_')]
# Re-export every public attribute of the default context (Process, Queue,
# Pool, ...) at package level.
globals().update((name, getattr(context._default_context, name)) for name in __all__)

#
# XXX These should not really be documented or public.
#

SUBDEBUG = 5
SUBWARNING = 25

#
# Alias for main module -- will be reset by bootstrapping child processes
#

if '__main__' in sys.modules:
    sys.modules['__mp_main__'] = sys.modules['__main__']
|
deepseek/lib/python3.10/multiprocessing/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (664 Bytes). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/connection.cpython-310.pyc
ADDED
|
Binary file (25.6 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/context.cpython-310.pyc
ADDED
|
Binary file (13.1 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/forkserver.cpython-310.pyc
ADDED
|
Binary file (8.42 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/heap.cpython-310.pyc
ADDED
|
Binary file (7.92 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/managers.cpython-310.pyc
ADDED
|
Binary file (40.8 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/pool.cpython-310.pyc
ADDED
|
Binary file (25.5 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/popen_fork.cpython-310.pyc
ADDED
|
Binary file (2.52 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/popen_forkserver.cpython-310.pyc
ADDED
|
Binary file (2.46 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/popen_spawn_posix.cpython-310.pyc
ADDED
|
Binary file (2.34 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/popen_spawn_win32.cpython-310.pyc
ADDED
|
Binary file (3.73 kB). View file
|
|
|
deepseek/lib/python3.10/multiprocessing/__pycache__/process.cpython-310.pyc
ADDED
|
Binary file (11.5 kB). View file
|
|
|