Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- parrot/lib/python3.10/ensurepip/__init__.py +294 -0
- parrot/lib/python3.10/ensurepip/_bundled/__init__.py +0 -0
- parrot/lib/python3.10/idlelib/debugobj.py +142 -0
- parrot/lib/python3.10/json/scanner.py +73 -0
- parrot/lib/python3.10/lib2to3/__pycache__/__main__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/__pycache__/btm_utils.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/__pycache__/refactor.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixer_util.py +453 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_apply.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_basestring.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_dict.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_exec.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_execfile.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_exitfunc.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_filter.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_idioms.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_import.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_imports.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_itertools.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_itertools_imports.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_next.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_paren.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_reduce.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_renames.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_repr.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_set_literal.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_types.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_unicode.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_xreadlines.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_basestring.py +14 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_dict.py +106 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_import.py +99 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_itertools_imports.py +57 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_methodattrs.py +24 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_ne.py +23 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_numliterals.py +28 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_renames.py +70 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_set_literal.py +53 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_urllib.py +196 -0
- parrot/lib/python3.10/lib2to3/fixes/fix_ws_comma.py +39 -0
- parrot/lib/python3.10/lib2to3/patcomp.py +204 -0
- parrot/lib/python3.10/lib2to3/pgen2/parse.py +204 -0
- parrot/lib/python3.10/lib2to3/refactor.py +732 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/__main__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/test_all_fixers.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/test_main.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/__pycache__/test_refactor.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/data/__pycache__/infinite_recursion.cpython-310.pyc +0 -0
- parrot/lib/python3.10/lib2to3/tests/data/__pycache__/py3_test_grammar.cpython-310.pyc +0 -0
parrot/lib/python3.10/ensurepip/__init__.py
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import os
|
| 3 |
+
import os.path
|
| 4 |
+
import subprocess
|
| 5 |
+
import sys
|
| 6 |
+
import sysconfig
|
| 7 |
+
import tempfile
|
| 8 |
+
from importlib import resources
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
__all__ = ["version", "bootstrap"]
# Names and pinned versions of the projects ensurepip knows how to install.
_PACKAGE_NAMES = ('setuptools', 'pip')
_SETUPTOOLS_VERSION = "65.5.0"
_PIP_VERSION = "23.0.1"
_PROJECTS = [
    ("setuptools", _SETUPTOOLS_VERSION, "py3"),
    ("pip", _PIP_VERSION, "py3"),
]

# Packages bundled in ensurepip._bundled have wheel_name set.
# Packages from WHEEL_PKG_DIR have wheel_path set.
_Package = collections.namedtuple('Package',
                                  ('version', 'wheel_name', 'wheel_path'))

# Directory of system wheel packages. Some Linux distribution packaging
# policies recommend against bundling dependencies. For example, Fedora
# installs wheel packages in the /usr/share/python-wheels/ directory and
# doesn't install the ensurepip._bundled package.
_WHEEL_PKG_DIR = sysconfig.get_config_var('WHEEL_PKG_DIR')
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _find_packages(path):
    """Scan *path* for wheel files of the expected packages.

    Returns a dict mapping package name to a _Package whose wheel_path is
    set (wheel_name stays None for directory-sourced wheels).  A missing
    or unreadable directory yields an empty dict.
    """
    found = {}
    try:
        entries = os.listdir(path)
    except OSError:
        # Ignore: path doesn't exist or permission error
        entries = ()
    # Sorting keeps the result deterministic when a directory holds several
    # wheels of the same package; no real version comparison is attempted
    # because that case should not happen.
    for entry in sorted(entries):
        # entry is like 'pip-21.2.4-py3-none-any.whl'
        if not entry.endswith(".whl"):
            continue
        matched = next(
            (pkg for pkg in _PACKAGE_NAMES if entry.startswith(pkg + '-')),
            None)
        if matched is None:
            continue

        # Extract '21.2.4' from 'pip-21.2.4-py3-none-any.whl'
        release = entry.removeprefix(matched + '-').partition('-')[0]
        found[matched] = _Package(release, None, os.path.join(path, entry))
    return found
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _get_packages():
    """Return {name: _Package} for the packages ensurepip can install.

    Prefers wheels found in the system wheel directory (_WHEEL_PKG_DIR)
    over the bundled ones, and caches the result in the module-level
    _PACKAGES global so the directory scan happens at most once.
    """
    global _PACKAGES, _WHEEL_PKG_DIR
    if _PACKAGES is not None:
        return _PACKAGES

    packages = {}
    for name, version, py_tag in _PROJECTS:
        wheel_name = f"{name}-{version}-{py_tag}-none-any.whl"
        packages[name] = _Package(version, wheel_name, None)
    if _WHEEL_PKG_DIR:
        dir_packages = _find_packages(_WHEEL_PKG_DIR)
        # Only use the wheel package directory if all packages are found there
        if all(name in dir_packages for name in _PACKAGE_NAMES):
            packages = dir_packages
    _PACKAGES = packages
    return packages
# Lazy cache for _get_packages().  Defining it after the function is fine:
# the global is only read at call time, not at definition time.
_PACKAGES = None
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _run_pip(args, additional_paths=None):
    """Invoke pip in a child interpreter and return its exit status.

    *args* are the pip command-line arguments; *additional_paths* are
    wheel files prepended to the child's sys.path so pip can be imported
    directly from them.
    """
    # Run the bootstrapping in a subprocess to avoid leaking any state that
    # happens after pip has executed.  In particular, this avoids the case
    # where pip holds onto the files in *additional_paths*, preventing us
    # from removing them at the end of the invocation.
    code = f"""
import runpy
import sys
sys.path = {additional_paths or []} + sys.path
sys.argv[1:] = {args}
runpy.run_module("pip", run_name="__main__", alter_sys=True)
"""

    cmd = [
        sys.executable,
        '-W',
        'ignore::DeprecationWarning',
        '-c',
        code,
    ]
    if sys.flags.isolated:
        # run code in isolated mode if currently running isolated
        cmd.insert(1, '-I')
    # check=True raises CalledProcessError on a non-zero exit, so reaching
    # the return always means status 0.
    return subprocess.run(cmd, check=True).returncode
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def version():
    """Return the version string of the pip that ensurepip would install."""
    pip_package = _get_packages()['pip']
    return pip_package.version
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def _disable_pip_configuration_settings():
|
| 115 |
+
# We deliberately ignore all pip environment variables
|
| 116 |
+
# when invoking pip
|
| 117 |
+
# See http://bugs.python.org/issue19734 for details
|
| 118 |
+
keys_to_remove = [k for k in os.environ if k.startswith("PIP_")]
|
| 119 |
+
for k in keys_to_remove:
|
| 120 |
+
del os.environ[k]
|
| 121 |
+
# We also ignore the settings in the default pip configuration file
|
| 122 |
+
# See http://bugs.python.org/issue20053 for details
|
| 123 |
+
os.environ['PIP_CONFIG_FILE'] = os.devnull
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def bootstrap(*, root=None, upgrade=False, user=False,
              altinstall=False, default_pip=False,
              verbosity=0):
    """
    Bootstrap pip into the current Python installation (or the given root
    directory).

    Note that calling this function will alter both sys.path and os.environ.
    """
    # Delegate to _bootstrap and discard its pip status code.
    _bootstrap(
        root=root,
        upgrade=upgrade,
        user=user,
        altinstall=altinstall,
        default_pip=default_pip,
        verbosity=verbosity,
    )
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _bootstrap(*, root=None, upgrade=False, user=False,
               altinstall=False, default_pip=False,
               verbosity=0):
    """
    Bootstrap pip into the current Python installation (or the given root
    directory). Returns pip command status code.

    Note that calling this function will alter both sys.path and os.environ.
    """
    if altinstall and default_pip:
        raise ValueError("Cannot use altinstall and default_pip together")

    sys.audit("ensurepip.bootstrap", root)

    _disable_pip_configuration_settings()

    # By default, installing pip and setuptools installs all of the
    # following scripts (X.Y == running Python version):
    #
    #   pip, pipX, pipX.Y, easy_install, easy_install-X.Y
    #
    # pip 1.5+ allows ensurepip to request that some of those be left out
    if altinstall:
        # omit pip, pipX and easy_install
        os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
    elif not default_pip:
        # omit pip and easy_install
        os.environ["ENSUREPIP_OPTIONS"] = "install"

    with tempfile.TemporaryDirectory() as tmpdir:
        # Put our bundled wheels into a temporary directory and construct the
        # additional paths that need added to sys.path
        additional_paths = []
        for name, package in _get_packages().items():
            if package.wheel_name:
                # Use bundled wheel package
                from ensurepip import _bundled
                wheel_name = package.wheel_name
                whl = resources.read_binary(_bundled, wheel_name)
            else:
                # Use the wheel package directory (_WHEEL_PKG_DIR)
                with open(package.wheel_path, "rb") as fp:
                    whl = fp.read()
                wheel_name = os.path.basename(package.wheel_path)

            # Copy the wheel into the temp dir; pip will be told to look
            # there via --find-links below.
            filename = os.path.join(tmpdir, wheel_name)
            with open(filename, "wb") as fp:
                fp.write(whl)

            additional_paths.append(filename)

        # Construct the arguments to be passed to the pip command
        args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
        if root:
            args += ["--root", root]
        if upgrade:
            args += ["--upgrade"]
        if user:
            args += ["--user"]
        if verbosity:
            args += ["-" + "v" * verbosity]

        return _run_pip([*args, *_PACKAGE_NAMES], additional_paths)
|
| 204 |
+
|
| 205 |
+
def _uninstall_helper(*, verbosity=0):
    """Helper to support a clean default uninstall process on Windows.

    Note that calling this function may alter os.environ.
    """
    # Nothing to do if pip was never installed, or has been removed
    try:
        import pip
    except ImportError:
        return

    # If the installed pip version doesn't match the available one,
    # leave it alone
    available_version = version()
    if pip.__version__ != available_version:
        print(f"ensurepip will only uninstall a matching version "
              f"({pip.__version__!r} installed, "
              f"{available_version!r} available)",
              file=sys.stderr)
        return

    _disable_pip_configuration_settings()

    # Construct the arguments to be passed to the pip command
    args = ["uninstall", "-y", "--disable-pip-version-check"]
    if verbosity:
        args += ["-" + "v" * verbosity]

    # Uninstall in reverse install order (pip before setuptools).
    return _run_pip([*args, *reversed(_PACKAGE_NAMES)])
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def _main(argv=None):
    """Command-line entry point for ``python -m ensurepip``.

    Parses the options and returns the pip status code from _bootstrap().
    """
    import argparse
    parser = argparse.ArgumentParser(prog="python -m ensurepip")
    parser.add_argument(
        "--version",
        action="version",
        version="pip {}".format(version()),
        help="Show the version of pip that is bundled with this Python.",
    )
    parser.add_argument(
        "-v", "--verbose",
        action="count",
        default=0,
        dest="verbosity",
        help=("Give more output. Option is additive, and can be used up to 3 "
              "times."),
    )
    parser.add_argument(
        "-U", "--upgrade",
        action="store_true",
        default=False,
        help="Upgrade pip and dependencies, even if already installed.",
    )
    parser.add_argument(
        "--user",
        action="store_true",
        default=False,
        help="Install using the user scheme.",
    )
    parser.add_argument(
        "--root",
        default=None,
        help="Install everything relative to this alternate root directory.",
    )
    parser.add_argument(
        "--altinstall",
        action="store_true",
        default=False,
        help=("Make an alternate install, installing only the X.Y versioned "
              "scripts (Default: pipX, pipX.Y, easy_install-X.Y)."),
    )
    parser.add_argument(
        "--default-pip",
        action="store_true",
        default=False,
        help=("Make a default pip install, installing the unqualified pip "
              "and easy_install in addition to the versioned scripts."),
    )

    args = parser.parse_args(argv)

    return _bootstrap(
        root=args.root,
        upgrade=args.upgrade,
        user=args.user,
        verbosity=args.verbosity,
        altinstall=args.altinstall,
        default_pip=args.default_pip,
    )
|
parrot/lib/python3.10/ensurepip/_bundled/__init__.py
ADDED
|
File without changes
|
parrot/lib/python3.10/idlelib/debugobj.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# XXX TO DO:
|
| 2 |
+
# - popup menu
|
| 3 |
+
# - support partial or total redisplay
|
| 4 |
+
# - more doc strings
|
| 5 |
+
# - tooltips
|
| 6 |
+
|
| 7 |
+
# object browser
|
| 8 |
+
|
| 9 |
+
# XXX TO DO:
|
| 10 |
+
# - for classes/modules, add "open source" to object browser
|
| 11 |
+
from reprlib import Repr
|
| 12 |
+
|
| 13 |
+
from idlelib.tree import TreeItem, TreeNode, ScrolledCanvas
|
| 14 |
+
|
| 15 |
+
myrepr = Repr()
|
| 16 |
+
myrepr.maxstring = 100
|
| 17 |
+
myrepr.maxother = 100
|
| 18 |
+
|
| 19 |
+
class ObjectTreeItem(TreeItem):
    """Base tree item wrapping an arbitrary Python object for the browser."""
    def __init__(self, labeltext, object, setfunction=None):
        # setfunction, when given, is called with the new value whenever the
        # user edits this item; it is how edits propagate back to the
        # containing object.
        self.labeltext = labeltext
        self.object = object
        self.setfunction = setfunction
    def GetLabelText(self):
        return self.labeltext
    def GetText(self):
        # myrepr truncates long reprs so the canvas stays readable.
        return myrepr.repr(self.object)
    def GetIconName(self):
        # Returning None (implicitly) for expandable items lets the tree
        # widget pick its default folder icon.
        if not self.IsExpandable():
            return "python"
    def IsEditable(self):
        return self.setfunction is not None
    def SetText(self, text):
        # NOTE(review): eval of user-typed text — acceptable in a local
        # debugger UI, but never feed untrusted input through here.
        try:
            value = eval(text)
            self.setfunction(value)
        except:
            # Any failure (bad expression, setter error) silently keeps
            # the old value; the UI simply shows no change.
            pass
        else:
            self.object = value
    def IsExpandable(self):
        return not not dir(self.object)
    def GetSubList(self):
        keys = dir(self.object)
        sublist = []
        for key in keys:
            try:
                value = getattr(self.object, key)
            except AttributeError:
                continue
            item = make_objecttreeitem(
                str(key) + " =",
                value,
                # key/object are bound as lambda defaults so each item
                # captures the current loop values, not the last ones.
                lambda value, key=key, object=self.object:
                    setattr(object, key, value))
            sublist.append(item)
        return sublist
|
| 58 |
+
|
| 59 |
+
class ClassTreeItem(ObjectTreeItem):
    """Tree item for a class object; lists its base classes first."""

    def IsExpandable(self):
        return True

    def GetSubList(self):
        items = ObjectTreeItem.GetSubList(self)
        bases = self.object.__bases__
        if len(bases) == 1:
            bases_item = make_objecttreeitem("__bases__[0] =", bases[0])
        else:
            bases_item = make_objecttreeitem("__bases__ =", bases)
        items.insert(0, bases_item)
        return items
|
| 71 |
+
|
| 72 |
+
class AtomicObjectTreeItem(ObjectTreeItem):
    """Tree item for scalar values, which never have children."""

    def IsExpandable(self):
        return False
|
| 75 |
+
|
| 76 |
+
class SequenceTreeItem(ObjectTreeItem):
    """Tree item for indexable containers; children are the elements."""
    def IsExpandable(self):
        return len(self.object) > 0
    def keys(self):
        # Subclasses (e.g. DictTreeItem) override this to return real keys.
        return range(len(self.object))
    def GetSubList(self):
        sublist = []
        for key in self.keys():
            try:
                value = self.object[key]
            except KeyError:
                continue
            # key/object are bound as parameter defaults so each child's
            # setter writes back to the slot it was created for, not the
            # last loop value.
            def setfunction(value, key=key, object=self.object):
                object[key] = value
            item = make_objecttreeitem("%r:" % (key,), value, setfunction)
            sublist.append(item)
        return sublist
|
| 93 |
+
|
| 94 |
+
class DictTreeItem(SequenceTreeItem):
    """Tree item for a dict; children are entries, sorted by key when possible."""

    def keys(self):
        ordered = list(self.object.keys())
        try:
            ordered.sort()
        except:
            # Keys of mixed, unorderable types: fall back to dict order.
            pass
        return ordered
|
| 102 |
+
|
| 103 |
+
# Map a value's exact type to the tree-item class used to display it.
# Types not listed fall back to the generic ObjectTreeItem.
dispatch = {
    int: AtomicObjectTreeItem,
    float: AtomicObjectTreeItem,
    str: AtomicObjectTreeItem,
    tuple: SequenceTreeItem,
    list: SequenceTreeItem,
    dict: DictTreeItem,
    type: ClassTreeItem,
}

def make_objecttreeitem(labeltext, object, setfunction=None):
    """Create the tree item class appropriate for *object*'s exact type.

    Dispatch is on type(object) rather than isinstance, so subclasses of
    the builtin types get the generic ObjectTreeItem treatment.
    """
    # dict.get with a default replaces the original if/in/else lookup.
    c = dispatch.get(type(object), ObjectTreeItem)
    return c(labeltext, object, setfunction)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _object_browser(parent):  # htest #
    """Open a Toplevel browsing the sys module in a tree (manual widget test)."""
    import sys
    from tkinter import Toplevel
    top = Toplevel(parent)
    top.title("Test debug object browser")
    # Position the test window near the parent, offset so both stay visible.
    x, y = map(int, parent.geometry().split('+')[1:])
    top.geometry("+%d+%d" % (x + 100, y + 175))
    top.configure(bd=0, bg="yellow")
    top.focus_set()
    sc = ScrolledCanvas(top, bg="white", highlightthickness=0, takefocus=1)
    sc.frame.pack(expand=1, fill="both")
    item = make_objecttreeitem("sys", sys)
    node = TreeNode(sc.canvas, None, item)
    node.update()
|
| 136 |
+
|
| 137 |
+
if __name__ == '__main__':
    # Run the unit tests first (without exiting), then launch the
    # interactive human-verified widget test.
    from unittest import main
    main('idlelib.idle_test.test_debugobj', verbosity=2, exit=False)

    from idlelib.idle_test.htest import run
    run(_object_browser)
|
parrot/lib/python3.10/json/scanner.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""JSON token scanner
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
try:
|
| 5 |
+
from _json import make_scanner as c_make_scanner
|
| 6 |
+
except ImportError:
|
| 7 |
+
c_make_scanner = None
|
| 8 |
+
|
| 9 |
+
__all__ = ['make_scanner']
|
| 10 |
+
|
| 11 |
+
NUMBER_RE = re.compile(
|
| 12 |
+
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
|
| 13 |
+
(re.VERBOSE | re.MULTILINE | re.DOTALL))
|
| 14 |
+
|
| 15 |
+
def py_make_scanner(context):
    """Return a scan_once(string, idx) callable built from *context*.

    *context* (a JSONDecoder) supplies the parse hooks; they are bound to
    locals here so the closure avoids repeated attribute lookups per token.
    scan_once returns (value, end_index) or raises StopIteration(idx) on
    malformed input.
    """
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    memo = context.memo

    def _scan_once(string, idx):
        try:
            nextchar = string[idx]
        except IndexError:
            # StopIteration carries the failing index for error reporting.
            raise StopIteration(idx) from None

        if nextchar == '"':
            return parse_string(string, idx + 1, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), strict,
                _scan_once, object_hook, object_pairs_hook, memo)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            # A fraction or exponent part makes the literal a float.
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration(idx)

    def scan_once(string, idx):
        try:
            return _scan_once(string, idx)
        finally:
            # The per-decode key memo must not survive between top-level calls.
            memo.clear()

    return scan_once

# Prefer the C accelerator when the _json extension is available.
make_scanner = c_make_scanner or py_make_scanner
|
parrot/lib/python3.10/lib2to3/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (470 Bytes). View file
|
|
|
parrot/lib/python3.10/lib2to3/__pycache__/btm_utils.cpython-310.pyc
ADDED
|
Binary file (6.42 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/__pycache__/refactor.cpython-310.pyc
ADDED
|
Binary file (21 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixer_util.py
ADDED
|
@@ -0,0 +1,453 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions, node construction macros, etc."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .pgen2 import token
|
| 6 |
+
from .pytree import Leaf, Node
|
| 7 |
+
from .pygram import python_symbols as syms
|
| 8 |
+
from . import patcomp
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
###########################################################
|
| 12 |
+
### Common node-construction "macros"
|
| 13 |
+
###########################################################
|
| 14 |
+
|
| 15 |
+
def KeywordArg(keyword, value):
    """Build a keyword-argument node: keyword=value."""
    return Node(syms.argument,
                [keyword, Leaf(token.EQUAL, "="), value])

def LParen():
    """An opening-parenthesis leaf."""
    return Leaf(token.LPAR, "(")

def RParen():
    """A closing-parenthesis leaf."""
    return Leaf(token.RPAR, ")")

def Assign(target, source):
    """Build an assignment statement"""
    if not isinstance(target, list):
        target = [target]
    if not isinstance(source, list):
        # A single source node gets a leading space so the output reads
        # "x = y" rather than "x =y".
        source.prefix = " "
        source = [source]

    return Node(syms.atom,
                target + [Leaf(token.EQUAL, "=", prefix=" ")] + source)

def Name(name, prefix=None):
    """Return a NAME leaf"""
    return Leaf(token.NAME, name, prefix=prefix)

def Attr(obj, attr):
    """A node tuple for obj.attr"""
    return [obj, Node(syms.trailer, [Dot(), attr])]

def Comma():
    """A comma leaf"""
    return Leaf(token.COMMA, ",")

def Dot():
    """A period (.) leaf"""
    return Leaf(token.DOT, ".")
|
| 51 |
+
|
| 52 |
+
def ArgList(args, lparen=LParen(), rparen=RParen()):
    """A parenthesised argument list, used by Call()"""
    # The default leaves are created once at definition time, but they are
    # cloned below, so the shared default objects never end up in a tree.
    node = Node(syms.trailer, [lparen.clone(), rparen.clone()])
    if args:
        node.insert_child(1, Node(syms.arglist, args))
    return node

def Call(func_name, args=None, prefix=None):
    """A function call"""
    node = Node(syms.power, [func_name, ArgList(args)])
    if prefix is not None:
        node.prefix = prefix
    return node

def Newline():
    """A newline literal"""
    return Leaf(token.NEWLINE, "\n")

def BlankLine():
    """A blank line"""
    return Leaf(token.NEWLINE, "")

def Number(n, prefix=None):
    """A NUMBER leaf with the given text."""
    return Leaf(token.NUMBER, n, prefix=prefix)
|
| 76 |
+
|
| 77 |
+
def Subscript(index_node):
    """A numeric or string subscript"""
    # NOTE: lib2to3 historically uses LBRACE/RBRACE token types for the
    # bracket leaves here; the leaf *values* "[" / "]" are what matter
    # when the tree is rendered back to source.
    return Node(syms.trailer, [Leaf(token.LBRACE, "["),
                               index_node,
                               Leaf(token.RBRACE, "]")])


def String(string, prefix=None):
    """A string leaf"""
    return Leaf(token.STRING, string, prefix=prefix)
|
| 86 |
+
|
| 87 |
+
def ListComp(xp, fp, it, test=None):
    """A list comprehension of the form [xp for fp in it if test].

    If test is None, the "if test" part is omitted.
    """
    # Normalize whitespace on the caller-supplied nodes so the rendered
    # output reads "[xp for fp in it if test]".
    xp.prefix = ""
    fp.prefix = " "
    it.prefix = " "
    for_leaf = Leaf(token.NAME, "for")
    for_leaf.prefix = " "
    in_leaf = Leaf(token.NAME, "in")
    in_leaf.prefix = " "
    inner_args = [for_leaf, fp, in_leaf, it]
    if test:
        test.prefix = " "
        if_leaf = Leaf(token.NAME, "if")
        if_leaf.prefix = " "
        inner_args.append(Node(syms.comp_if, [if_leaf, test]))
    inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)])
    return Node(syms.atom,
                [Leaf(token.LBRACE, "["),
                 inner,
                 Leaf(token.RBRACE, "]")])
|
| 110 |
+
|
| 111 |
+
def FromImport(package_name, name_leafs):
    """ Return an import statement in the form:
        from package import name_leafs"""
    # XXX: May not handle dotted imports properly (eg, package_name='foo.bar')
    #assert package_name == '.' or '.' not in package_name, "FromImport has "\
    #       "not been tested with dotted package names -- use at your own "\
    #       "peril!"

    for leaf in name_leafs:
        # Pull the leaves out of their old tree
        leaf.remove()

    children = [Leaf(token.NAME, "from"),
                Leaf(token.NAME, package_name, prefix=" "),
                Leaf(token.NAME, "import", prefix=" "),
                Node(syms.import_as_names, name_leafs)]
    imp = Node(syms.import_from, children)
    return imp
|
| 129 |
+
|
| 130 |
+
def ImportAndCall(node, results, names):
    """Returns an import statement and calls a method
    of the module:

    import module
    module.name()

    *results* must carry "obj", "after", "lpar" and "rpar" match keys;
    *names* is a (module, attribute) pair.
    """
    obj = results["obj"].clone()
    # Reuse an existing arglist as-is; wrap a single argument in one.
    if obj.type == syms.arglist:
        newarglist = obj.clone()
    else:
        newarglist = Node(syms.arglist, [obj.clone()])
    after = results["after"]
    if after:
        after = [n.clone() for n in after]
    # Build names[0].names[1](<args>) followed by any trailing trailers.
    new = Node(syms.power,
               Attr(Name(names[0]), Name(names[1])) +
               [Node(syms.trailer,
                     [results["lpar"].clone(),
                      newarglist,
                      results["rpar"].clone()])] + after)
    new.prefix = node.prefix
    return new
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
###########################################################
|
| 155 |
+
### Determine whether a node represents a given literal
|
| 156 |
+
###########################################################
|
| 157 |
+
|
| 158 |
+
def is_tuple(node):
|
| 159 |
+
"""Does the node represent a tuple literal?"""
|
| 160 |
+
if isinstance(node, Node) and node.children == [LParen(), RParen()]:
|
| 161 |
+
return True
|
| 162 |
+
return (isinstance(node, Node)
|
| 163 |
+
and len(node.children) == 3
|
| 164 |
+
and isinstance(node.children[0], Leaf)
|
| 165 |
+
and isinstance(node.children[1], Node)
|
| 166 |
+
and isinstance(node.children[2], Leaf)
|
| 167 |
+
and node.children[0].value == "("
|
| 168 |
+
and node.children[2].value == ")")
|
| 169 |
+
|
| 170 |
+
def is_list(node):
    """Does the node represent a list literal?"""
    if not isinstance(node, Node) or len(node.children) <= 1:
        return False
    opener = node.children[0]
    closer = node.children[-1]
    return (isinstance(opener, Leaf)
            and isinstance(closer, Leaf)
            and opener.value == "["
            and closer.value == "]")
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
###########################################################
|
| 181 |
+
### Misc
|
| 182 |
+
###########################################################
|
| 183 |
+
|
| 184 |
+
def parenthesize(node):
    """Wrap *node* in parentheses: node -> (node)."""
    return Node(syms.atom, [LParen(), node, RParen()])


# Builtins that fully consume the iterable passed to them, so passing an
# iterator instead of a list cannot change behavior in that position.
consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum",
                   "min", "max", "enumerate"}
|
| 190 |
+
|
| 191 |
+
def attr_chain(obj, attr):
    """Follow an attribute chain.

    If you have a chain of objects where a.foo -> b, b.foo-> c, etc,
    use this to iterate over all objects in the chain. Iteration is
    terminated by getattr(x, attr) is None.

    Args:
        obj: the starting object
        attr: the name of the chaining attribute

    Yields:
        Each successive object in the chain.
    """
    # Fix: the original local was named `next`, shadowing the builtin.
    nxt = getattr(obj, attr)
    while nxt:
        yield nxt
        nxt = getattr(nxt, attr)
|
| 209 |
+
|
| 210 |
+
# Source-text patterns used by in_special_context().  They are compiled
# lazily (see pats_built below) because compiling requires the pattern
# compiler, which is relatively expensive at import time.
p0 = """for_stmt< 'for' any 'in' node=any ':' any* >
        | comp_for< 'for' any 'in' node=any any* >
     """
p1 = """
power<
    ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
      'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) )
    trailer< '(' node=any ')' >
    any*
>
"""
p2 = """
power<
    ( 'sorted' | 'enumerate' )
    trailer< '(' arglist<node=any any*> ')' >
    any*
>
"""
# Flipped to True once p0/p1/p2 have been replaced by compiled patterns.
pats_built = False
|
| 229 |
+
def in_special_context(node):
    """ Returns true if node is in an environment where all that is required
        of it is being iterable (ie, it doesn't matter if it returns a list
        or an iterator).
        See test_map_nochange in test_fixers.py for some examples and tests.
    """
    global p0, p1, p2, pats_built
    if not pats_built:
        # Compile the module-level pattern strings in place on first use.
        p0 = patcomp.compile_pattern(p0)
        p1 = patcomp.compile_pattern(p1)
        p2 = patcomp.compile_pattern(p2)
        pats_built = True
    patterns = [p0, p1, p2]
    # Try p0 against the parent, p1 against the grandparent, p2 against
    # the great-grandparent (zip pairs patterns with successive ancestors).
    for pattern, parent in zip(patterns, attr_chain(node, "parent")):
        results = {}
        if pattern.match(parent, results) and results["node"] is node:
            return True
    return False
|
| 247 |
+
|
| 248 |
+
def is_probably_builtin(node):
    """
    Check that something isn't an attribute or function name etc.
    """
    prev = node.prev_sibling
    if prev is not None and prev.type == token.DOT:
        # Attribute lookup.
        return False
    parent = node.parent
    if parent.type in (syms.funcdef, syms.classdef):
        # The name being defined by a def/class statement.
        return False
    if parent.type == syms.expr_stmt and parent.children[0] is node:
        # Assignment.
        return False
    if parent.type == syms.parameters or \
            (parent.type == syms.typedargslist and (
                (prev is not None and prev.type == token.COMMA) or
                parent.children[0] is node
            )):
        # The name of an argument.
        return False
    return True
|
| 270 |
+
|
| 271 |
+
def find_indentation(node):
    """Find the indentation of *node*.

    Walks up the tree looking for the nearest enclosing suite that has an
    INDENT token; returns "" when *node* is at the top level.
    """
    while node is not None:
        if node.type == syms.suite and len(node.children) > 2:
            indent = node.children[1]
            if indent.type == token.INDENT:
                return indent.value
        node = node.parent
    return ""
|
| 280 |
+
|
| 281 |
+
###########################################################
|
| 282 |
+
### The following functions are to find bindings in a suite
|
| 283 |
+
###########################################################
|
| 284 |
+
|
| 285 |
+
def make_suite(node):
    """Return *node* as a suite node, wrapping a clone of it if needed.

    The clone keeps the original tree untouched; the original parent is
    re-attached to the synthetic suite so upward traversal still works.
    """
    if node.type == syms.suite:
        return node
    node = node.clone()
    parent, node.parent = node.parent, None
    suite = Node(syms.suite, [node])
    suite.parent = parent
    return suite
|
| 293 |
+
|
| 294 |
+
def find_root(node):
    """Find the top level namespace."""
    # Scamper up to the top level namespace
    while node.type != syms.file_input:
        node = node.parent
        if not node:
            # Detached subtree: we fell off the top without hitting
            # a file_input node.
            raise ValueError("root found before file_input node was found.")
    return node
|
| 302 |
+
|
| 303 |
+
def does_tree_import(package, name, node):
    """ Returns true if name is imported from package at the
        top level of the tree which node belongs to.
        To cover the case of an import like 'import foo', use
        None for the package and 'foo' for the name. """
    binding = find_binding(name, find_root(node), package)
    return bool(binding)


def is_import(node):
    """Returns true if the node is an import statement."""
    return node.type in (syms.import_name, syms.import_from)
|
| 314 |
+
|
| 315 |
+
def touch_import(package, name, node):
    """ Works like `does_tree_import` but adds an import statement
        if it was not imported. """
    def is_import_stmt(node):
        # A simple_stmt whose first child is import_name/import_from.
        return (node.type == syms.simple_stmt and node.children and
                is_import(node.children[0]))

    root = find_root(node)

    if does_tree_import(package, name, root):
        # Already imported; nothing to do.
        return

    # figure out where to insert the new import.  First try to find
    # the first import and then skip to the last one.
    insert_pos = offset = 0
    for idx, node in enumerate(root.children):
        if not is_import_stmt(node):
            continue
        # Found the first import; scan forward past the contiguous run
        # of import statements so the new import lands after them.
        for offset, node2 in enumerate(root.children[idx:]):
            if not is_import_stmt(node2):
                break
        insert_pos = idx + offset
        break

    # if there are no imports where we can insert, find the docstring.
    # if that also fails, we stick to the beginning of the file
    if insert_pos == 0:
        for idx, node in enumerate(root.children):
            if (node.type == syms.simple_stmt and node.children and
               node.children[0].type == token.STRING):
                insert_pos = idx + 1
                break

    if package is None:
        # Plain "import name".
        import_ = Node(syms.import_name, [
            Leaf(token.NAME, "import"),
            Leaf(token.NAME, name, prefix=" ")
        ])
    else:
        # "from package import name".
        import_ = FromImport(package, [Leaf(token.NAME, name, prefix=" ")])

    children = [import_, Newline()]
    root.insert_child(insert_pos, Node(syms.simple_stmt, children))
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
# Statement types that bind a name via their header (class X / def X).
_def_syms = {syms.classdef, syms.funcdef}
def find_binding(name, node, package=None):
    """ Returns the node which binds variable name, otherwise None.
        If optional argument package is supplied, only imports will
        be returned.
        See test cases for examples."""
    for child in node.children:
        ret = None
        if child.type == syms.for_stmt:
            # "for name in ...": the loop target binds the name directly.
            if _find(name, child.children[1]):
                return child
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n: ret = n
        elif child.type in (syms.if_stmt, syms.while_stmt):
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n: ret = n
        elif child.type == syms.try_stmt:
            # children[2] is the try-suite; otherwise scan each handler
            # suite, which follows every ":" token.
            n = find_binding(name, make_suite(child.children[2]), package)
            if n:
                ret = n
            else:
                for i, kid in enumerate(child.children[3:]):
                    if kid.type == token.COLON and kid.value == ":":
                        # i+3 is the colon, i+4 is the suite
                        n = find_binding(name, make_suite(child.children[i+4]), package)
                        if n: ret = n
        elif child.type in _def_syms and child.children[1].value == name:
            ret = child
        elif _is_import_binding(child, name, package):
            ret = child
        elif child.type == syms.simple_stmt:
            ret = find_binding(name, child, package)
        elif child.type == syms.expr_stmt:
            if _find(name, child.children[0]):
                ret = child

        if ret:
            # When a package filter is given, only import bindings count.
            if not package:
                return ret
            if is_import(ret):
                return ret
    return None
|
| 402 |
+
|
| 403 |
+
# Node types whose interiors introduce a new scope (or attribute access),
# so _find() does not descend into them.
_block_syms = {syms.funcdef, syms.classdef, syms.trailer}
def _find(name, node):
    """Depth-first search for a NAME leaf with value *name* under *node*.

    Returns the leaf, or None.  Does not descend into _block_syms nodes.
    """
    nodes = [node]
    while nodes:
        node = nodes.pop()
        # Types > 256 are non-terminal grammar symbols (interior nodes).
        if node.type > 256 and node.type not in _block_syms:
            nodes.extend(node.children)
        elif node.type == token.NAME and node.value == name:
            return node
    return None
|
| 413 |
+
|
| 414 |
+
def _is_import_binding(node, name, package=None):
    """ Will return node if node will import name, or node
        will import * from package.  None is returned otherwise.
        See test cases for examples. """

    if node.type == syms.import_name and not package:
        # "import ..." forms (only when no package filter is given).
        imp = node.children[1]
        if imp.type == syms.dotted_as_names:
            # import a, b as c, ...
            for child in imp.children:
                if child.type == syms.dotted_as_name:
                    if child.children[2].value == name:
                        return node
                elif child.type == token.NAME and child.value == name:
                    return node
        elif imp.type == syms.dotted_as_name:
            # import a.b as c — the bound name is the alias.
            last = imp.children[-1]
            if last.type == token.NAME and last.value == name:
                return node
        elif imp.type == token.NAME and imp.value == name:
            return node
    elif node.type == syms.import_from:
        # str(...) is used to make life easier here, because
        # from a.b import parses to ['import', ['a', '.', 'b'], ...]
        if package and str(node.children[1]).strip() != package:
            return None
        n = node.children[3]
        if package and _find("as", n):
            # See test_from_import_as for explanation
            return None
        elif n.type == syms.import_as_names and _find(name, n):
            return node
        elif n.type == syms.import_as_name:
            child = n.children[2]
            if child.type == token.NAME and child.value == name:
                return node
        elif n.type == token.NAME and n.value == name:
            return node
        elif package and n.type == token.STAR:
            # "from package import *" binds everything.
            return node
    return None
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_apply.cpython-310.pyc
ADDED
|
Binary file (1.91 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_basestring.cpython-310.pyc
ADDED
|
Binary file (915 Bytes). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_dict.cpython-310.pyc
ADDED
|
Binary file (3.28 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_exec.cpython-310.pyc
ADDED
|
Binary file (1.39 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_execfile.cpython-310.pyc
ADDED
|
Binary file (1.67 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_exitfunc.cpython-310.pyc
ADDED
|
Binary file (2.56 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_filter.cpython-310.pyc
ADDED
|
Binary file (2.69 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_idioms.cpython-310.pyc
ADDED
|
Binary file (3.9 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_import.cpython-310.pyc
ADDED
|
Binary file (3.07 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_imports.cpython-310.pyc
ADDED
|
Binary file (4.88 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_itertools.cpython-310.pyc
ADDED
|
Binary file (1.8 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_itertools_imports.cpython-310.pyc
ADDED
|
Binary file (1.56 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_next.cpython-310.pyc
ADDED
|
Binary file (3.33 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_paren.cpython-310.pyc
ADDED
|
Binary file (1.64 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_reduce.cpython-310.pyc
ADDED
|
Binary file (1.12 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_renames.cpython-310.pyc
ADDED
|
Binary file (2.28 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_repr.cpython-310.pyc
ADDED
|
Binary file (1.1 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_set_literal.cpython-310.pyc
ADDED
|
Binary file (1.94 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_types.cpython-310.pyc
ADDED
|
Binary file (2.2 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_unicode.cpython-310.pyc
ADDED
|
Binary file (1.81 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/__pycache__/fix_xreadlines.cpython-310.pyc
ADDED
|
Binary file (1.12 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/fixes/fix_basestring.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for basestring -> str."""
|
| 2 |
+
# Author: Christian Heimes
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
class FixBasestring(fixer_base.BaseFix):
    """Replace every bare ``basestring`` name with ``str``."""
    BM_compatible = True

    PATTERN = "'basestring'"

    def transform(self, node, results):
        # Preserve the original leading whitespace/comments.
        return Name("str", prefix=node.prefix)
|
parrot/lib/python3.10/lib2to3/fixes/fix_dict.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for dict methods.
|
| 5 |
+
|
| 6 |
+
d.keys() -> list(d.keys())
|
| 7 |
+
d.items() -> list(d.items())
|
| 8 |
+
d.values() -> list(d.values())
|
| 9 |
+
|
| 10 |
+
d.iterkeys() -> iter(d.keys())
|
| 11 |
+
d.iteritems() -> iter(d.items())
|
| 12 |
+
d.itervalues() -> iter(d.values())
|
| 13 |
+
|
| 14 |
+
d.viewkeys() -> d.keys()
|
| 15 |
+
d.viewitems() -> d.items()
|
| 16 |
+
d.viewvalues() -> d.values()
|
| 17 |
+
|
| 18 |
+
Except in certain very specific contexts: the iter() can be dropped
|
| 19 |
+
when the context is list(), sorted(), iter() or for...in; the list()
|
| 20 |
+
can be dropped when the context is list() or sorted() (but not iter()
|
| 21 |
+
or for...in!). Special contexts that apply to both: list(), sorted(), tuple()
|
| 22 |
+
set(), any(), all(), sum().
|
| 23 |
+
|
| 24 |
+
Note: iter(d.keys()) could be written as iter(d) but since the
|
| 25 |
+
original d.iterkeys() was also redundant we don't fix this. And there
|
| 26 |
+
are (rare) contexts where it makes a difference (e.g. when passing it
|
| 27 |
+
as an argument to a function that introspects the argument).
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
# Local imports
|
| 31 |
+
from .. import pytree
|
| 32 |
+
from .. import patcomp
|
| 33 |
+
from .. import fixer_base
|
| 34 |
+
from ..fixer_util import Name, Call, Dot
|
| 35 |
+
from .. import fixer_util
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
iter_exempt = fixer_util.consuming_calls | {"iter"}
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class FixDict(fixer_base.BaseFix):
    """Rewrite dict view/iterator method calls (see module docstring)."""
    BM_compatible = True

    PATTERN = """
    power< head=any+
         trailer< '.' method=('keys'|'items'|'values'|
                              'iterkeys'|'iteritems'|'itervalues'|
                              'viewkeys'|'viewitems'|'viewvalues') >
         parens=trailer< '(' ')' >
         tail=any*
    >
    """

    def transform(self, node, results):
        head = results["head"]
        method = results["method"][0] # Extract node for method name
        tail = results["tail"]
        syms = self.syms
        method_name = method.value
        isiter = method_name.startswith("iter")
        isview = method_name.startswith("view")
        if isiter or isview:
            # Strip the "iter"/"view" prefix: iterkeys -> keys, etc.
            method_name = method_name[4:]
        assert method_name in ("keys", "items", "values"), repr(method)
        # Work on clones so the original tree is left intact.
        head = [n.clone() for n in head]
        tail = [n.clone() for n in tail]
        # "special" contexts are those where an iterator is acceptable,
        # so no list()/iter() wrapper is needed.
        special = not tail and self.in_special_context(node, isiter)
        args = head + [pytree.Node(syms.trailer,
                                   [Dot(),
                                    Name(method_name,
                                         prefix=method.prefix)]),
                       results["parens"].clone()]
        new = pytree.Node(syms.power, args)
        if not (special or isview):
            new.prefix = ""
            new = Call(Name("iter" if isiter else "list"), [new])
        if tail:
            new = pytree.Node(syms.power, [new] + tail)
        new.prefix = node.prefix
        return new

    # Pattern for a call wrapping the matched node: func(<node>).
    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    # Pattern for the matched node as a for-loop / comprehension iterable.
    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node, isiter):
        """Return True if the wrapper call can safely be dropped here."""
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
              self.p1.match(node.parent.parent, results) and
              results["node"] is node):
            if isiter:
                # iter(d.iterkeys()) -> iter(d.keys()), etc.
                return results["func"].value in iter_exempt
            else:
                # list(d.keys()) -> list(d.keys()), etc.
                return results["func"].value in fixer_util.consuming_calls
        if not isiter:
            return False
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node
|
parrot/lib/python3.10/lib2to3/fixes/fix_import.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for import statements.
|
| 2 |
+
If spam is being imported from the local directory, this import:
|
| 3 |
+
from spam import eggs
|
| 4 |
+
Becomes:
|
| 5 |
+
from .spam import eggs
|
| 6 |
+
|
| 7 |
+
And this import:
|
| 8 |
+
import spam
|
| 9 |
+
Becomes:
|
| 10 |
+
from . import spam
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
# Local imports
|
| 14 |
+
from .. import fixer_base
|
| 15 |
+
from os.path import dirname, join, exists, sep
|
| 16 |
+
from ..fixer_util import FromImport, syms, token
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def traverse_imports(names):
    """
    Walks over all the names imported in a dotted_as_names node.

    Yields each imported module name as a string ("a.b" for dotted
    names; for "x as y" forms, the real module name x, not the alias).
    """
    pending = [names]
    while pending:
        node = pending.pop()
        if node.type == token.NAME:
            yield node.value
        elif node.type == syms.dotted_name:
            yield "".join([ch.value for ch in node.children])
        elif node.type == syms.dotted_as_name:
            # Only the module part (child 0) matters, not the alias.
            pending.append(node.children[0])
        elif node.type == syms.dotted_as_names:
            # Children alternate name, comma, name, ...; take the names.
            pending.extend(node.children[::-2])
        else:
            raise AssertionError("unknown node type")
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class FixImport(fixer_base.BaseFix):
    """Turn implicit relative imports into explicit ones (see module doc)."""
    BM_compatible = True

    PATTERN = """
    import_from< 'from' imp=any 'import' ['('] any [')'] >
    |
    import_name< 'import' imp=any >
    """

    def start_tree(self, tree, name):
        super(FixImport, self).start_tree(tree, name)
        # Files that already use absolute_import need no fixing.
        self.skip = "absolute_import" in tree.future_features

    def transform(self, node, results):
        if self.skip:
            return
        imp = results['imp']

        if node.type == syms.import_from:
            # Some imps are top-level (eg: 'import ham')
            # some are first level (eg: 'import ham.eggs')
            # some are third level (eg: 'import ham.eggs as spam')
            # Hence, the loop
            while not hasattr(imp, 'value'):
                imp = imp.children[0]
            if self.probably_a_local_import(imp.value):
                imp.value = "." + imp.value
                imp.changed()
        else:
            have_local = False
            have_absolute = False
            for mod_name in traverse_imports(imp):
                if self.probably_a_local_import(mod_name):
                    have_local = True
                else:
                    have_absolute = True
            if have_absolute:
                if have_local:
                    # We won't handle both sibling and absolute imports in the
                    # same statement at the moment.
                    self.warning(node, "absolute and local imports together")
                return

            new = FromImport(".", [imp])
            new.prefix = node.prefix
            return new

    def probably_a_local_import(self, imp_name):
        """Heuristic: does *imp_name* name a sibling module of self.filename?"""
        if imp_name.startswith("."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file its not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
            if exists(base_path + ext):
                return True
        return False
|
parrot/lib/python3.10/lib2to3/fixes/fix_itertools_imports.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
|
| 2 |
+
|
| 3 |
+
# Local imports
|
| 4 |
+
from lib2to3 import fixer_base
|
| 5 |
+
from lib2to3.fixer_util import BlankLine, syms, token
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FixItertoolsImports(fixer_base.BaseFix):
    """Rewrite ``from itertools import imap/ifilter/izip/...`` imports."""
    BM_compatible = True
    PATTERN = """
              import_from< 'from' 'itertools' 'import' imports=any >
              """ %(locals())

    def transform(self, node, results):
        imports = results['imports']
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        # children alternate name, comma, name, ...; step over the names.
        for child in children[::2]:
            if child.type == token.NAME:
                member = child.value
                name_node = child
            elif child.type == token.STAR:
                # Just leave the import as is.
                return
            else:
                assert child.type == syms.import_as_name
                name_node = child.children[0]
            member_name = name_node.value
            if member_name in ('imap', 'izip', 'ifilter'):
                # These become builtins in Python 3: drop them here.
                child.value = None
                child.remove()
            elif member_name in ('ifilterfalse', 'izip_longest'):
                node.changed()
                name_node.value = ('filterfalse' if member_name[1] == 'f'
                                   else 'zip_longest')

        # Make sure the import statement is still sane
        children = imports.children[:] or [imports]
        remove_comma = True
        for child in children:
            if remove_comma and child.type == token.COMMA:
                child.remove()
            else:
                remove_comma ^= True

        while children and children[-1].type == token.COMMA:
            children.pop().remove()

        # If there are no imports left, just get rid of the entire statement
        if (not (imports.children or getattr(imports, 'value', None)) or
            imports.parent is None):
            p = node.prefix
            node = BlankLine()
            node.prefix = p
            return node
|
parrot/lib/python3.10/lib2to3/fixes/fix_methodattrs.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix bound method attributes (method.im_? -> method.__?__).
|
| 2 |
+
"""
|
| 3 |
+
# Author: Christian Heimes
|
| 4 |
+
|
| 5 |
+
# Local imports
|
| 6 |
+
from .. import fixer_base
|
| 7 |
+
from ..fixer_util import Name
|
| 8 |
+
|
| 9 |
+
# Old bound-method attribute -> Python 3 replacement.
MAP = {
    "im_func" : "__func__",
    "im_self" : "__self__",
    "im_class" : "__self__.__class__"
    }

class FixMethodattrs(fixer_base.BaseFix):
    """Rename im_func/im_self/im_class attribute accesses via MAP."""
    BM_compatible = True
    PATTERN = """
    power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
    """

    def transform(self, node, results):
        attr = results["attr"][0]
        new = MAP[attr.value]
        # Keep the original whitespace before the attribute name.
        attr.replace(Name(new, prefix=attr.prefix))
|
parrot/lib/python3.10/lib2to3/fixes/fix_ne.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that turns <> into !=."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import pytree
|
| 8 |
+
from ..pgen2 import token
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixNe(fixer_base.BaseFix):
    """Rewrite the legacy ``<>`` inequality operator as ``!=``."""
    # Simple enough to skip the pattern compiler: accept every NOTEQUAL
    # token and filter down to the obsolete spelling in match().

    _accept_type = token.NOTEQUAL

    def match(self, node):
        # Override: "!=" leaves are already fine; only "<>" needs fixing.
        return node.value == "<>"

    def transform(self, node, results):
        # Preserve the original leading whitespace on the new leaf.
        return pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix)
|
parrot/lib/python3.10/lib2to3/fixes/fix_numliterals.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that turns 1L into 1, 0755 into 0o755.
|
| 2 |
+
"""
|
| 3 |
+
# Copyright 2007 Georg Brandl.
|
| 4 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from ..pgen2 import token
|
| 8 |
+
from .. import fixer_base
|
| 9 |
+
from ..fixer_util import Number
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixNumliterals(fixer_base.BaseFix):
|
| 13 |
+
# This is so simple that we don't need the pattern compiler.
|
| 14 |
+
|
| 15 |
+
_accept_type = token.NUMBER
|
| 16 |
+
|
| 17 |
+
def match(self, node):
|
| 18 |
+
# Override
|
| 19 |
+
return (node.value.startswith("0") or node.value[-1] in "Ll")
|
| 20 |
+
|
| 21 |
+
def transform(self, node, results):
|
| 22 |
+
val = node.value
|
| 23 |
+
if val[-1] in 'Ll':
|
| 24 |
+
val = val[:-1]
|
| 25 |
+
elif val.startswith('0') and val.isdigit() and len(set(val)) > 1:
|
| 26 |
+
val = "0o" + val[1:]
|
| 27 |
+
|
| 28 |
+
return Number(val, prefix=node.prefix)
|
parrot/lib/python3.10/lib2to3/fixes/fix_renames.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible renames
|
| 2 |
+
|
| 3 |
+
Fixes:
|
| 4 |
+
* sys.maxint -> sys.maxsize
|
| 5 |
+
"""
|
| 6 |
+
# Author: Christian Heimes
|
| 7 |
+
# based on Collin Winter's fix_import
|
| 8 |
+
|
| 9 |
+
# Local imports
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
from ..fixer_util import Name, attr_chain
|
| 12 |
+
|
| 13 |
+
MAPPING = {"sys": {"maxint" : "maxsize"},
|
| 14 |
+
}
|
| 15 |
+
LOOKUP = {}
|
| 16 |
+
|
| 17 |
+
def alternates(members):
|
| 18 |
+
return "(" + "|".join(map(repr, members)) + ")"
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def build_pattern():
|
| 22 |
+
#bare = set()
|
| 23 |
+
for module, replace in list(MAPPING.items()):
|
| 24 |
+
for old_attr, new_attr in list(replace.items()):
|
| 25 |
+
LOOKUP[(module, old_attr)] = new_attr
|
| 26 |
+
#bare.add(module)
|
| 27 |
+
#bare.add(old_attr)
|
| 28 |
+
#yield """
|
| 29 |
+
# import_name< 'import' (module=%r
|
| 30 |
+
# | dotted_as_names< any* module=%r any* >) >
|
| 31 |
+
# """ % (module, module)
|
| 32 |
+
yield """
|
| 33 |
+
import_from< 'from' module_name=%r 'import'
|
| 34 |
+
( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
|
| 35 |
+
""" % (module, old_attr, old_attr)
|
| 36 |
+
yield """
|
| 37 |
+
power< module_name=%r trailer< '.' attr_name=%r > any* >
|
| 38 |
+
""" % (module, old_attr)
|
| 39 |
+
#yield """bare_name=%s""" % alternates(bare)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class FixRenames(fixer_base.BaseFix):
|
| 43 |
+
BM_compatible = True
|
| 44 |
+
PATTERN = "|".join(build_pattern())
|
| 45 |
+
|
| 46 |
+
order = "pre" # Pre-order tree traversal
|
| 47 |
+
|
| 48 |
+
# Don't match the node if it's within another match
|
| 49 |
+
def match(self, node):
|
| 50 |
+
match = super(FixRenames, self).match
|
| 51 |
+
results = match(node)
|
| 52 |
+
if results:
|
| 53 |
+
if any(match(obj) for obj in attr_chain(node, "parent")):
|
| 54 |
+
return False
|
| 55 |
+
return results
|
| 56 |
+
return False
|
| 57 |
+
|
| 58 |
+
#def start_tree(self, tree, filename):
|
| 59 |
+
# super(FixRenames, self).start_tree(tree, filename)
|
| 60 |
+
# self.replace = {}
|
| 61 |
+
|
| 62 |
+
def transform(self, node, results):
|
| 63 |
+
mod_name = results.get("module_name")
|
| 64 |
+
attr_name = results.get("attr_name")
|
| 65 |
+
#bare_name = results.get("bare_name")
|
| 66 |
+
#import_mod = results.get("module")
|
| 67 |
+
|
| 68 |
+
if mod_name and attr_name:
|
| 69 |
+
new_attr = LOOKUP[(mod_name.value, attr_name.value)]
|
| 70 |
+
attr_name.replace(Name(new_attr, prefix=attr_name.prefix))
|
parrot/lib/python3.10/lib2to3/fixes/fix_set_literal.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Optional fixer to transform set() calls to set literals.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Author: Benjamin Peterson
|
| 6 |
+
|
| 7 |
+
from lib2to3 import fixer_base, pytree
|
| 8 |
+
from lib2to3.fixer_util import token, syms
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixSetLiteral(fixer_base.BaseFix):
|
| 13 |
+
|
| 14 |
+
BM_compatible = True
|
| 15 |
+
explicit = True
|
| 16 |
+
|
| 17 |
+
PATTERN = """power< 'set' trailer< '('
|
| 18 |
+
(atom=atom< '[' (items=listmaker< any ((',' any)* [',']) >
|
| 19 |
+
|
|
| 20 |
+
single=any) ']' >
|
| 21 |
+
|
|
| 22 |
+
atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' >
|
| 23 |
+
)
|
| 24 |
+
')' > >
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
def transform(self, node, results):
|
| 28 |
+
single = results.get("single")
|
| 29 |
+
if single:
|
| 30 |
+
# Make a fake listmaker
|
| 31 |
+
fake = pytree.Node(syms.listmaker, [single.clone()])
|
| 32 |
+
single.replace(fake)
|
| 33 |
+
items = fake
|
| 34 |
+
else:
|
| 35 |
+
items = results["items"]
|
| 36 |
+
|
| 37 |
+
# Build the contents of the literal
|
| 38 |
+
literal = [pytree.Leaf(token.LBRACE, "{")]
|
| 39 |
+
literal.extend(n.clone() for n in items.children)
|
| 40 |
+
literal.append(pytree.Leaf(token.RBRACE, "}"))
|
| 41 |
+
# Set the prefix of the right brace to that of the ')' or ']'
|
| 42 |
+
literal[-1].prefix = items.next_sibling.prefix
|
| 43 |
+
maker = pytree.Node(syms.dictsetmaker, literal)
|
| 44 |
+
maker.prefix = node.prefix
|
| 45 |
+
|
| 46 |
+
# If the original was a one tuple, we need to remove the extra comma.
|
| 47 |
+
if len(maker.children) == 4:
|
| 48 |
+
n = maker.children[2]
|
| 49 |
+
n.remove()
|
| 50 |
+
maker.children[-1].prefix = n.prefix
|
| 51 |
+
|
| 52 |
+
# Finally, replace the set call with our shiny new literal.
|
| 53 |
+
return maker
|
parrot/lib/python3.10/lib2to3/fixes/fix_urllib.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix changes imports of urllib which are now incompatible.
|
| 2 |
+
This is rather similar to fix_imports, but because of the more
|
| 3 |
+
complex nature of the fixing for urllib, it has its own fixer.
|
| 4 |
+
"""
|
| 5 |
+
# Author: Nick Edds
|
| 6 |
+
|
| 7 |
+
# Local imports
|
| 8 |
+
from lib2to3.fixes.fix_imports import alternates, FixImports
|
| 9 |
+
from lib2to3.fixer_util import (Name, Comma, FromImport, Newline,
|
| 10 |
+
find_indentation, Node, syms)
|
| 11 |
+
|
| 12 |
+
MAPPING = {"urllib": [
|
| 13 |
+
("urllib.request",
|
| 14 |
+
["URLopener", "FancyURLopener", "urlretrieve",
|
| 15 |
+
"_urlopener", "urlopen", "urlcleanup",
|
| 16 |
+
"pathname2url", "url2pathname", "getproxies"]),
|
| 17 |
+
("urllib.parse",
|
| 18 |
+
["quote", "quote_plus", "unquote", "unquote_plus",
|
| 19 |
+
"urlencode", "splitattr", "splithost", "splitnport",
|
| 20 |
+
"splitpasswd", "splitport", "splitquery", "splittag",
|
| 21 |
+
"splittype", "splituser", "splitvalue", ]),
|
| 22 |
+
("urllib.error",
|
| 23 |
+
["ContentTooShortError"])],
|
| 24 |
+
"urllib2" : [
|
| 25 |
+
("urllib.request",
|
| 26 |
+
["urlopen", "install_opener", "build_opener",
|
| 27 |
+
"Request", "OpenerDirector", "BaseHandler",
|
| 28 |
+
"HTTPDefaultErrorHandler", "HTTPRedirectHandler",
|
| 29 |
+
"HTTPCookieProcessor", "ProxyHandler",
|
| 30 |
+
"HTTPPasswordMgr",
|
| 31 |
+
"HTTPPasswordMgrWithDefaultRealm",
|
| 32 |
+
"AbstractBasicAuthHandler",
|
| 33 |
+
"HTTPBasicAuthHandler", "ProxyBasicAuthHandler",
|
| 34 |
+
"AbstractDigestAuthHandler",
|
| 35 |
+
"HTTPDigestAuthHandler", "ProxyDigestAuthHandler",
|
| 36 |
+
"HTTPHandler", "HTTPSHandler", "FileHandler",
|
| 37 |
+
"FTPHandler", "CacheFTPHandler",
|
| 38 |
+
"UnknownHandler"]),
|
| 39 |
+
("urllib.error",
|
| 40 |
+
["URLError", "HTTPError"]),
|
| 41 |
+
]
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
# Duplicate the url parsing functions for urllib2.
|
| 45 |
+
MAPPING["urllib2"].append(MAPPING["urllib"][1])
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def build_pattern():
|
| 49 |
+
bare = set()
|
| 50 |
+
for old_module, changes in MAPPING.items():
|
| 51 |
+
for change in changes:
|
| 52 |
+
new_module, members = change
|
| 53 |
+
members = alternates(members)
|
| 54 |
+
yield """import_name< 'import' (module=%r
|
| 55 |
+
| dotted_as_names< any* module=%r any* >) >
|
| 56 |
+
""" % (old_module, old_module)
|
| 57 |
+
yield """import_from< 'from' mod_member=%r 'import'
|
| 58 |
+
( member=%s | import_as_name< member=%s 'as' any > |
|
| 59 |
+
import_as_names< members=any* >) >
|
| 60 |
+
""" % (old_module, members, members)
|
| 61 |
+
yield """import_from< 'from' module_star=%r 'import' star='*' >
|
| 62 |
+
""" % old_module
|
| 63 |
+
yield """import_name< 'import'
|
| 64 |
+
dotted_as_name< module_as=%r 'as' any > >
|
| 65 |
+
""" % old_module
|
| 66 |
+
# bare_with_attr has a special significance for FixImports.match().
|
| 67 |
+
yield """power< bare_with_attr=%r trailer< '.' member=%s > any* >
|
| 68 |
+
""" % (old_module, members)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class FixUrllib(FixImports):
|
| 72 |
+
|
| 73 |
+
def build_pattern(self):
|
| 74 |
+
return "|".join(build_pattern())
|
| 75 |
+
|
| 76 |
+
def transform_import(self, node, results):
|
| 77 |
+
"""Transform for the basic import case. Replaces the old
|
| 78 |
+
import name with a comma separated list of its
|
| 79 |
+
replacements.
|
| 80 |
+
"""
|
| 81 |
+
import_mod = results.get("module")
|
| 82 |
+
pref = import_mod.prefix
|
| 83 |
+
|
| 84 |
+
names = []
|
| 85 |
+
|
| 86 |
+
# create a Node list of the replacement modules
|
| 87 |
+
for name in MAPPING[import_mod.value][:-1]:
|
| 88 |
+
names.extend([Name(name[0], prefix=pref), Comma()])
|
| 89 |
+
names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref))
|
| 90 |
+
import_mod.replace(names)
|
| 91 |
+
|
| 92 |
+
def transform_member(self, node, results):
|
| 93 |
+
"""Transform for imports of specific module elements. Replaces
|
| 94 |
+
the module to be imported from with the appropriate new
|
| 95 |
+
module.
|
| 96 |
+
"""
|
| 97 |
+
mod_member = results.get("mod_member")
|
| 98 |
+
pref = mod_member.prefix
|
| 99 |
+
member = results.get("member")
|
| 100 |
+
|
| 101 |
+
# Simple case with only a single member being imported
|
| 102 |
+
if member:
|
| 103 |
+
# this may be a list of length one, or just a node
|
| 104 |
+
if isinstance(member, list):
|
| 105 |
+
member = member[0]
|
| 106 |
+
new_name = None
|
| 107 |
+
for change in MAPPING[mod_member.value]:
|
| 108 |
+
if member.value in change[1]:
|
| 109 |
+
new_name = change[0]
|
| 110 |
+
break
|
| 111 |
+
if new_name:
|
| 112 |
+
mod_member.replace(Name(new_name, prefix=pref))
|
| 113 |
+
else:
|
| 114 |
+
self.cannot_convert(node, "This is an invalid module element")
|
| 115 |
+
|
| 116 |
+
# Multiple members being imported
|
| 117 |
+
else:
|
| 118 |
+
# a dictionary for replacements, order matters
|
| 119 |
+
modules = []
|
| 120 |
+
mod_dict = {}
|
| 121 |
+
members = results["members"]
|
| 122 |
+
for member in members:
|
| 123 |
+
# we only care about the actual members
|
| 124 |
+
if member.type == syms.import_as_name:
|
| 125 |
+
as_name = member.children[2].value
|
| 126 |
+
member_name = member.children[0].value
|
| 127 |
+
else:
|
| 128 |
+
member_name = member.value
|
| 129 |
+
as_name = None
|
| 130 |
+
if member_name != ",":
|
| 131 |
+
for change in MAPPING[mod_member.value]:
|
| 132 |
+
if member_name in change[1]:
|
| 133 |
+
if change[0] not in mod_dict:
|
| 134 |
+
modules.append(change[0])
|
| 135 |
+
mod_dict.setdefault(change[0], []).append(member)
|
| 136 |
+
|
| 137 |
+
new_nodes = []
|
| 138 |
+
indentation = find_indentation(node)
|
| 139 |
+
first = True
|
| 140 |
+
def handle_name(name, prefix):
|
| 141 |
+
if name.type == syms.import_as_name:
|
| 142 |
+
kids = [Name(name.children[0].value, prefix=prefix),
|
| 143 |
+
name.children[1].clone(),
|
| 144 |
+
name.children[2].clone()]
|
| 145 |
+
return [Node(syms.import_as_name, kids)]
|
| 146 |
+
return [Name(name.value, prefix=prefix)]
|
| 147 |
+
for module in modules:
|
| 148 |
+
elts = mod_dict[module]
|
| 149 |
+
names = []
|
| 150 |
+
for elt in elts[:-1]:
|
| 151 |
+
names.extend(handle_name(elt, pref))
|
| 152 |
+
names.append(Comma())
|
| 153 |
+
names.extend(handle_name(elts[-1], pref))
|
| 154 |
+
new = FromImport(module, names)
|
| 155 |
+
if not first or node.parent.prefix.endswith(indentation):
|
| 156 |
+
new.prefix = indentation
|
| 157 |
+
new_nodes.append(new)
|
| 158 |
+
first = False
|
| 159 |
+
if new_nodes:
|
| 160 |
+
nodes = []
|
| 161 |
+
for new_node in new_nodes[:-1]:
|
| 162 |
+
nodes.extend([new_node, Newline()])
|
| 163 |
+
nodes.append(new_nodes[-1])
|
| 164 |
+
node.replace(nodes)
|
| 165 |
+
else:
|
| 166 |
+
self.cannot_convert(node, "All module elements are invalid")
|
| 167 |
+
|
| 168 |
+
def transform_dot(self, node, results):
|
| 169 |
+
"""Transform for calls to module members in code."""
|
| 170 |
+
module_dot = results.get("bare_with_attr")
|
| 171 |
+
member = results.get("member")
|
| 172 |
+
new_name = None
|
| 173 |
+
if isinstance(member, list):
|
| 174 |
+
member = member[0]
|
| 175 |
+
for change in MAPPING[module_dot.value]:
|
| 176 |
+
if member.value in change[1]:
|
| 177 |
+
new_name = change[0]
|
| 178 |
+
break
|
| 179 |
+
if new_name:
|
| 180 |
+
module_dot.replace(Name(new_name,
|
| 181 |
+
prefix=module_dot.prefix))
|
| 182 |
+
else:
|
| 183 |
+
self.cannot_convert(node, "This is an invalid module element")
|
| 184 |
+
|
| 185 |
+
def transform(self, node, results):
|
| 186 |
+
if results.get("module"):
|
| 187 |
+
self.transform_import(node, results)
|
| 188 |
+
elif results.get("mod_member"):
|
| 189 |
+
self.transform_member(node, results)
|
| 190 |
+
elif results.get("bare_with_attr"):
|
| 191 |
+
self.transform_dot(node, results)
|
| 192 |
+
# Renaming and star imports are not supported for these modules.
|
| 193 |
+
elif results.get("module_star"):
|
| 194 |
+
self.cannot_convert(node, "Cannot handle star imports.")
|
| 195 |
+
elif results.get("module_as"):
|
| 196 |
+
self.cannot_convert(node, "This module is now multiple modules")
|
parrot/lib/python3.10/lib2to3/fixes/fix_ws_comma.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that changes 'a ,b' into 'a, b'.
|
| 2 |
+
|
| 3 |
+
This also changes '{a :b}' into '{a: b}', but does not touch other
|
| 4 |
+
uses of colons. It does not touch other uses of whitespace.
|
| 5 |
+
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from .. import pytree
|
| 9 |
+
from ..pgen2 import token
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
|
| 12 |
+
class FixWsComma(fixer_base.BaseFix):
|
| 13 |
+
|
| 14 |
+
explicit = True # The user must ask for this fixers
|
| 15 |
+
|
| 16 |
+
PATTERN = """
|
| 17 |
+
any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]>
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
COMMA = pytree.Leaf(token.COMMA, ",")
|
| 21 |
+
COLON = pytree.Leaf(token.COLON, ":")
|
| 22 |
+
SEPS = (COMMA, COLON)
|
| 23 |
+
|
| 24 |
+
def transform(self, node, results):
|
| 25 |
+
new = node.clone()
|
| 26 |
+
comma = False
|
| 27 |
+
for child in new.children:
|
| 28 |
+
if child in self.SEPS:
|
| 29 |
+
prefix = child.prefix
|
| 30 |
+
if prefix.isspace() and "\n" not in prefix:
|
| 31 |
+
child.prefix = ""
|
| 32 |
+
comma = True
|
| 33 |
+
else:
|
| 34 |
+
if comma:
|
| 35 |
+
prefix = child.prefix
|
| 36 |
+
if not prefix:
|
| 37 |
+
child.prefix = " "
|
| 38 |
+
comma = False
|
| 39 |
+
return new
|
parrot/lib/python3.10/lib2to3/patcomp.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Pattern compiler.
|
| 5 |
+
|
| 6 |
+
The grammar is taken from PatternGrammar.txt.
|
| 7 |
+
|
| 8 |
+
The compiler compiles a pattern to a pytree.*Pattern instance.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__author__ = "Guido van Rossum <guido@python.org>"
|
| 12 |
+
|
| 13 |
+
# Python imports
|
| 14 |
+
import io
|
| 15 |
+
|
| 16 |
+
# Fairly local imports
|
| 17 |
+
from .pgen2 import driver, literals, token, tokenize, parse, grammar
|
| 18 |
+
|
| 19 |
+
# Really local imports
|
| 20 |
+
from . import pytree
|
| 21 |
+
from . import pygram
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class PatternSyntaxError(Exception):
|
| 25 |
+
pass
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def tokenize_wrapper(input):
|
| 29 |
+
"""Tokenizes a string suppressing significant whitespace."""
|
| 30 |
+
skip = {token.NEWLINE, token.INDENT, token.DEDENT}
|
| 31 |
+
tokens = tokenize.generate_tokens(io.StringIO(input).readline)
|
| 32 |
+
for quintuple in tokens:
|
| 33 |
+
type, value, start, end, line_text = quintuple
|
| 34 |
+
if type not in skip:
|
| 35 |
+
yield quintuple
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class PatternCompiler(object):
|
| 39 |
+
|
| 40 |
+
def __init__(self, grammar_file=None):
|
| 41 |
+
"""Initializer.
|
| 42 |
+
|
| 43 |
+
Takes an optional alternative filename for the pattern grammar.
|
| 44 |
+
"""
|
| 45 |
+
if grammar_file is None:
|
| 46 |
+
self.grammar = pygram.pattern_grammar
|
| 47 |
+
self.syms = pygram.pattern_symbols
|
| 48 |
+
else:
|
| 49 |
+
self.grammar = driver.load_grammar(grammar_file)
|
| 50 |
+
self.syms = pygram.Symbols(self.grammar)
|
| 51 |
+
self.pygrammar = pygram.python_grammar
|
| 52 |
+
self.pysyms = pygram.python_symbols
|
| 53 |
+
self.driver = driver.Driver(self.grammar, convert=pattern_convert)
|
| 54 |
+
|
| 55 |
+
def compile_pattern(self, input, debug=False, with_tree=False):
|
| 56 |
+
"""Compiles a pattern string to a nested pytree.*Pattern object."""
|
| 57 |
+
tokens = tokenize_wrapper(input)
|
| 58 |
+
try:
|
| 59 |
+
root = self.driver.parse_tokens(tokens, debug=debug)
|
| 60 |
+
except parse.ParseError as e:
|
| 61 |
+
raise PatternSyntaxError(str(e)) from None
|
| 62 |
+
if with_tree:
|
| 63 |
+
return self.compile_node(root), root
|
| 64 |
+
else:
|
| 65 |
+
return self.compile_node(root)
|
| 66 |
+
|
| 67 |
+
def compile_node(self, node):
|
| 68 |
+
"""Compiles a node, recursively.
|
| 69 |
+
|
| 70 |
+
This is one big switch on the node type.
|
| 71 |
+
"""
|
| 72 |
+
# XXX Optimize certain Wildcard-containing-Wildcard patterns
|
| 73 |
+
# that can be merged
|
| 74 |
+
if node.type == self.syms.Matcher:
|
| 75 |
+
node = node.children[0] # Avoid unneeded recursion
|
| 76 |
+
|
| 77 |
+
if node.type == self.syms.Alternatives:
|
| 78 |
+
# Skip the odd children since they are just '|' tokens
|
| 79 |
+
alts = [self.compile_node(ch) for ch in node.children[::2]]
|
| 80 |
+
if len(alts) == 1:
|
| 81 |
+
return alts[0]
|
| 82 |
+
p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1)
|
| 83 |
+
return p.optimize()
|
| 84 |
+
|
| 85 |
+
if node.type == self.syms.Alternative:
|
| 86 |
+
units = [self.compile_node(ch) for ch in node.children]
|
| 87 |
+
if len(units) == 1:
|
| 88 |
+
return units[0]
|
| 89 |
+
p = pytree.WildcardPattern([units], min=1, max=1)
|
| 90 |
+
return p.optimize()
|
| 91 |
+
|
| 92 |
+
if node.type == self.syms.NegatedUnit:
|
| 93 |
+
pattern = self.compile_basic(node.children[1:])
|
| 94 |
+
p = pytree.NegatedPattern(pattern)
|
| 95 |
+
return p.optimize()
|
| 96 |
+
|
| 97 |
+
assert node.type == self.syms.Unit
|
| 98 |
+
|
| 99 |
+
name = None
|
| 100 |
+
nodes = node.children
|
| 101 |
+
if len(nodes) >= 3 and nodes[1].type == token.EQUAL:
|
| 102 |
+
name = nodes[0].value
|
| 103 |
+
nodes = nodes[2:]
|
| 104 |
+
repeat = None
|
| 105 |
+
if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater:
|
| 106 |
+
repeat = nodes[-1]
|
| 107 |
+
nodes = nodes[:-1]
|
| 108 |
+
|
| 109 |
+
# Now we've reduced it to: STRING | NAME [Details] | (...) | [...]
|
| 110 |
+
pattern = self.compile_basic(nodes, repeat)
|
| 111 |
+
|
| 112 |
+
if repeat is not None:
|
| 113 |
+
assert repeat.type == self.syms.Repeater
|
| 114 |
+
children = repeat.children
|
| 115 |
+
child = children[0]
|
| 116 |
+
if child.type == token.STAR:
|
| 117 |
+
min = 0
|
| 118 |
+
max = pytree.HUGE
|
| 119 |
+
elif child.type == token.PLUS:
|
| 120 |
+
min = 1
|
| 121 |
+
max = pytree.HUGE
|
| 122 |
+
elif child.type == token.LBRACE:
|
| 123 |
+
assert children[-1].type == token.RBRACE
|
| 124 |
+
assert len(children) in (3, 5)
|
| 125 |
+
min = max = self.get_int(children[1])
|
| 126 |
+
if len(children) == 5:
|
| 127 |
+
max = self.get_int(children[3])
|
| 128 |
+
else:
|
| 129 |
+
assert False
|
| 130 |
+
if min != 1 or max != 1:
|
| 131 |
+
pattern = pattern.optimize()
|
| 132 |
+
pattern = pytree.WildcardPattern([[pattern]], min=min, max=max)
|
| 133 |
+
|
| 134 |
+
if name is not None:
|
| 135 |
+
pattern.name = name
|
| 136 |
+
return pattern.optimize()
|
| 137 |
+
|
| 138 |
+
def compile_basic(self, nodes, repeat=None):
|
| 139 |
+
# Compile STRING | NAME [Details] | (...) | [...]
|
| 140 |
+
assert len(nodes) >= 1
|
| 141 |
+
node = nodes[0]
|
| 142 |
+
if node.type == token.STRING:
|
| 143 |
+
value = str(literals.evalString(node.value))
|
| 144 |
+
return pytree.LeafPattern(_type_of_literal(value), value)
|
| 145 |
+
elif node.type == token.NAME:
|
| 146 |
+
value = node.value
|
| 147 |
+
if value.isupper():
|
| 148 |
+
if value not in TOKEN_MAP:
|
| 149 |
+
raise PatternSyntaxError("Invalid token: %r" % value)
|
| 150 |
+
if nodes[1:]:
|
| 151 |
+
raise PatternSyntaxError("Can't have details for token")
|
| 152 |
+
return pytree.LeafPattern(TOKEN_MAP[value])
|
| 153 |
+
else:
|
| 154 |
+
if value == "any":
|
| 155 |
+
type = None
|
| 156 |
+
elif not value.startswith("_"):
|
| 157 |
+
type = getattr(self.pysyms, value, None)
|
| 158 |
+
if type is None:
|
| 159 |
+
raise PatternSyntaxError("Invalid symbol: %r" % value)
|
| 160 |
+
if nodes[1:]: # Details present
|
| 161 |
+
content = [self.compile_node(nodes[1].children[1])]
|
| 162 |
+
else:
|
| 163 |
+
content = None
|
| 164 |
+
return pytree.NodePattern(type, content)
|
| 165 |
+
elif node.value == "(":
|
| 166 |
+
return self.compile_node(nodes[1])
|
| 167 |
+
elif node.value == "[":
|
| 168 |
+
assert repeat is None
|
| 169 |
+
subpattern = self.compile_node(nodes[1])
|
| 170 |
+
return pytree.WildcardPattern([[subpattern]], min=0, max=1)
|
| 171 |
+
assert False, node
|
| 172 |
+
|
| 173 |
+
def get_int(self, node):
|
| 174 |
+
assert node.type == token.NUMBER
|
| 175 |
+
return int(node.value)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
# Map named tokens to the type value for a LeafPattern
|
| 179 |
+
TOKEN_MAP = {"NAME": token.NAME,
|
| 180 |
+
"STRING": token.STRING,
|
| 181 |
+
"NUMBER": token.NUMBER,
|
| 182 |
+
"TOKEN": None}
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _type_of_literal(value):
|
| 186 |
+
if value[0].isalpha():
|
| 187 |
+
return token.NAME
|
| 188 |
+
elif value in grammar.opmap:
|
| 189 |
+
return grammar.opmap[value]
|
| 190 |
+
else:
|
| 191 |
+
return None
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def pattern_convert(grammar, raw_node_info):
|
| 195 |
+
"""Converts raw node information to a Node or Leaf instance."""
|
| 196 |
+
type, value, context, children = raw_node_info
|
| 197 |
+
if children or type in grammar.number2symbol:
|
| 198 |
+
return pytree.Node(type, children, context=context)
|
| 199 |
+
else:
|
| 200 |
+
return pytree.Leaf(type, value, context=context)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def compile_pattern(pattern):
|
| 204 |
+
return PatternCompiler().compile_pattern(pattern)
|
parrot/lib/python3.10/lib2to3/pgen2/parse.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Parser engine for the grammar tables generated by pgen.
|
| 5 |
+
|
| 6 |
+
The grammar table must be loaded first.
|
| 7 |
+
|
| 8 |
+
See Parser/parser.c in the Python distribution for additional info on
|
| 9 |
+
how this parsing engine works.
|
| 10 |
+
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
# Local imports
|
| 14 |
+
from . import token
|
| 15 |
+
|
| 16 |
+
class ParseError(Exception):
|
| 17 |
+
"""Exception to signal the parser is stuck."""
|
| 18 |
+
|
| 19 |
+
def __init__(self, msg, type, value, context):
|
| 20 |
+
Exception.__init__(self, "%s: type=%r, value=%r, context=%r" %
|
| 21 |
+
(msg, type, value, context))
|
| 22 |
+
self.msg = msg
|
| 23 |
+
self.type = type
|
| 24 |
+
self.value = value
|
| 25 |
+
self.context = context
|
| 26 |
+
|
| 27 |
+
def __reduce__(self):
|
| 28 |
+
return type(self), (self.msg, self.type, self.value, self.context)
|
| 29 |
+
|
| 30 |
+
class Parser(object):
|
| 31 |
+
"""Parser engine.
|
| 32 |
+
|
| 33 |
+
The proper usage sequence is:
|
| 34 |
+
|
| 35 |
+
p = Parser(grammar, [converter]) # create instance
|
| 36 |
+
p.setup([start]) # prepare for parsing
|
| 37 |
+
<for each input token>:
|
| 38 |
+
if p.addtoken(...): # parse a token; may raise ParseError
|
| 39 |
+
break
|
| 40 |
+
root = p.rootnode # root of abstract syntax tree
|
| 41 |
+
|
| 42 |
+
A Parser instance may be reused by calling setup() repeatedly.
|
| 43 |
+
|
| 44 |
+
A Parser instance contains state pertaining to the current token
|
| 45 |
+
sequence, and should not be used concurrently by different threads
|
| 46 |
+
to parse separate token sequences.
|
| 47 |
+
|
| 48 |
+
See driver.py for how to get input tokens by tokenizing a file or
|
| 49 |
+
string.
|
| 50 |
+
|
| 51 |
+
Parsing is complete when addtoken() returns True; the root of the
|
| 52 |
+
abstract syntax tree can then be retrieved from the rootnode
|
| 53 |
+
instance variable. When a syntax error occurs, addtoken() raises
|
| 54 |
+
the ParseError exception. There is no error recovery; the parser
|
| 55 |
+
cannot be used after a syntax error was reported (but it can be
|
| 56 |
+
reinitialized by calling setup()).
|
| 57 |
+
|
| 58 |
+
"""
|
| 59 |
+
|
| 60 |
+
def __init__(self, grammar, convert=None):
|
| 61 |
+
"""Constructor.
|
| 62 |
+
|
| 63 |
+
The grammar argument is a grammar.Grammar instance; see the
|
| 64 |
+
grammar module for more information.
|
| 65 |
+
|
| 66 |
+
The parser is not ready yet for parsing; you must call the
|
| 67 |
+
setup() method to get it started.
|
| 68 |
+
|
| 69 |
+
The optional convert argument is a function mapping concrete
|
| 70 |
+
syntax tree nodes to abstract syntax tree nodes. If not
|
| 71 |
+
given, no conversion is done and the syntax tree produced is
|
| 72 |
+
the concrete syntax tree. If given, it must be a function of
|
| 73 |
+
two arguments, the first being the grammar (a grammar.Grammar
|
| 74 |
+
instance), and the second being the concrete syntax tree node
|
| 75 |
+
to be converted. The syntax tree is converted from the bottom
|
| 76 |
+
up.
|
| 77 |
+
|
| 78 |
+
A concrete syntax tree node is a (type, value, context, nodes)
|
| 79 |
+
tuple, where type is the node type (a token or symbol number),
|
| 80 |
+
value is None for symbols and a string for tokens, context is
|
| 81 |
+
None or an opaque value used for error reporting (typically a
|
| 82 |
+
(lineno, offset) pair), and nodes is a list of children for
|
| 83 |
+
symbols, and None for tokens.
|
| 84 |
+
|
| 85 |
+
An abstract syntax tree node may be anything; this is entirely
|
| 86 |
+
up to the converter function.
|
| 87 |
+
|
| 88 |
+
"""
|
| 89 |
+
self.grammar = grammar
|
| 90 |
+
self.convert = convert or (lambda grammar, node: node)
|
| 91 |
+
|
| 92 |
+
def setup(self, start=None):
|
| 93 |
+
"""Prepare for parsing.
|
| 94 |
+
|
| 95 |
+
This *must* be called before starting to parse.
|
| 96 |
+
|
| 97 |
+
The optional argument is an alternative start symbol; it
|
| 98 |
+
defaults to the grammar's start symbol.
|
| 99 |
+
|
| 100 |
+
You can use a Parser instance to parse any number of programs;
|
| 101 |
+
each time you call setup() the parser is reset to an initial
|
| 102 |
+
state determined by the (implicit or explicit) start symbol.
|
| 103 |
+
|
| 104 |
+
"""
|
| 105 |
+
if start is None:
|
| 106 |
+
start = self.grammar.start
|
| 107 |
+
# Each stack entry is a tuple: (dfa, state, node).
|
| 108 |
+
# A node is a tuple: (type, value, context, children),
|
| 109 |
+
# where children is a list of nodes or None, and context may be None.
|
| 110 |
+
newnode = (start, None, None, [])
|
| 111 |
+
stackentry = (self.grammar.dfas[start], 0, newnode)
|
| 112 |
+
self.stack = [stackentry]
|
| 113 |
+
self.rootnode = None
|
| 114 |
+
self.used_names = set() # Aliased to self.rootnode.used_names in pop()
|
| 115 |
+
|
| 116 |
+
    def addtoken(self, type, value, context):
        """Add a token; return True iff this is the end of the program.

        Drives the pushdown automaton: the token is classified into a
        grammar label, then the top-of-stack DFA is advanced, pushing
        nonterminal DFAs or popping completed ones as needed.  Raises
        ParseError on invalid input; the parser cannot be reused after
        an error without calling setup() again.
        """
        # Map from token to label.
        ilabel = self.classify(type, value, context)
        # Loop until the token is shifted; may raise exceptions.
        while True:
            dfa, state, node = self.stack[-1]
            states, first = dfa
            arcs = states[state]
            # Look for an arc out of the current state with this label.
            for i, newstate in arcs:
                t, v = self.grammar.labels[i]
                if ilabel == i:
                    # Exact label match: labels < 256 are terminals
                    # (tokens), so this must be a token arc.
                    assert t < 256
                    # Shift the token; we're done with it.
                    self.shift(type, value, newstate, context)
                    # Pop while we are in an accept-only state (a state
                    # whose sole arc loops back to itself on label 0).
                    state = newstate
                    while states[state] == [(0, state)]:
                        self.pop()
                        if not self.stack:
                            # Done parsing!
                            return True
                        dfa, state, node = self.stack[-1]
                        states, first = dfa
                    # Done with this token.
                    return False
                elif t >= 256:
                    # Arc is labeled with a symbol (nonterminal); see
                    # whether the token is in that symbol's first set.
                    itsdfa = self.grammar.dfas[t]
                    itsstates, itsfirst = itsdfa
                    if ilabel in itsfirst:
                        # Push the symbol and retry the token against
                        # its DFA on the next outer iteration.
                        self.push(t, self.grammar.dfas[t], newstate, context)
                        break # To continue the outer while loop
            else:
                # No arc matched.  Label 0 marks an accepting state.
                if (0, state) in arcs:
                    # An accepting state: pop it and try the token
                    # against the enclosing DFA instead.
                    self.pop()
                    if not self.stack:
                        # Done parsing, but another token arrived.
                        raise ParseError("too much input",
                                         type, value, context)
                else:
                    # No transition possible: genuine syntax error.
                    raise ParseError("bad input", type, value, context)
|
| 163 |
+
|
| 164 |
+
def classify(self, type, value, context):
|
| 165 |
+
"""Turn a token into a label. (Internal)"""
|
| 166 |
+
if type == token.NAME:
|
| 167 |
+
# Keep a listing of all used names
|
| 168 |
+
self.used_names.add(value)
|
| 169 |
+
# Check for reserved words
|
| 170 |
+
ilabel = self.grammar.keywords.get(value)
|
| 171 |
+
if ilabel is not None:
|
| 172 |
+
return ilabel
|
| 173 |
+
ilabel = self.grammar.tokens.get(type)
|
| 174 |
+
if ilabel is None:
|
| 175 |
+
raise ParseError("bad token", type, value, context)
|
| 176 |
+
return ilabel
|
| 177 |
+
|
| 178 |
+
def shift(self, type, value, newstate, context):
|
| 179 |
+
"""Shift a token. (Internal)"""
|
| 180 |
+
dfa, state, node = self.stack[-1]
|
| 181 |
+
newnode = (type, value, context, None)
|
| 182 |
+
newnode = self.convert(self.grammar, newnode)
|
| 183 |
+
if newnode is not None:
|
| 184 |
+
node[-1].append(newnode)
|
| 185 |
+
self.stack[-1] = (dfa, newstate, node)
|
| 186 |
+
|
| 187 |
+
def push(self, type, newdfa, newstate, context):
|
| 188 |
+
"""Push a nonterminal. (Internal)"""
|
| 189 |
+
dfa, state, node = self.stack[-1]
|
| 190 |
+
newnode = (type, None, context, [])
|
| 191 |
+
self.stack[-1] = (dfa, newstate, node)
|
| 192 |
+
self.stack.append((newdfa, 0, newnode))
|
| 193 |
+
|
| 194 |
+
    def pop(self):
        """Pop a completed nonterminal.  (Internal)

        Converts the finished node and attaches it to its parent's
        children list; when the stack empties, the converted node
        becomes the root and receives the accumulated used_names set.
        """
        popdfa, popstate, popnode = self.stack.pop()
        newnode = self.convert(self.grammar, popnode)
        if newnode is not None:
            if self.stack:
                # Attach to the children list of the new top-of-stack node.
                dfa, state, node = self.stack[-1]
                node[-1].append(newnode)
            else:
                # Stack exhausted: parsing is done, expose the result.
                self.rootnode = newnode
                self.rootnode.used_names = self.used_names
|
parrot/lib/python3.10/lib2to3/refactor.py
ADDED
|
@@ -0,0 +1,732 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Refactoring framework.
|
| 5 |
+
|
| 6 |
+
Used as a main program, this can refactor any number of files and/or
|
| 7 |
+
recursively descend down directories. Imported as a module, this
|
| 8 |
+
provides infrastructure to write your own refactoring tool.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__author__ = "Guido van Rossum <guido@python.org>"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Python imports
|
| 15 |
+
import io
|
| 16 |
+
import os
|
| 17 |
+
import pkgutil
|
| 18 |
+
import sys
|
| 19 |
+
import logging
|
| 20 |
+
import operator
|
| 21 |
+
import collections
|
| 22 |
+
from itertools import chain
|
| 23 |
+
|
| 24 |
+
# Local imports
|
| 25 |
+
from .pgen2 import driver, tokenize, token
|
| 26 |
+
from .fixer_util import find_root
|
| 27 |
+
from . import pytree, pygram
|
| 28 |
+
from . import btm_matcher as bm
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def get_all_fix_names(fixer_pkg, remove_prefix=True):
    """Return a sorted list of all available fix names in the given package.

    Args:
        fixer_pkg: dotted name of the package containing fix_* modules.
        remove_prefix: when true, strip the leading "fix_" from each name.

    Returns:
        Sorted list of fixer module names found in the package.
    """
    pkg = __import__(fixer_pkg, [], [], ["*"])
    fix_names = []
    for finder, name, ispkg in pkgutil.iter_modules(pkg.__path__):
        if name.startswith("fix_"):
            if remove_prefix:
                name = name[4:]
            fix_names.append(name)
    # iter_modules() ordering depends on the underlying path finders, so
    # sort explicitly to guarantee the documented "sorted" contract.
    fix_names.sort()
    return fix_names
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class _EveryNode(Exception):
    """Raised by _get_head_types() when a pattern has no fixed head
    type and therefore may begin matching at any node."""
    pass
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _get_head_types(pat):
    """Accepts a pytree Pattern Node and returns a set of the pattern
    types which will match first.

    Raises _EveryNode when the pattern places no constraint on the
    first node type.
    """

    if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
        # NodePatterns must either have no type and no content
        # or a type and content -- so they don't get any farther.
        # Always return leafs.
        if pat.type is None:
            # Typeless pattern: it can start anywhere.
            raise _EveryNode
        return {pat.type}

    if isinstance(pat, pytree.NegatedPattern):
        if pat.content:
            return _get_head_types(pat.content)
        raise _EveryNode # Negated Patterns don't have a type

    if isinstance(pat, pytree.WildcardPattern):
        # Recurse on each alternative; content is a sequence of
        # sequences of sub-patterns, so two nested loops are needed.
        r = set()
        for p in pat.content:
            for x in p:
                r.update(_get_head_types(x))
        return r

    raise Exception("Oh no! I don't understand pattern %s" %(pat))
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def _get_headnode_dict(fixer_list):
    """Accepts a list of fixers and returns a dictionary of
    head node type --> fixer list.

    Fixers whose pattern can start at any node (see _EveryNode) are
    appended to every node type's list so they are never skipped.
    """
    head_nodes = collections.defaultdict(list)
    every = []
    for fixer in fixer_list:
        if fixer.pattern:
            try:
                heads = _get_head_types(fixer.pattern)
            except _EveryNode:
                # No determinable head type: must run on every node.
                every.append(fixer)
            else:
                for node_type in heads:
                    head_nodes[node_type].append(fixer)
        else:
            # Pattern-less fixer: use its declared accept type, if any.
            if fixer._accept_type is not None:
                head_nodes[fixer._accept_type].append(fixer)
            else:
                every.append(fixer)
    # Fan the "match anywhere" fixers out to all symbol and token types.
    for node_type in chain(pygram.python_grammar.symbol2number.values(),
                           pygram.python_grammar.tokens):
        head_nodes[node_type].extend(every)
    return dict(head_nodes)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def get_fixers_from_package(pkg_name):
    """
    Return the fully qualified names for fixers in the package pkg_name.
    """
    # Keep the "fix_" prefix (remove_prefix=False) so the result can be
    # imported directly as dotted module paths.
    names = get_all_fix_names(pkg_name, False)
    return [pkg_name + "." + fix_name for fix_name in names]
|
| 106 |
+
|
| 107 |
+
def _identity(obj):
    """Return *obj* unchanged."""
    return obj
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def _detect_future_features(source):
    """Return the frozenset of feature names imported from __future__.

    Scans the token stream of *source* from the top, stopping at the
    first statement that is neither a docstring nor a
    ``from __future__ import ...`` line.
    """
    have_docstring = False
    gen = tokenize.generate_tokens(io.StringIO(source).readline)
    def advance():
        # Yield (token type, token string) pairs, one at a time.
        tok = next(gen)
        return tok[0], tok[1]
    # Token types that never end the __future__ scanning prologue.
    ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT})
    features = set()
    try:
        while True:
            tp, value = advance()
            if tp in ignore:
                continue
            elif tp == token.STRING:
                # Only one docstring is allowed before real code starts.
                if have_docstring:
                    break
                have_docstring = True
            elif tp == token.NAME and value == "from":
                tp, value = advance()
                if tp != token.NAME or value != "__future__":
                    break
                tp, value = advance()
                if tp != token.NAME or value != "import":
                    break
                tp, value = advance()
                if tp == token.OP and value == "(":
                    # Parenthesized import list: skip the open paren.
                    tp, value = advance()
                while tp == token.NAME:
                    features.add(value)
                    tp, value = advance()
                    # A comma continues the import list; anything else
                    # ends it.
                    if tp != token.OP or value != ",":
                        break
                    tp, value = advance()
            else:
                # First real statement: no more __future__ imports.
                break
    except StopIteration:
        pass
    return frozenset(features)
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class FixerError(Exception):
    """A fixer could not be loaded."""
    # Raised by RefactoringTool.get_fixers() for a missing fixer class
    # or an illegal fixer order.
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class RefactoringTool(object):
|
| 156 |
+
|
| 157 |
+
    # Defaults merged under any caller-supplied options in __init__().
    _default_options = {"print_function" : False,
                        "exec_function": False,
                        "write_unchanged_files" : False}

    CLASS_PREFIX = "Fix" # The prefix for fixer classes
    FILE_PREFIX = "fix_" # The prefix for modules with a fixer within
|
| 163 |
+
|
| 164 |
+
    def __init__(self, fixer_names, options=None, explicit=None):
        """Initializer.

        Args:
            fixer_names: a list of fixers to import
            options: a dict with configuration.
            explicit: a list of fixers to run even if they are explicit.
        """
        self.fixers = fixer_names
        self.explicit = explicit or []
        self.options = self._default_options.copy()
        if options is not None:
            self.options.update(options)
        self.grammar = pygram.python_grammar.copy()

        # Dropping a keyword from the grammar turns it back into an
        # ordinary NAME token for the parser.
        if self.options['print_function']:
            del self.grammar.keywords["print"]
        elif self.options['exec_function']:
            del self.grammar.keywords["exec"]

        # When this is True, the refactor*() methods will call write_file() for
        # files processed even if they were not changed during refactoring. If
        # and only if the refactor method's write parameter was True.
        self.write_unchanged_files = self.options.get("write_unchanged_files")
        self.errors = []
        self.logger = logging.getLogger("RefactoringTool")
        self.fixer_log = []
        self.wrote = False
        self.driver = driver.Driver(self.grammar,
                                    convert=pytree.convert,
                                    logger=self.logger)
        self.pre_order, self.post_order = self.get_fixers()


        self.files = []  # List of files that were or should be modified

        self.BM = bm.BottomMatcher()
        self.bmi_pre_order = []  # Bottom Matcher incompatible fixers
        self.bmi_post_order = []

        # Partition fixers: BM-compatible ones go into the bottom
        # matcher; the rest fall back to plain tree traversal.
        for fixer in chain(self.post_order, self.pre_order):
            if fixer.BM_compatible:
                self.BM.add_fixer(fixer)
                # remove fixers that will be handled by the bottom-up
                # matcher
            elif fixer in self.pre_order:
                self.bmi_pre_order.append(fixer)
            elif fixer in self.post_order:
                self.bmi_post_order.append(fixer)

        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
    def get_fixers(self):
        """Inspects the options to load the requested patterns and handlers.

        Returns:
            (pre_order, post_order), where pre_order is the list of fixers that
            want a pre-order AST traversal, and post_order is the list that want
            post-order traversal.

        Raises:
            FixerError: if a fixer class cannot be found or declares an
                illegal traversal order.
        """
        pre_order_fixers = []
        post_order_fixers = []
        for fix_mod_path in self.fixers:
            mod = __import__(fix_mod_path, {}, {}, ["*"])
            fix_name = fix_mod_path.rsplit(".", 1)[-1]
            if fix_name.startswith(self.FILE_PREFIX):
                fix_name = fix_name[len(self.FILE_PREFIX):]
            # Derive the class name: "imports_fixup" -> "FixImportsFixup".
            parts = fix_name.split("_")
            class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts])
            try:
                fix_class = getattr(mod, class_name)
            except AttributeError:
                raise FixerError("Can't find %s.%s" % (fix_name, class_name)) from None
            fixer = fix_class(self.options, self.fixer_log)
            # Explicit fixers only run when requested by name (or when
            # self.explicit is literally True, meaning "run everything").
            if fixer.explicit and self.explicit is not True and \
                    fix_mod_path not in self.explicit:
                self.log_message("Skipping optional fixer: %s", fix_name)
                continue

            self.log_debug("Adding transformation: %s", fix_name)
            if fixer.order == "pre":
                pre_order_fixers.append(fixer)
            elif fixer.order == "post":
                post_order_fixers.append(fixer)
            else:
                raise FixerError("Illegal fixer order: %r" % fixer.order)

        # Stable-sort each list by the fixer's declared run_order.
        key_func = operator.attrgetter("run_order")
        pre_order_fixers.sort(key=key_func)
        post_order_fixers.sort(key=key_func)
        return (pre_order_fixers, post_order_fixers)
|
| 258 |
+
|
| 259 |
+
    def log_error(self, msg, *args, **kwds):
        """Called when an error occurs.

        The base implementation re-raises the exception currently being
        handled (bare ``raise``); it must therefore only be called from
        inside an ``except`` block.  NOTE(review): subclasses presumably
        override this to record errors instead -- confirm against callers.
        """
        raise
|
| 262 |
+
|
| 263 |
+
def log_message(self, msg, *args):
|
| 264 |
+
"""Hook to log a message."""
|
| 265 |
+
if args:
|
| 266 |
+
msg = msg % args
|
| 267 |
+
self.logger.info(msg)
|
| 268 |
+
|
| 269 |
+
def log_debug(self, msg, *args):
|
| 270 |
+
if args:
|
| 271 |
+
msg = msg % args
|
| 272 |
+
self.logger.debug(msg)
|
| 273 |
+
|
| 274 |
+
    def print_output(self, old_text, new_text, filename, equal):
        """Called with the old version, new version, and filename of a
        refactored file.

        Base implementation is a no-op hook; *equal* is True when the
        two versions are identical.
        """
        pass
|
| 278 |
+
|
| 279 |
+
def refactor(self, items, write=False, doctests_only=False):
|
| 280 |
+
"""Refactor a list of files and directories."""
|
| 281 |
+
|
| 282 |
+
for dir_or_file in items:
|
| 283 |
+
if os.path.isdir(dir_or_file):
|
| 284 |
+
self.refactor_dir(dir_or_file, write, doctests_only)
|
| 285 |
+
else:
|
| 286 |
+
self.refactor_file(dir_or_file, write, doctests_only)
|
| 287 |
+
|
| 288 |
+
    def refactor_dir(self, dir_name, write=False, doctests_only=False):
        """Descends down a directory and refactor every Python file found.

        Python files are assumed to have a .py extension.

        Files and subdirectories starting with '.' are skipped.
        """
        py_ext = os.extsep + "py"
        for dirpath, dirnames, filenames in os.walk(dir_name):
            self.log_debug("Descending into %s", dirpath)
            # Sort for deterministic processing order across platforms.
            dirnames.sort()
            filenames.sort()
            for name in filenames:
                if (not name.startswith(".") and
                    os.path.splitext(name)[1] == py_ext):
                    fullname = os.path.join(dirpath, name)
                    self.refactor_file(fullname, write, doctests_only)
            # Modify dirnames in-place to remove subdirs with leading
            # dots; os.walk() honors this and will not descend into them.
            dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")]
|
| 307 |
+
|
| 308 |
+
    def _read_python_source(self, filename):
        """
        Do our best to decode a Python source file correctly.

        Returns a (source_text, encoding) pair, or (None, None) when the
        file cannot be opened (the error is reported via log_error).
        """
        try:
            f = open(filename, "rb")
        except OSError as err:
            self.log_error("Can't open %s: %s", filename, err)
            return None, None
        try:
            # Sniff the PEP 263 coding cookie / BOM from the raw bytes.
            encoding = tokenize.detect_encoding(f.readline)[0]
        finally:
            f.close()
        # Reopen in text mode with newline='' so original line endings
        # survive the round trip.
        with io.open(filename, "r", encoding=encoding, newline='') as f:
            return f.read(), encoding
|
| 323 |
+
|
| 324 |
+
    def refactor_file(self, filename, write=False, doctests_only=False):
        """Refactors a file.

        When *doctests_only* is true only doctest blocks inside the file
        are refactored; otherwise the whole source is parsed and
        transformed.  Changes reach disk only when *write* is true.
        """
        input, encoding = self._read_python_source(filename)
        if input is None:
            # Reading the file failed.
            return
        input += "\n" # Silence certain parse errors
        if doctests_only:
            self.log_debug("Refactoring doctests in %s", filename)
            output = self.refactor_docstring(input, filename)
            if self.write_unchanged_files or output != input:
                self.processed_file(output, filename, input, write, encoding)
            else:
                self.log_debug("No doctest changes in %s", filename)
        else:
            tree = self.refactor_string(input, filename)
            if self.write_unchanged_files or (tree and tree.was_changed):
                # The [:-1] is to take off the \n we added earlier
                self.processed_file(str(tree)[:-1], filename,
                                    write=write, encoding=encoding)
            else:
                self.log_debug("No changes in %s", filename)
|
| 346 |
+
|
| 347 |
+
    def refactor_string(self, data, name):
        """Refactor a given input string.

        Args:
            data: a string holding the code to be refactored.
            name: a human-readable name for use in error/log messages.

        Returns:
            An AST corresponding to the refactored input stream; None if
            there were errors during the parse.
        """
        features = _detect_future_features(data)
        # "from __future__ import print_function" means print must be
        # parsed as a function, not a statement.
        if "print_function" in features:
            self.driver.grammar = pygram.python_grammar_no_print_statement
        try:
            tree = self.driver.parse_string(data)
        except Exception as err:
            self.log_error("Can't parse %s: %s: %s",
                           name, err.__class__.__name__, err)
            return
        finally:
            # Always restore the configured grammar, even on parse failure.
            self.driver.grammar = self.grammar
        tree.future_features = features
        self.log_debug("Refactoring %s", name)
        self.refactor_tree(tree, name)
        return tree
|
| 373 |
+
|
| 374 |
+
    def refactor_stdin(self, doctests_only=False):
        """Refactor source read from stdin, reporting under the name "<stdin>".

        Output is never written back (there is no file); results go
        through processed_file() with write left at its default.
        """
        input = sys.stdin.read()
        if doctests_only:
            self.log_debug("Refactoring doctests in stdin")
            output = self.refactor_docstring(input, "<stdin>")
            if self.write_unchanged_files or output != input:
                self.processed_file(output, "<stdin>", input)
            else:
                self.log_debug("No doctest changes in stdin")
        else:
            tree = self.refactor_string(input, "<stdin>")
            if self.write_unchanged_files or (tree and tree.was_changed):
                self.processed_file(str(tree), "<stdin>", input)
            else:
                self.log_debug("No changes in stdin")
|
| 389 |
+
|
| 390 |
+
    def refactor_tree(self, tree, name):
        """Refactors a parse tree (modifying the tree in place).

        For compatible patterns the bottom matcher module is
        used. Otherwise the tree is traversed node-to-node for
        matches.

        Args:
            tree: a pytree.Node instance representing the root of the tree
                to be refactored.
            name: a human-readable name for this tree.

        Returns:
            True if the tree was modified, False otherwise.
        """

        for fixer in chain(self.pre_order, self.post_order):
            fixer.start_tree(tree, name)

        # Use traditional matching for the BM-incompatible fixers.
        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
        self.traverse_by(self.bmi_post_order_heads, tree.post_order())

        # Obtain the initial set of candidate nodes from the bottom
        # matcher (a dict of fixer -> list of matching nodes).
        match_set = self.BM.run(tree.leaves())

        # Keep going while any fixer still has pending matches;
        # transformations may add new matches for other fixers.
        while any(match_set.values()):
            for fixer in self.BM.fixers:
                if fixer in match_set and match_set[fixer]:
                    # Sort by depth; apply fixers from the bottom of the
                    # AST to the top.
                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)

                    if fixer.keep_line_order:
                        # Some fixers (e.g. fix_imports) must be applied
                        # in the original file's line order.
                        match_set[fixer].sort(key=pytree.Base.get_lineno)

                    # Iterate over a snapshot; the live list shrinks as
                    # nodes are consumed.
                    for node in list(match_set[fixer]):
                        if node in match_set[fixer]:
                            match_set[fixer].remove(node)

                        try:
                            find_root(node)
                        except ValueError:
                            # This node was cut off from the tree by a
                            # previous transformation; skip it.
                            continue

                        if node.fixers_applied and fixer in node.fixers_applied:
                            # Do not apply the same fixer again.
                            continue

                        results = fixer.match(node)

                        if results:
                            new = fixer.transform(node, results)
                            if new is not None:
                                node.replace(new)
                                # Mark the replacement subtree so this
                                # fixer is not applied again to it or
                                # any of its descendants.
                                for node in new.post_order():
                                    if not node.fixers_applied:
                                        node.fixers_applied = []
                                    node.fixers_applied.append(fixer)

                                # Update the original match set for the
                                # code the transformation just added.
                                new_matches = self.BM.run(new.leaves())
                                for fxr in new_matches:
                                    if not fxr in match_set:
                                        match_set[fxr]=[]

                                    match_set[fxr].extend(new_matches[fxr])

        for fixer in chain(self.pre_order, self.post_order):
            fixer.finish_tree(tree, name)
        return tree.was_changed
|
| 468 |
+
|
| 469 |
+
def traverse_by(self, fixers, traversal):
|
| 470 |
+
"""Traverse an AST, applying a set of fixers to each node.
|
| 471 |
+
|
| 472 |
+
This is a helper method for refactor_tree().
|
| 473 |
+
|
| 474 |
+
Args:
|
| 475 |
+
fixers: a list of fixer instances.
|
| 476 |
+
traversal: a generator that yields AST nodes.
|
| 477 |
+
|
| 478 |
+
Returns:
|
| 479 |
+
None
|
| 480 |
+
"""
|
| 481 |
+
if not fixers:
|
| 482 |
+
return
|
| 483 |
+
for node in traversal:
|
| 484 |
+
for fixer in fixers[node.type]:
|
| 485 |
+
results = fixer.match(node)
|
| 486 |
+
if results:
|
| 487 |
+
new = fixer.transform(node, results)
|
| 488 |
+
if new is not None:
|
| 489 |
+
node.replace(new)
|
| 490 |
+
node = new
|
| 491 |
+
|
| 492 |
+
    def processed_file(self, new_text, filename, old_text=None, write=False,
                       encoding=None):
        """
        Called when a file has been refactored and there may be changes.

        Records the filename, reports the old/new pair through
        print_output(), and writes the result when *write* is true (or
        skips unchanged files unless write_unchanged_files is set).
        """
        self.files.append(filename)
        if old_text is None:
            # Caller didn't supply the original; re-read it from disk.
            old_text = self._read_python_source(filename)[0]
            if old_text is None:
                return
        equal = old_text == new_text
        self.print_output(old_text, new_text, filename, equal)
        if equal:
            self.log_debug("No changes to %s", filename)
            if not self.write_unchanged_files:
                return
        if write:
            self.write_file(new_text, filename, old_text, encoding)
        else:
            self.log_debug("Not writing changes to %s", filename)
|
| 512 |
+
|
| 513 |
+
    def write_file(self, new_text, filename, old_text, encoding=None):
        """Writes a string to a file.

        Overwrites *filename* with *new_text* using *encoding*, logging
        (not raising) on failure, and records that a write happened by
        setting self.wrote.  *old_text* is accepted for interface
        compatibility but not used here.
        """
        try:
            fp = io.open(filename, "w", encoding=encoding, newline='')
        except OSError as err:
            self.log_error("Can't create %s: %s", filename, err)
            return

        with fp:
            try:
                fp.write(new_text)
            except OSError as err:
                self.log_error("Can't write %s: %s", filename, err)
        self.log_debug("Wrote changes to %s", filename)
        self.wrote = True
|
| 533 |
+
|
| 534 |
+
    # Doctest prompt strings recognized by refactor_docstring() and
    # re-emitted by refactor_doctest().
    PS1 = ">>> "
    PS2 = "... "
|
| 536 |
+
|
| 537 |
+
    def refactor_docstring(self, input, filename):
        """Refactors a docstring, looking for doctests.

        This returns a modified version of the input string.  It looks
        for doctests, which start with a ">>>" prompt, and may be
        continued with "..." prompts, as long as the "..." is indented
        the same as the ">>>".

        (Unfortunately we can't use the doctest module's parser,
        since, like most parsers, it is not geared towards preserving
        the original source.)
        """
        result = []
        block = None          # Lines of the doctest currently being collected.
        block_lineno = None   # 1-based line number where that doctest starts.
        indent = None         # Indentation prefix of the current doctest.
        lineno = 0
        for line in input.splitlines(keepends=True):
            lineno += 1
            if line.lstrip().startswith(self.PS1):
                # A new ">>>" prompt: flush any doctest in progress.
                if block is not None:
                    result.extend(self.refactor_doctest(block, block_lineno,
                                                        indent, filename))
                block_lineno = lineno
                block = [line]
                i = line.find(self.PS1)
                indent = line[:i]
            elif (indent is not None and
                  (line.startswith(indent + self.PS2) or
                   line == indent + self.PS2.rstrip() + "\n")):
                # Continuation line ("..." at the same indent, possibly
                # with trailing whitespace stripped).
                block.append(line)
            else:
                # Ordinary line: flush the pending doctest, then pass
                # the line through unchanged.
                if block is not None:
                    result.extend(self.refactor_doctest(block, block_lineno,
                                                        indent, filename))
                block = None
                indent = None
                result.append(line)
        # Flush a doctest that runs to the end of the input.
        if block is not None:
            result.extend(self.refactor_doctest(block, block_lineno,
                                                indent, filename))
        return "".join(result)
|
| 579 |
+
|
| 580 |
+
def refactor_doctest(self, block, lineno, indent, filename):
    """Refactor a single doctest and return its replacement lines.

    *block* is the list of source lines forming one doctest: the first
    starts with ">>>" (possibly indented), the rest with an identically
    indented "...".  If the doctest cannot be parsed, or refactoring
    changes nothing, the block is returned untouched.
    """
    try:
        tree = self.parse_block(block, lineno, indent)
    except Exception as err:
        # A parse failure is reported but never fatal: the doctest is
        # simply left as it was.
        if self.logger.isEnabledFor(logging.DEBUG):
            for line in block:
                self.log_debug("Source: %s", line.rstrip("\n"))
        self.log_error("Can't parse docstring in %s line %s: %s: %s",
                       filename, lineno, err.__class__.__name__, err)
        return block
    if not self.refactor_tree(tree, filename):
        return block
    fixed = str(tree).splitlines(keepends=True)
    # wrap_toks() shifted every token down by lineno-1; drop the blank
    # filler lines that shift produced at the front of the output.
    filler, fixed = fixed[:lineno-1], fixed[lineno-1:]
    assert filler == ["\n"] * (lineno-1), filler
    if not fixed[-1].endswith("\n"):
        fixed[-1] += "\n"
    # Re-attach the prompts that parse_block()/gen_lines() stripped off.
    replacement = [indent + self.PS1 + fixed.pop(0)]
    replacement.extend(indent + self.PS2 + line for line in fixed)
    return replacement
def summarize(self):
    """Log a human-readable summary of the whole refactoring run.

    Reports which files were (or, without write-back, would need to
    be) modified, any fixer warnings/messages, and all errors that
    were recorded along the way.
    """
    were = "were" if self.wrote else "need to be"
    if self.files:
        self.log_message("Files that %s modified:", were)
        for file in self.files:
            self.log_message(file)
    else:
        self.log_message("No files %s modified.", were)
    if self.fixer_log:
        self.log_message("Warnings/messages while refactoring:")
        for message in self.fixer_log:
            self.log_message(message)
    if self.errors:
        error_count = len(self.errors)
        if error_count == 1:
            self.log_message("There was 1 error:")
        else:
            self.log_message("There were %d errors:", error_count)
        for msg, args, kwds in self.errors:
            self.log_message(msg, *args, **kwds)
def parse_block(self, block, lineno, indent):
    """Parse one doctest block into a tree.

    The token stream is routed through wrap_toks() so that parser
    diagnostics and the positions embedded in the tree refer to the
    original docstring's line numbers.
    """
    shifted_tokens = self.wrap_toks(block, lineno, indent)
    tree = self.driver.parse_tokens(shifted_tokens)
    # Doctests never carry __future__ imports of their own.
    tree.future_features = frozenset()
    return tree
def wrap_toks(self, block, lineno, indent):
    """Wrap a tokenize stream, shifting every token down to *lineno*."""
    offset = lineno - 1
    readline = self.gen_lines(block, indent).__next__
    for tok in tokenize.generate_tokens(readline):
        type, value, (line0, col0), (line1, col1), line_text = tok
        # Only the rows are adjusted.  Fixing the columns would also
        # require rewriting line_text, and would still break for tokens
        # spanning lines; columns therefore stay relative to the end of
        # the prompt string (PS1 or PS2).
        yield (type, value,
               (line0 + offset, col0), (line1 + offset, col1),
               line_text)
def gen_lines(self, block, indent):
    """Yield the code behind the prompts, as expected by tokenize.

    Each line loses its leading ``indent + PS1`` (first line) or
    ``indent + PS2`` (subsequent lines) prefix; after the block is
    exhausted, empty strings are yielded forever so tokenize sees EOF.
    """
    prompts = [indent + self.PS1, indent + self.PS2]
    expected = prompts[0]
    for line in block:
        if line.startswith(expected):
            yield line[len(expected):]
        elif line == expected.rstrip() + "\n":
            # A bare prompt whose trailing spaces were stripped by an
            # editor: treat it as a blank source line.
            yield "\n"
        else:
            raise AssertionError("line=%r, prefix=%r" % (line, expected))
        # Every line after the first must carry the PS2 prompt.
        expected = prompts[1]
    while True:
        yield ""
class MultiprocessingUnsupported(Exception):
    """Raised by MultiprocessRefactoringTool.refactor() when multiple
    processes are requested but the multiprocessing module cannot be
    imported."""
    pass
class MultiprocessRefactoringTool(RefactoringTool):
    """RefactoringTool that can fan per-file work out to worker processes."""

    def __init__(self, *args, **kwargs):
        super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
        # Both are created lazily in refactor(); a non-None queue also
        # serves as the "multi-process run in progress" flag that
        # refactor_file() checks.
        self.queue = None
        self.output_lock = None

    def refactor(self, items, write=False, doctests_only=False,
                 num_processes=1):
        """Refactor *items*, optionally spread over *num_processes* workers.

        With num_processes == 1 this delegates straight to the
        single-process implementation.  Raises MultiprocessingUnsupported
        when the multiprocessing module cannot be imported, and
        RuntimeError on re-entrant multi-process use.
        """
        if num_processes == 1:
            return super(MultiprocessRefactoringTool, self).refactor(
                items, write, doctests_only)
        try:
            import multiprocessing
        except ImportError:
            raise MultiprocessingUnsupported
        if self.queue is not None:
            raise RuntimeError("already doing multiple processes")
        self.queue = multiprocessing.JoinableQueue()
        self.output_lock = multiprocessing.Lock()
        processes = [multiprocessing.Process(target=self._child)
                     for i in range(num_processes)]
        try:
            for p in processes:
                p.start()
            # With self.queue set, the inherited refactor() ends up calling
            # our refactor_file(), which enqueues each file for a worker
            # instead of processing it in this process.
            super(MultiprocessRefactoringTool, self).refactor(items, write,
                                                              doctests_only)
        finally:
            self.queue.join()
            # One None sentinel per worker tells each child loop to exit.
            for i in range(num_processes):
                self.queue.put(None)
            for p in processes:
                if p.is_alive():
                    p.join()
            self.queue = None

    def _child(self):
        # Worker loop: process queued (args, kwargs) tasks until the None
        # sentinel arrives.  task_done() runs even on failure so the
        # parent's queue.join() cannot deadlock.
        task = self.queue.get()
        while task is not None:
            args, kwargs = task
            try:
                super(MultiprocessRefactoringTool, self).refactor_file(
                    *args, **kwargs)
            finally:
                self.queue.task_done()
            task = self.queue.get()

    def refactor_file(self, *args, **kwargs):
        # During a multi-process run, hand the file off to a worker via
        # the queue; otherwise refactor it inline in this process.
        if self.queue is not None:
            self.queue.put((args, kwargs))
        else:
            return super(MultiprocessRefactoringTool, self).refactor_file(
                *args, **kwargs)
parrot/lib/python3.10/lib2to3/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (621 Bytes). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (209 Bytes). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/__pycache__/test_all_fixers.cpython-310.pyc
ADDED
|
Binary file (1.47 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/__pycache__/test_main.cpython-310.pyc
ADDED
|
Binary file (5.42 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/__pycache__/test_refactor.cpython-310.pyc
ADDED
|
Binary file (13.5 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/data/__pycache__/infinite_recursion.cpython-310.pyc
ADDED
|
Binary file (66.8 kB). View file
|
|
|
parrot/lib/python3.10/lib2to3/tests/data/__pycache__/py3_test_grammar.cpython-310.pyc
ADDED
|
Binary file (35.8 kB). View file
|
|
|