Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +7 -0
- evalkit_cambrian/bin/xz +3 -0
- evalkit_cambrian/lib/libssl.so +3 -0
- evalkit_cambrian/lib/libz.so.1.2.13 +3 -0
- evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/bdist_dumb.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/build_clib.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/check.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/sdist.cpython-310.pyc +0 -0
- evalkit_cambrian/lib/python3.10/distutils/command/bdist.py +141 -0
- evalkit_cambrian/lib/python3.10/distutils/command/build_py.py +416 -0
- evalkit_cambrian/lib/python3.10/distutils/command/build_scripts.py +160 -0
- evalkit_cambrian/lib/python3.10/distutils/command/command_template +33 -0
- evalkit_cambrian/lib/python3.10/distutils/command/install_headers.py +47 -0
- evalkit_cambrian/lib/python3.10/distutils/command/install_scripts.py +60 -0
- evalkit_cambrian/lib/python3.10/distutils/command/upload.py +215 -0
- evalkit_cambrian/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl +3 -0
- evalkit_cambrian/lib/python3.10/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl +3 -0
- evalkit_cambrian/lib/python3.10/lib2to3/Grammar.txt +196 -0
- evalkit_cambrian/lib/python3.10/lib2to3/PatternGrammar.txt +28 -0
- evalkit_cambrian/lib/python3.10/lib2to3/__init__.py +8 -0
- evalkit_cambrian/lib/python3.10/lib2to3/__main__.py +4 -0
- evalkit_cambrian/lib/python3.10/lib2to3/btm_matcher.py +163 -0
- evalkit_cambrian/lib/python3.10/lib2to3/btm_utils.py +281 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixer_base.py +186 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixer_util.py +453 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_basestring.py +14 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_buffer.py +22 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_dict.py +106 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_execfile.py +53 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_filter.py +94 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_getcwdu.py +19 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_idioms.py +152 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_imports.py +145 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_intern.py +39 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_itertools.py +43 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_itertools_imports.py +57 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_map.py +110 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_ne.py +23 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_next.py +103 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_nonzero.py +21 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_numliterals.py +28 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_reduce.py +35 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_reload.py +36 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_renames.py +70 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_standarderror.py +18 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_types.py +61 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_unicode.py +42 -0
- evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_xreadlines.py +25 -0
- evalkit_cambrian/lib/python3.10/lib2to3/main.py +273 -0
- evalkit_cambrian/lib/python3.10/lib2to3/patcomp.py +204 -0
.gitattributes
CHANGED
|
@@ -259,3 +259,10 @@ evalkit_cambrian/lib/libtinfo.so.6 filter=lfs diff=lfs merge=lfs -text
|
|
| 259 |
evalkit_cambrian/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
| 260 |
evalkit_cambrian/bin/unxz filter=lfs diff=lfs merge=lfs -text
|
| 261 |
evalkit_cambrian/lib/libtinfo.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 259 |
evalkit_cambrian/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
| 260 |
evalkit_cambrian/bin/unxz filter=lfs diff=lfs merge=lfs -text
|
| 261 |
evalkit_cambrian/lib/libtinfo.so filter=lfs diff=lfs merge=lfs -text
|
| 262 |
+
evalkit_cambrian/lib/libssl.so filter=lfs diff=lfs merge=lfs -text
|
| 263 |
+
evalkit_llava/lib/python3.10/lib-dynload/_decimal.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 264 |
+
evalkit_cambrian/lib/libz.so.1.2.13 filter=lfs diff=lfs merge=lfs -text
|
| 265 |
+
evalkit_cambrian/bin/xz filter=lfs diff=lfs merge=lfs -text
|
| 266 |
+
evalkit_cambrian/lib/python3.10/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 267 |
+
evalkit_cambrian/lib/python3.10/tkinter/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 268 |
+
evalkit_cambrian/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
evalkit_cambrian/bin/xz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a127c4faacf3bcd0d328c3eb0cc7096176e28a0f61306173489f619827c98276
|
| 3 |
+
size 108336
|
evalkit_cambrian/lib/libssl.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3480c91df4e0c1a33514955568641405e37924f680e8ba42f494a209640516c6
|
| 3 |
+
size 775712
|
evalkit_cambrian/lib/libz.so.1.2.13
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0b0e682a9dc7fd4895a6783288f851b793dc89633f28714027974fa4d66f3914
|
| 3 |
+
size 124744
|
evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/bdist_dumb.cpython-310.pyc
ADDED
|
Binary file (3.58 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/build_clib.cpython-310.pyc
ADDED
|
Binary file (5.07 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/check.cpython-310.pyc
ADDED
|
Binary file (5.21 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/distutils/command/__pycache__/sdist.cpython-310.pyc
ADDED
|
Binary file (14.7 kB). View file
|
|
|
evalkit_cambrian/lib/python3.10/distutils/command/bdist.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.bdist
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'bdist' command (create a built [binary]
|
| 4 |
+
distribution)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from distutils.core import Command
|
| 8 |
+
from distutils.errors import *
|
| 9 |
+
from distutils.util import get_platform
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def show_formats():
|
| 13 |
+
"""Print list of available formats (arguments to "--format" option).
|
| 14 |
+
"""
|
| 15 |
+
from distutils.fancy_getopt import FancyGetopt
|
| 16 |
+
formats = []
|
| 17 |
+
for format in bdist.format_commands:
|
| 18 |
+
formats.append(("formats=" + format, None,
|
| 19 |
+
bdist.format_command[format][1]))
|
| 20 |
+
pretty_printer = FancyGetopt(formats)
|
| 21 |
+
pretty_printer.print_help("List of available distribution formats:")
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class bdist(Command):
|
| 25 |
+
|
| 26 |
+
description = "create a built (binary) distribution"
|
| 27 |
+
|
| 28 |
+
user_options = [('bdist-base=', 'b',
|
| 29 |
+
"temporary directory for creating built distributions"),
|
| 30 |
+
('plat-name=', 'p',
|
| 31 |
+
"platform name to embed in generated filenames "
|
| 32 |
+
"(default: %s)" % get_platform()),
|
| 33 |
+
('formats=', None,
|
| 34 |
+
"formats for distribution (comma-separated list)"),
|
| 35 |
+
('dist-dir=', 'd',
|
| 36 |
+
"directory to put final built distributions in "
|
| 37 |
+
"[default: dist]"),
|
| 38 |
+
('skip-build', None,
|
| 39 |
+
"skip rebuilding everything (for testing/debugging)"),
|
| 40 |
+
('owner=', 'u',
|
| 41 |
+
"Owner name used when creating a tar file"
|
| 42 |
+
" [default: current user]"),
|
| 43 |
+
('group=', 'g',
|
| 44 |
+
"Group name used when creating a tar file"
|
| 45 |
+
" [default: current group]"),
|
| 46 |
+
]
|
| 47 |
+
|
| 48 |
+
boolean_options = ['skip-build']
|
| 49 |
+
|
| 50 |
+
help_options = [
|
| 51 |
+
('help-formats', None,
|
| 52 |
+
"lists available distribution formats", show_formats),
|
| 53 |
+
]
|
| 54 |
+
|
| 55 |
+
# The following commands do not take a format option from bdist
|
| 56 |
+
no_format_option = ('bdist_rpm',)
|
| 57 |
+
|
| 58 |
+
# This won't do in reality: will need to distinguish RPM-ish Linux,
|
| 59 |
+
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
|
| 60 |
+
default_format = {'posix': 'gztar',
|
| 61 |
+
'nt': 'zip'}
|
| 62 |
+
|
| 63 |
+
# Establish the preferred order (for the --help-formats option).
|
| 64 |
+
format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar',
|
| 65 |
+
'zip', 'msi']
|
| 66 |
+
|
| 67 |
+
# And the real information.
|
| 68 |
+
format_command = {'rpm': ('bdist_rpm', "RPM distribution"),
|
| 69 |
+
'gztar': ('bdist_dumb', "gzip'ed tar file"),
|
| 70 |
+
'bztar': ('bdist_dumb', "bzip2'ed tar file"),
|
| 71 |
+
'xztar': ('bdist_dumb', "xz'ed tar file"),
|
| 72 |
+
'ztar': ('bdist_dumb', "compressed tar file"),
|
| 73 |
+
'tar': ('bdist_dumb', "tar file"),
|
| 74 |
+
'zip': ('bdist_dumb', "ZIP file"),
|
| 75 |
+
'msi': ('bdist_msi', "Microsoft Installer")
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def initialize_options(self):
|
| 80 |
+
self.bdist_base = None
|
| 81 |
+
self.plat_name = None
|
| 82 |
+
self.formats = None
|
| 83 |
+
self.dist_dir = None
|
| 84 |
+
self.skip_build = 0
|
| 85 |
+
self.group = None
|
| 86 |
+
self.owner = None
|
| 87 |
+
|
| 88 |
+
def finalize_options(self):
|
| 89 |
+
# have to finalize 'plat_name' before 'bdist_base'
|
| 90 |
+
if self.plat_name is None:
|
| 91 |
+
if self.skip_build:
|
| 92 |
+
self.plat_name = get_platform()
|
| 93 |
+
else:
|
| 94 |
+
self.plat_name = self.get_finalized_command('build').plat_name
|
| 95 |
+
|
| 96 |
+
# 'bdist_base' -- parent of per-built-distribution-format
|
| 97 |
+
# temporary directories (eg. we'll probably have
|
| 98 |
+
# "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
|
| 99 |
+
if self.bdist_base is None:
|
| 100 |
+
build_base = self.get_finalized_command('build').build_base
|
| 101 |
+
self.bdist_base = os.path.join(build_base,
|
| 102 |
+
'bdist.' + self.plat_name)
|
| 103 |
+
|
| 104 |
+
self.ensure_string_list('formats')
|
| 105 |
+
if self.formats is None:
|
| 106 |
+
try:
|
| 107 |
+
self.formats = [self.default_format[os.name]]
|
| 108 |
+
except KeyError:
|
| 109 |
+
raise DistutilsPlatformError(
|
| 110 |
+
"don't know how to create built distributions "
|
| 111 |
+
"on platform %s" % os.name)
|
| 112 |
+
|
| 113 |
+
if self.dist_dir is None:
|
| 114 |
+
self.dist_dir = "dist"
|
| 115 |
+
|
| 116 |
+
def run(self):
|
| 117 |
+
# Figure out which sub-commands we need to run.
|
| 118 |
+
commands = []
|
| 119 |
+
for format in self.formats:
|
| 120 |
+
try:
|
| 121 |
+
commands.append(self.format_command[format][0])
|
| 122 |
+
except KeyError:
|
| 123 |
+
raise DistutilsOptionError("invalid format '%s'" % format)
|
| 124 |
+
|
| 125 |
+
# Reinitialize and run each command.
|
| 126 |
+
for i in range(len(self.formats)):
|
| 127 |
+
cmd_name = commands[i]
|
| 128 |
+
sub_cmd = self.reinitialize_command(cmd_name)
|
| 129 |
+
if cmd_name not in self.no_format_option:
|
| 130 |
+
sub_cmd.format = self.formats[i]
|
| 131 |
+
|
| 132 |
+
# passing the owner and group names for tar archiving
|
| 133 |
+
if cmd_name == 'bdist_dumb':
|
| 134 |
+
sub_cmd.owner = self.owner
|
| 135 |
+
sub_cmd.group = self.group
|
| 136 |
+
|
| 137 |
+
# If we're going to need to run this command again, tell it to
|
| 138 |
+
# keep its temporary files around so subsequent runs go faster.
|
| 139 |
+
if cmd_name in commands[i+1:]:
|
| 140 |
+
sub_cmd.keep_temp = 1
|
| 141 |
+
self.run_command(cmd_name)
|
evalkit_cambrian/lib/python3.10/distutils/command/build_py.py
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_py
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_py' command."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import importlib.util
|
| 7 |
+
import sys
|
| 8 |
+
import glob
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
from distutils.errors import *
|
| 12 |
+
from distutils.util import convert_path, Mixin2to3
|
| 13 |
+
from distutils import log
|
| 14 |
+
|
| 15 |
+
class build_py (Command):
|
| 16 |
+
|
| 17 |
+
description = "\"build\" pure Python modules (copy to build directory)"
|
| 18 |
+
|
| 19 |
+
user_options = [
|
| 20 |
+
('build-lib=', 'd', "directory to \"build\" (copy) to"),
|
| 21 |
+
('compile', 'c', "compile .py to .pyc"),
|
| 22 |
+
('no-compile', None, "don't compile .py files [default]"),
|
| 23 |
+
('optimize=', 'O',
|
| 24 |
+
"also compile with optimization: -O1 for \"python -O\", "
|
| 25 |
+
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
| 26 |
+
('force', 'f', "forcibly build everything (ignore file timestamps)"),
|
| 27 |
+
]
|
| 28 |
+
|
| 29 |
+
boolean_options = ['compile', 'force']
|
| 30 |
+
negative_opt = {'no-compile' : 'compile'}
|
| 31 |
+
|
| 32 |
+
def initialize_options(self):
|
| 33 |
+
self.build_lib = None
|
| 34 |
+
self.py_modules = None
|
| 35 |
+
self.package = None
|
| 36 |
+
self.package_data = None
|
| 37 |
+
self.package_dir = None
|
| 38 |
+
self.compile = 0
|
| 39 |
+
self.optimize = 0
|
| 40 |
+
self.force = None
|
| 41 |
+
|
| 42 |
+
def finalize_options(self):
|
| 43 |
+
self.set_undefined_options('build',
|
| 44 |
+
('build_lib', 'build_lib'),
|
| 45 |
+
('force', 'force'))
|
| 46 |
+
|
| 47 |
+
# Get the distribution options that are aliases for build_py
|
| 48 |
+
# options -- list of packages and list of modules.
|
| 49 |
+
self.packages = self.distribution.packages
|
| 50 |
+
self.py_modules = self.distribution.py_modules
|
| 51 |
+
self.package_data = self.distribution.package_data
|
| 52 |
+
self.package_dir = {}
|
| 53 |
+
if self.distribution.package_dir:
|
| 54 |
+
for name, path in self.distribution.package_dir.items():
|
| 55 |
+
self.package_dir[name] = convert_path(path)
|
| 56 |
+
self.data_files = self.get_data_files()
|
| 57 |
+
|
| 58 |
+
# Ick, copied straight from install_lib.py (fancy_getopt needs a
|
| 59 |
+
# type system! Hell, *everything* needs a type system!!!)
|
| 60 |
+
if not isinstance(self.optimize, int):
|
| 61 |
+
try:
|
| 62 |
+
self.optimize = int(self.optimize)
|
| 63 |
+
assert 0 <= self.optimize <= 2
|
| 64 |
+
except (ValueError, AssertionError):
|
| 65 |
+
raise DistutilsOptionError("optimize must be 0, 1, or 2")
|
| 66 |
+
|
| 67 |
+
def run(self):
|
| 68 |
+
# XXX copy_file by default preserves atime and mtime. IMHO this is
|
| 69 |
+
# the right thing to do, but perhaps it should be an option -- in
|
| 70 |
+
# particular, a site administrator might want installed files to
|
| 71 |
+
# reflect the time of installation rather than the last
|
| 72 |
+
# modification time before the installed release.
|
| 73 |
+
|
| 74 |
+
# XXX copy_file by default preserves mode, which appears to be the
|
| 75 |
+
# wrong thing to do: if a file is read-only in the working
|
| 76 |
+
# directory, we want it to be installed read/write so that the next
|
| 77 |
+
# installation of the same module distribution can overwrite it
|
| 78 |
+
# without problems. (This might be a Unix-specific issue.) Thus
|
| 79 |
+
# we turn off 'preserve_mode' when copying to the build directory,
|
| 80 |
+
# since the build directory is supposed to be exactly what the
|
| 81 |
+
# installation will look like (ie. we preserve mode when
|
| 82 |
+
# installing).
|
| 83 |
+
|
| 84 |
+
# Two options control which modules will be installed: 'packages'
|
| 85 |
+
# and 'py_modules'. The former lets us work with whole packages, not
|
| 86 |
+
# specifying individual modules at all; the latter is for
|
| 87 |
+
# specifying modules one-at-a-time.
|
| 88 |
+
|
| 89 |
+
if self.py_modules:
|
| 90 |
+
self.build_modules()
|
| 91 |
+
if self.packages:
|
| 92 |
+
self.build_packages()
|
| 93 |
+
self.build_package_data()
|
| 94 |
+
|
| 95 |
+
self.byte_compile(self.get_outputs(include_bytecode=0))
|
| 96 |
+
|
| 97 |
+
def get_data_files(self):
|
| 98 |
+
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
|
| 99 |
+
data = []
|
| 100 |
+
if not self.packages:
|
| 101 |
+
return data
|
| 102 |
+
for package in self.packages:
|
| 103 |
+
# Locate package source directory
|
| 104 |
+
src_dir = self.get_package_dir(package)
|
| 105 |
+
|
| 106 |
+
# Compute package build directory
|
| 107 |
+
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
|
| 108 |
+
|
| 109 |
+
# Length of path to strip from found files
|
| 110 |
+
plen = 0
|
| 111 |
+
if src_dir:
|
| 112 |
+
plen = len(src_dir)+1
|
| 113 |
+
|
| 114 |
+
# Strip directory from globbed filenames
|
| 115 |
+
filenames = [
|
| 116 |
+
file[plen:] for file in self.find_data_files(package, src_dir)
|
| 117 |
+
]
|
| 118 |
+
data.append((package, src_dir, build_dir, filenames))
|
| 119 |
+
return data
|
| 120 |
+
|
| 121 |
+
def find_data_files(self, package, src_dir):
|
| 122 |
+
"""Return filenames for package's data files in 'src_dir'"""
|
| 123 |
+
globs = (self.package_data.get('', [])
|
| 124 |
+
+ self.package_data.get(package, []))
|
| 125 |
+
files = []
|
| 126 |
+
for pattern in globs:
|
| 127 |
+
# Each pattern has to be converted to a platform-specific path
|
| 128 |
+
filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern)))
|
| 129 |
+
# Files that match more than one pattern are only added once
|
| 130 |
+
files.extend([fn for fn in filelist if fn not in files
|
| 131 |
+
and os.path.isfile(fn)])
|
| 132 |
+
return files
|
| 133 |
+
|
| 134 |
+
def build_package_data(self):
|
| 135 |
+
"""Copy data files into build directory"""
|
| 136 |
+
lastdir = None
|
| 137 |
+
for package, src_dir, build_dir, filenames in self.data_files:
|
| 138 |
+
for filename in filenames:
|
| 139 |
+
target = os.path.join(build_dir, filename)
|
| 140 |
+
self.mkpath(os.path.dirname(target))
|
| 141 |
+
self.copy_file(os.path.join(src_dir, filename), target,
|
| 142 |
+
preserve_mode=False)
|
| 143 |
+
|
| 144 |
+
def get_package_dir(self, package):
|
| 145 |
+
"""Return the directory, relative to the top of the source
|
| 146 |
+
distribution, where package 'package' should be found
|
| 147 |
+
(at least according to the 'package_dir' option, if any)."""
|
| 148 |
+
path = package.split('.')
|
| 149 |
+
|
| 150 |
+
if not self.package_dir:
|
| 151 |
+
if path:
|
| 152 |
+
return os.path.join(*path)
|
| 153 |
+
else:
|
| 154 |
+
return ''
|
| 155 |
+
else:
|
| 156 |
+
tail = []
|
| 157 |
+
while path:
|
| 158 |
+
try:
|
| 159 |
+
pdir = self.package_dir['.'.join(path)]
|
| 160 |
+
except KeyError:
|
| 161 |
+
tail.insert(0, path[-1])
|
| 162 |
+
del path[-1]
|
| 163 |
+
else:
|
| 164 |
+
tail.insert(0, pdir)
|
| 165 |
+
return os.path.join(*tail)
|
| 166 |
+
else:
|
| 167 |
+
# Oops, got all the way through 'path' without finding a
|
| 168 |
+
# match in package_dir. If package_dir defines a directory
|
| 169 |
+
# for the root (nameless) package, then fallback on it;
|
| 170 |
+
# otherwise, we might as well have not consulted
|
| 171 |
+
# package_dir at all, as we just use the directory implied
|
| 172 |
+
# by 'tail' (which should be the same as the original value
|
| 173 |
+
# of 'path' at this point).
|
| 174 |
+
pdir = self.package_dir.get('')
|
| 175 |
+
if pdir is not None:
|
| 176 |
+
tail.insert(0, pdir)
|
| 177 |
+
|
| 178 |
+
if tail:
|
| 179 |
+
return os.path.join(*tail)
|
| 180 |
+
else:
|
| 181 |
+
return ''
|
| 182 |
+
|
| 183 |
+
def check_package(self, package, package_dir):
|
| 184 |
+
# Empty dir name means current directory, which we can probably
|
| 185 |
+
# assume exists. Also, os.path.exists and isdir don't know about
|
| 186 |
+
# my "empty string means current dir" convention, so we have to
|
| 187 |
+
# circumvent them.
|
| 188 |
+
if package_dir != "":
|
| 189 |
+
if not os.path.exists(package_dir):
|
| 190 |
+
raise DistutilsFileError(
|
| 191 |
+
"package directory '%s' does not exist" % package_dir)
|
| 192 |
+
if not os.path.isdir(package_dir):
|
| 193 |
+
raise DistutilsFileError(
|
| 194 |
+
"supposed package directory '%s' exists, "
|
| 195 |
+
"but is not a directory" % package_dir)
|
| 196 |
+
|
| 197 |
+
# Require __init__.py for all but the "root package"
|
| 198 |
+
if package:
|
| 199 |
+
init_py = os.path.join(package_dir, "__init__.py")
|
| 200 |
+
if os.path.isfile(init_py):
|
| 201 |
+
return init_py
|
| 202 |
+
else:
|
| 203 |
+
log.warn(("package init file '%s' not found " +
|
| 204 |
+
"(or not a regular file)"), init_py)
|
| 205 |
+
|
| 206 |
+
# Either not in a package at all (__init__.py not expected), or
|
| 207 |
+
# __init__.py doesn't exist -- so don't return the filename.
|
| 208 |
+
return None
|
| 209 |
+
|
| 210 |
+
def check_module(self, module, module_file):
|
| 211 |
+
if not os.path.isfile(module_file):
|
| 212 |
+
log.warn("file %s (for module %s) not found", module_file, module)
|
| 213 |
+
return False
|
| 214 |
+
else:
|
| 215 |
+
return True
|
| 216 |
+
|
| 217 |
+
def find_package_modules(self, package, package_dir):
|
| 218 |
+
self.check_package(package, package_dir)
|
| 219 |
+
module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
|
| 220 |
+
modules = []
|
| 221 |
+
setup_script = os.path.abspath(self.distribution.script_name)
|
| 222 |
+
|
| 223 |
+
for f in module_files:
|
| 224 |
+
abs_f = os.path.abspath(f)
|
| 225 |
+
if abs_f != setup_script:
|
| 226 |
+
module = os.path.splitext(os.path.basename(f))[0]
|
| 227 |
+
modules.append((package, module, f))
|
| 228 |
+
else:
|
| 229 |
+
self.debug_print("excluding %s" % setup_script)
|
| 230 |
+
return modules
|
| 231 |
+
|
| 232 |
+
def find_modules(self):
|
| 233 |
+
"""Finds individually-specified Python modules, ie. those listed by
|
| 234 |
+
module name in 'self.py_modules'. Returns a list of tuples (package,
|
| 235 |
+
module_base, filename): 'package' is a tuple of the path through
|
| 236 |
+
package-space to the module; 'module_base' is the bare (no
|
| 237 |
+
packages, no dots) module name, and 'filename' is the path to the
|
| 238 |
+
".py" file (relative to the distribution root) that implements the
|
| 239 |
+
module.
|
| 240 |
+
"""
|
| 241 |
+
# Map package names to tuples of useful info about the package:
|
| 242 |
+
# (package_dir, checked)
|
| 243 |
+
# package_dir - the directory where we'll find source files for
|
| 244 |
+
# this package
|
| 245 |
+
# checked - true if we have checked that the package directory
|
| 246 |
+
# is valid (exists, contains __init__.py, ... ?)
|
| 247 |
+
packages = {}
|
| 248 |
+
|
| 249 |
+
# List of (package, module, filename) tuples to return
|
| 250 |
+
modules = []
|
| 251 |
+
|
| 252 |
+
# We treat modules-in-packages almost the same as toplevel modules,
|
| 253 |
+
# just the "package" for a toplevel is empty (either an empty
|
| 254 |
+
# string or empty list, depending on context). Differences:
|
| 255 |
+
# - don't check for __init__.py in directory for empty package
|
| 256 |
+
for module in self.py_modules:
|
| 257 |
+
path = module.split('.')
|
| 258 |
+
package = '.'.join(path[0:-1])
|
| 259 |
+
module_base = path[-1]
|
| 260 |
+
|
| 261 |
+
try:
|
| 262 |
+
(package_dir, checked) = packages[package]
|
| 263 |
+
except KeyError:
|
| 264 |
+
package_dir = self.get_package_dir(package)
|
| 265 |
+
checked = 0
|
| 266 |
+
|
| 267 |
+
if not checked:
|
| 268 |
+
init_py = self.check_package(package, package_dir)
|
| 269 |
+
packages[package] = (package_dir, 1)
|
| 270 |
+
if init_py:
|
| 271 |
+
modules.append((package, "__init__", init_py))
|
| 272 |
+
|
| 273 |
+
# XXX perhaps we should also check for just .pyc files
|
| 274 |
+
# (so greedy closed-source bastards can distribute Python
|
| 275 |
+
# modules too)
|
| 276 |
+
module_file = os.path.join(package_dir, module_base + ".py")
|
| 277 |
+
if not self.check_module(module, module_file):
|
| 278 |
+
continue
|
| 279 |
+
|
| 280 |
+
modules.append((package, module_base, module_file))
|
| 281 |
+
|
| 282 |
+
return modules
|
| 283 |
+
|
| 284 |
+
def find_all_modules(self):
|
| 285 |
+
"""Compute the list of all modules that will be built, whether
|
| 286 |
+
they are specified one-module-at-a-time ('self.py_modules') or
|
| 287 |
+
by whole packages ('self.packages'). Return a list of tuples
|
| 288 |
+
(package, module, module_file), just like 'find_modules()' and
|
| 289 |
+
'find_package_modules()' do."""
|
| 290 |
+
modules = []
|
| 291 |
+
if self.py_modules:
|
| 292 |
+
modules.extend(self.find_modules())
|
| 293 |
+
if self.packages:
|
| 294 |
+
for package in self.packages:
|
| 295 |
+
package_dir = self.get_package_dir(package)
|
| 296 |
+
m = self.find_package_modules(package, package_dir)
|
| 297 |
+
modules.extend(m)
|
| 298 |
+
return modules
|
| 299 |
+
|
| 300 |
+
def get_source_files(self):
|
| 301 |
+
return [module[-1] for module in self.find_all_modules()]
|
| 302 |
+
|
| 303 |
+
def get_module_outfile(self, build_dir, package, module):
|
| 304 |
+
outfile_path = [build_dir] + list(package) + [module + ".py"]
|
| 305 |
+
return os.path.join(*outfile_path)
|
| 306 |
+
|
| 307 |
+
def get_outputs(self, include_bytecode=1):
|
| 308 |
+
modules = self.find_all_modules()
|
| 309 |
+
outputs = []
|
| 310 |
+
for (package, module, module_file) in modules:
|
| 311 |
+
package = package.split('.')
|
| 312 |
+
filename = self.get_module_outfile(self.build_lib, package, module)
|
| 313 |
+
outputs.append(filename)
|
| 314 |
+
if include_bytecode:
|
| 315 |
+
if self.compile:
|
| 316 |
+
outputs.append(importlib.util.cache_from_source(
|
| 317 |
+
filename, optimization=''))
|
| 318 |
+
if self.optimize > 0:
|
| 319 |
+
outputs.append(importlib.util.cache_from_source(
|
| 320 |
+
filename, optimization=self.optimize))
|
| 321 |
+
|
| 322 |
+
outputs += [
|
| 323 |
+
os.path.join(build_dir, filename)
|
| 324 |
+
for package, src_dir, build_dir, filenames in self.data_files
|
| 325 |
+
for filename in filenames
|
| 326 |
+
]
|
| 327 |
+
|
| 328 |
+
return outputs
|
| 329 |
+
|
| 330 |
+
def build_module(self, module, module_file, package):
|
| 331 |
+
if isinstance(package, str):
|
| 332 |
+
package = package.split('.')
|
| 333 |
+
elif not isinstance(package, (list, tuple)):
|
| 334 |
+
raise TypeError(
|
| 335 |
+
"'package' must be a string (dot-separated), list, or tuple")
|
| 336 |
+
|
| 337 |
+
# Now put the module source file into the "build" area -- this is
|
| 338 |
+
# easy, we just copy it somewhere under self.build_lib (the build
|
| 339 |
+
# directory for Python source).
|
| 340 |
+
outfile = self.get_module_outfile(self.build_lib, package, module)
|
| 341 |
+
dir = os.path.dirname(outfile)
|
| 342 |
+
self.mkpath(dir)
|
| 343 |
+
return self.copy_file(module_file, outfile, preserve_mode=0)
|
| 344 |
+
|
| 345 |
+
def build_modules(self):
|
| 346 |
+
modules = self.find_modules()
|
| 347 |
+
for (package, module, module_file) in modules:
|
| 348 |
+
# Now "build" the module -- ie. copy the source file to
|
| 349 |
+
# self.build_lib (the build directory for Python source).
|
| 350 |
+
# (Actually, it gets copied to the directory for this package
|
| 351 |
+
# under self.build_lib.)
|
| 352 |
+
self.build_module(module, module_file, package)
|
| 353 |
+
|
| 354 |
+
def build_packages(self):
|
| 355 |
+
for package in self.packages:
|
| 356 |
+
# Get list of (package, module, module_file) tuples based on
|
| 357 |
+
# scanning the package directory. 'package' is only included
|
| 358 |
+
# in the tuple so that 'find_modules()' and
|
| 359 |
+
# 'find_package_tuples()' have a consistent interface; it's
|
| 360 |
+
# ignored here (apart from a sanity check). Also, 'module' is
|
| 361 |
+
# the *unqualified* module name (ie. no dots, no package -- we
|
| 362 |
+
# already know its package!), and 'module_file' is the path to
|
| 363 |
+
# the .py file, relative to the current directory
|
| 364 |
+
# (ie. including 'package_dir').
|
| 365 |
+
package_dir = self.get_package_dir(package)
|
| 366 |
+
modules = self.find_package_modules(package, package_dir)
|
| 367 |
+
|
| 368 |
+
# Now loop over the modules we found, "building" each one (just
|
| 369 |
+
# copy it to self.build_lib).
|
| 370 |
+
for (package_, module, module_file) in modules:
|
| 371 |
+
assert package == package_
|
| 372 |
+
self.build_module(module, module_file, package)
|
| 373 |
+
|
| 374 |
+
def byte_compile(self, files):
|
| 375 |
+
if sys.dont_write_bytecode:
|
| 376 |
+
self.warn('byte-compiling is disabled, skipping.')
|
| 377 |
+
return
|
| 378 |
+
|
| 379 |
+
from distutils.util import byte_compile
|
| 380 |
+
prefix = self.build_lib
|
| 381 |
+
if prefix[-1] != os.sep:
|
| 382 |
+
prefix = prefix + os.sep
|
| 383 |
+
|
| 384 |
+
# XXX this code is essentially the same as the 'byte_compile()
|
| 385 |
+
# method of the "install_lib" command, except for the determination
|
| 386 |
+
# of the 'prefix' string. Hmmm.
|
| 387 |
+
if self.compile:
|
| 388 |
+
byte_compile(files, optimize=0,
|
| 389 |
+
force=self.force, prefix=prefix, dry_run=self.dry_run)
|
| 390 |
+
if self.optimize > 0:
|
| 391 |
+
byte_compile(files, optimize=self.optimize,
|
| 392 |
+
force=self.force, prefix=prefix, dry_run=self.dry_run)
|
| 393 |
+
|
| 394 |
+
class build_py_2to3(build_py, Mixin2to3):
|
| 395 |
+
def run(self):
|
| 396 |
+
self.updated_files = []
|
| 397 |
+
|
| 398 |
+
# Base class code
|
| 399 |
+
if self.py_modules:
|
| 400 |
+
self.build_modules()
|
| 401 |
+
if self.packages:
|
| 402 |
+
self.build_packages()
|
| 403 |
+
self.build_package_data()
|
| 404 |
+
|
| 405 |
+
# 2to3
|
| 406 |
+
self.run_2to3(self.updated_files)
|
| 407 |
+
|
| 408 |
+
# Remaining base class code
|
| 409 |
+
self.byte_compile(self.get_outputs(include_bytecode=0))
|
| 410 |
+
|
| 411 |
+
def build_module(self, module, module_file, package):
|
| 412 |
+
res = build_py.build_module(self, module, module_file, package)
|
| 413 |
+
if res[1]:
|
| 414 |
+
# file was copied
|
| 415 |
+
self.updated_files.append(res[0])
|
| 416 |
+
return res
|
evalkit_cambrian/lib/python3.10/distutils/command/build_scripts.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_scripts
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_scripts' command."""
|
| 4 |
+
|
| 5 |
+
import os, re
|
| 6 |
+
from stat import ST_MODE
|
| 7 |
+
from distutils import sysconfig
|
| 8 |
+
from distutils.core import Command
|
| 9 |
+
from distutils.dep_util import newer
|
| 10 |
+
from distutils.util import convert_path, Mixin2to3
|
| 11 |
+
from distutils import log
|
| 12 |
+
import tokenize
|
| 13 |
+
|
| 14 |
+
# check if Python is called on the first line with this expression
|
| 15 |
+
first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
|
| 16 |
+
|
| 17 |
+
class build_scripts(Command):
|
| 18 |
+
|
| 19 |
+
description = "\"build\" scripts (copy and fixup #! line)"
|
| 20 |
+
|
| 21 |
+
user_options = [
|
| 22 |
+
('build-dir=', 'd', "directory to \"build\" (copy) to"),
|
| 23 |
+
('force', 'f', "forcibly build everything (ignore file timestamps"),
|
| 24 |
+
('executable=', 'e', "specify final destination interpreter path"),
|
| 25 |
+
]
|
| 26 |
+
|
| 27 |
+
boolean_options = ['force']
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def initialize_options(self):
|
| 31 |
+
self.build_dir = None
|
| 32 |
+
self.scripts = None
|
| 33 |
+
self.force = None
|
| 34 |
+
self.executable = None
|
| 35 |
+
self.outfiles = None
|
| 36 |
+
|
| 37 |
+
def finalize_options(self):
|
| 38 |
+
self.set_undefined_options('build',
|
| 39 |
+
('build_scripts', 'build_dir'),
|
| 40 |
+
('force', 'force'),
|
| 41 |
+
('executable', 'executable'))
|
| 42 |
+
self.scripts = self.distribution.scripts
|
| 43 |
+
|
| 44 |
+
def get_source_files(self):
|
| 45 |
+
return self.scripts
|
| 46 |
+
|
| 47 |
+
def run(self):
|
| 48 |
+
if not self.scripts:
|
| 49 |
+
return
|
| 50 |
+
self.copy_scripts()
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def copy_scripts(self):
|
| 54 |
+
r"""Copy each script listed in 'self.scripts'; if it's marked as a
|
| 55 |
+
Python script in the Unix way (first line matches 'first_line_re',
|
| 56 |
+
ie. starts with "\#!" and contains "python"), then adjust the first
|
| 57 |
+
line to refer to the current Python interpreter as we copy.
|
| 58 |
+
"""
|
| 59 |
+
self.mkpath(self.build_dir)
|
| 60 |
+
outfiles = []
|
| 61 |
+
updated_files = []
|
| 62 |
+
for script in self.scripts:
|
| 63 |
+
adjust = False
|
| 64 |
+
script = convert_path(script)
|
| 65 |
+
outfile = os.path.join(self.build_dir, os.path.basename(script))
|
| 66 |
+
outfiles.append(outfile)
|
| 67 |
+
|
| 68 |
+
if not self.force and not newer(script, outfile):
|
| 69 |
+
log.debug("not copying %s (up-to-date)", script)
|
| 70 |
+
continue
|
| 71 |
+
|
| 72 |
+
# Always open the file, but ignore failures in dry-run mode --
|
| 73 |
+
# that way, we'll get accurate feedback if we can read the
|
| 74 |
+
# script.
|
| 75 |
+
try:
|
| 76 |
+
f = open(script, "rb")
|
| 77 |
+
except OSError:
|
| 78 |
+
if not self.dry_run:
|
| 79 |
+
raise
|
| 80 |
+
f = None
|
| 81 |
+
else:
|
| 82 |
+
encoding, lines = tokenize.detect_encoding(f.readline)
|
| 83 |
+
f.seek(0)
|
| 84 |
+
first_line = f.readline()
|
| 85 |
+
if not first_line:
|
| 86 |
+
self.warn("%s is an empty file (skipping)" % script)
|
| 87 |
+
continue
|
| 88 |
+
|
| 89 |
+
match = first_line_re.match(first_line)
|
| 90 |
+
if match:
|
| 91 |
+
adjust = True
|
| 92 |
+
post_interp = match.group(1) or b''
|
| 93 |
+
|
| 94 |
+
if adjust:
|
| 95 |
+
log.info("copying and adjusting %s -> %s", script,
|
| 96 |
+
self.build_dir)
|
| 97 |
+
updated_files.append(outfile)
|
| 98 |
+
if not self.dry_run:
|
| 99 |
+
if not sysconfig.python_build:
|
| 100 |
+
executable = self.executable
|
| 101 |
+
else:
|
| 102 |
+
executable = os.path.join(
|
| 103 |
+
sysconfig.get_config_var("BINDIR"),
|
| 104 |
+
"python%s%s" % (sysconfig.get_config_var("VERSION"),
|
| 105 |
+
sysconfig.get_config_var("EXE")))
|
| 106 |
+
executable = os.fsencode(executable)
|
| 107 |
+
shebang = b"#!" + executable + post_interp + b"\n"
|
| 108 |
+
# Python parser starts to read a script using UTF-8 until
|
| 109 |
+
# it gets a #coding:xxx cookie. The shebang has to be the
|
| 110 |
+
# first line of a file, the #coding:xxx cookie cannot be
|
| 111 |
+
# written before. So the shebang has to be decodable from
|
| 112 |
+
# UTF-8.
|
| 113 |
+
try:
|
| 114 |
+
shebang.decode('utf-8')
|
| 115 |
+
except UnicodeDecodeError:
|
| 116 |
+
raise ValueError(
|
| 117 |
+
"The shebang ({!r}) is not decodable "
|
| 118 |
+
"from utf-8".format(shebang))
|
| 119 |
+
# If the script is encoded to a custom encoding (use a
|
| 120 |
+
# #coding:xxx cookie), the shebang has to be decodable from
|
| 121 |
+
# the script encoding too.
|
| 122 |
+
try:
|
| 123 |
+
shebang.decode(encoding)
|
| 124 |
+
except UnicodeDecodeError:
|
| 125 |
+
raise ValueError(
|
| 126 |
+
"The shebang ({!r}) is not decodable "
|
| 127 |
+
"from the script encoding ({})"
|
| 128 |
+
.format(shebang, encoding))
|
| 129 |
+
with open(outfile, "wb") as outf:
|
| 130 |
+
outf.write(shebang)
|
| 131 |
+
outf.writelines(f.readlines())
|
| 132 |
+
if f:
|
| 133 |
+
f.close()
|
| 134 |
+
else:
|
| 135 |
+
if f:
|
| 136 |
+
f.close()
|
| 137 |
+
updated_files.append(outfile)
|
| 138 |
+
self.copy_file(script, outfile)
|
| 139 |
+
|
| 140 |
+
if os.name == 'posix':
|
| 141 |
+
for file in outfiles:
|
| 142 |
+
if self.dry_run:
|
| 143 |
+
log.info("changing mode of %s", file)
|
| 144 |
+
else:
|
| 145 |
+
oldmode = os.stat(file)[ST_MODE] & 0o7777
|
| 146 |
+
newmode = (oldmode | 0o555) & 0o7777
|
| 147 |
+
if newmode != oldmode:
|
| 148 |
+
log.info("changing mode of %s from %o to %o",
|
| 149 |
+
file, oldmode, newmode)
|
| 150 |
+
os.chmod(file, newmode)
|
| 151 |
+
# XXX should we modify self.outfiles?
|
| 152 |
+
return outfiles, updated_files
|
| 153 |
+
|
| 154 |
+
class build_scripts_2to3(build_scripts, Mixin2to3):
|
| 155 |
+
|
| 156 |
+
def copy_scripts(self):
|
| 157 |
+
outfiles, updated_files = build_scripts.copy_scripts(self)
|
| 158 |
+
if not self.dry_run:
|
| 159 |
+
self.run_2to3(updated_files)
|
| 160 |
+
return outfiles, updated_files
|
evalkit_cambrian/lib/python3.10/distutils/command/command_template
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.x
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'x' command.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2000/mm/dd, John Doe
|
| 7 |
+
|
| 8 |
+
__revision__ = "$Id$"
|
| 9 |
+
|
| 10 |
+
from distutils.core import Command
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class x(Command):
|
| 14 |
+
|
| 15 |
+
# Brief (40-50 characters) description of the command
|
| 16 |
+
description = ""
|
| 17 |
+
|
| 18 |
+
# List of option tuples: long name, short name (None if no short
|
| 19 |
+
# name), and help string.
|
| 20 |
+
user_options = [('', '',
|
| 21 |
+
""),
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
def initialize_options(self):
|
| 25 |
+
self. = None
|
| 26 |
+
self. = None
|
| 27 |
+
self. = None
|
| 28 |
+
|
| 29 |
+
def finalize_options(self):
|
| 30 |
+
if self.x is None:
|
| 31 |
+
self.x =
|
| 32 |
+
|
| 33 |
+
def run(self):
|
evalkit_cambrian/lib/python3.10/distutils/command/install_headers.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_headers
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_headers' command, to install C/C++ header
|
| 4 |
+
files to the Python include directory."""
|
| 5 |
+
|
| 6 |
+
from distutils.core import Command
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# XXX force is never used
|
| 10 |
+
class install_headers(Command):
|
| 11 |
+
|
| 12 |
+
description = "install C/C++ header files"
|
| 13 |
+
|
| 14 |
+
user_options = [('install-dir=', 'd',
|
| 15 |
+
"directory to install header files to"),
|
| 16 |
+
('force', 'f',
|
| 17 |
+
"force installation (overwrite existing files)"),
|
| 18 |
+
]
|
| 19 |
+
|
| 20 |
+
boolean_options = ['force']
|
| 21 |
+
|
| 22 |
+
def initialize_options(self):
|
| 23 |
+
self.install_dir = None
|
| 24 |
+
self.force = 0
|
| 25 |
+
self.outfiles = []
|
| 26 |
+
|
| 27 |
+
def finalize_options(self):
|
| 28 |
+
self.set_undefined_options('install',
|
| 29 |
+
('install_headers', 'install_dir'),
|
| 30 |
+
('force', 'force'))
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def run(self):
|
| 34 |
+
headers = self.distribution.headers
|
| 35 |
+
if not headers:
|
| 36 |
+
return
|
| 37 |
+
|
| 38 |
+
self.mkpath(self.install_dir)
|
| 39 |
+
for header in headers:
|
| 40 |
+
(out, _) = self.copy_file(header, self.install_dir)
|
| 41 |
+
self.outfiles.append(out)
|
| 42 |
+
|
| 43 |
+
def get_inputs(self):
|
| 44 |
+
return self.distribution.headers or []
|
| 45 |
+
|
| 46 |
+
def get_outputs(self):
|
| 47 |
+
return self.outfiles
|
evalkit_cambrian/lib/python3.10/distutils/command/install_scripts.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_scripts
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_scripts' command, for installing
|
| 4 |
+
Python scripts."""
|
| 5 |
+
|
| 6 |
+
# contributed by Bastian Kleineidam
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
from distutils.core import Command
|
| 10 |
+
from distutils import log
|
| 11 |
+
from stat import ST_MODE
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class install_scripts(Command):
|
| 15 |
+
|
| 16 |
+
description = "install scripts (Python or otherwise)"
|
| 17 |
+
|
| 18 |
+
user_options = [
|
| 19 |
+
('install-dir=', 'd', "directory to install scripts to"),
|
| 20 |
+
('build-dir=','b', "build directory (where to install from)"),
|
| 21 |
+
('force', 'f', "force installation (overwrite existing files)"),
|
| 22 |
+
('skip-build', None, "skip the build steps"),
|
| 23 |
+
]
|
| 24 |
+
|
| 25 |
+
boolean_options = ['force', 'skip-build']
|
| 26 |
+
|
| 27 |
+
def initialize_options(self):
|
| 28 |
+
self.install_dir = None
|
| 29 |
+
self.force = 0
|
| 30 |
+
self.build_dir = None
|
| 31 |
+
self.skip_build = None
|
| 32 |
+
|
| 33 |
+
def finalize_options(self):
|
| 34 |
+
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
|
| 35 |
+
self.set_undefined_options('install',
|
| 36 |
+
('install_scripts', 'install_dir'),
|
| 37 |
+
('force', 'force'),
|
| 38 |
+
('skip_build', 'skip_build'),
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
def run(self):
|
| 42 |
+
if not self.skip_build:
|
| 43 |
+
self.run_command('build_scripts')
|
| 44 |
+
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
|
| 45 |
+
if os.name == 'posix':
|
| 46 |
+
# Set the executable bits (owner, group, and world) on
|
| 47 |
+
# all the scripts we just installed.
|
| 48 |
+
for file in self.get_outputs():
|
| 49 |
+
if self.dry_run:
|
| 50 |
+
log.info("changing mode of %s", file)
|
| 51 |
+
else:
|
| 52 |
+
mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
|
| 53 |
+
log.info("changing mode of %s to %o", file, mode)
|
| 54 |
+
os.chmod(file, mode)
|
| 55 |
+
|
| 56 |
+
def get_inputs(self):
|
| 57 |
+
return self.distribution.scripts or []
|
| 58 |
+
|
| 59 |
+
def get_outputs(self):
|
| 60 |
+
return self.outfiles or []
|
evalkit_cambrian/lib/python3.10/distutils/command/upload.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
distutils.command.upload
|
| 3 |
+
|
| 4 |
+
Implements the Distutils 'upload' subcommand (upload package to a package
|
| 5 |
+
index).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import io
|
| 10 |
+
import hashlib
|
| 11 |
+
from base64 import standard_b64encode
|
| 12 |
+
from urllib.error import HTTPError
|
| 13 |
+
from urllib.request import urlopen, Request
|
| 14 |
+
from urllib.parse import urlparse
|
| 15 |
+
from distutils.errors import DistutilsError, DistutilsOptionError
|
| 16 |
+
from distutils.core import PyPIRCCommand
|
| 17 |
+
from distutils.spawn import spawn
|
| 18 |
+
from distutils import log
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
|
| 22 |
+
# https://bugs.python.org/issue40698
|
| 23 |
+
_FILE_CONTENT_DIGESTS = {
|
| 24 |
+
"md5_digest": getattr(hashlib, "md5", None),
|
| 25 |
+
"sha256_digest": getattr(hashlib, "sha256", None),
|
| 26 |
+
"blake2_256_digest": getattr(hashlib, "blake2b", None),
|
| 27 |
+
}
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class upload(PyPIRCCommand):
|
| 31 |
+
|
| 32 |
+
description = "upload binary package to PyPI"
|
| 33 |
+
|
| 34 |
+
user_options = PyPIRCCommand.user_options + [
|
| 35 |
+
('sign', 's',
|
| 36 |
+
'sign files to upload using gpg'),
|
| 37 |
+
('identity=', 'i', 'GPG identity used to sign files'),
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
boolean_options = PyPIRCCommand.boolean_options + ['sign']
|
| 41 |
+
|
| 42 |
+
def initialize_options(self):
|
| 43 |
+
PyPIRCCommand.initialize_options(self)
|
| 44 |
+
self.username = ''
|
| 45 |
+
self.password = ''
|
| 46 |
+
self.show_response = 0
|
| 47 |
+
self.sign = False
|
| 48 |
+
self.identity = None
|
| 49 |
+
|
| 50 |
+
def finalize_options(self):
|
| 51 |
+
PyPIRCCommand.finalize_options(self)
|
| 52 |
+
if self.identity and not self.sign:
|
| 53 |
+
raise DistutilsOptionError(
|
| 54 |
+
"Must use --sign for --identity to have meaning"
|
| 55 |
+
)
|
| 56 |
+
config = self._read_pypirc()
|
| 57 |
+
if config != {}:
|
| 58 |
+
self.username = config['username']
|
| 59 |
+
self.password = config['password']
|
| 60 |
+
self.repository = config['repository']
|
| 61 |
+
self.realm = config['realm']
|
| 62 |
+
|
| 63 |
+
# getting the password from the distribution
|
| 64 |
+
# if previously set by the register command
|
| 65 |
+
if not self.password and self.distribution.password:
|
| 66 |
+
self.password = self.distribution.password
|
| 67 |
+
|
| 68 |
+
def run(self):
|
| 69 |
+
if not self.distribution.dist_files:
|
| 70 |
+
msg = ("Must create and upload files in one command "
|
| 71 |
+
"(e.g. setup.py sdist upload)")
|
| 72 |
+
raise DistutilsOptionError(msg)
|
| 73 |
+
for command, pyversion, filename in self.distribution.dist_files:
|
| 74 |
+
self.upload_file(command, pyversion, filename)
|
| 75 |
+
|
| 76 |
+
def upload_file(self, command, pyversion, filename):
|
| 77 |
+
# Makes sure the repository URL is compliant
|
| 78 |
+
schema, netloc, url, params, query, fragments = \
|
| 79 |
+
urlparse(self.repository)
|
| 80 |
+
if params or query or fragments:
|
| 81 |
+
raise AssertionError("Incompatible url %s" % self.repository)
|
| 82 |
+
|
| 83 |
+
if schema not in ('http', 'https'):
|
| 84 |
+
raise AssertionError("unsupported schema " + schema)
|
| 85 |
+
|
| 86 |
+
# Sign if requested
|
| 87 |
+
if self.sign:
|
| 88 |
+
gpg_args = ["gpg", "--detach-sign", "-a", filename]
|
| 89 |
+
if self.identity:
|
| 90 |
+
gpg_args[2:2] = ["--local-user", self.identity]
|
| 91 |
+
spawn(gpg_args,
|
| 92 |
+
dry_run=self.dry_run)
|
| 93 |
+
|
| 94 |
+
# Fill in the data - send all the meta-data in case we need to
|
| 95 |
+
# register a new release
|
| 96 |
+
f = open(filename,'rb')
|
| 97 |
+
try:
|
| 98 |
+
content = f.read()
|
| 99 |
+
finally:
|
| 100 |
+
f.close()
|
| 101 |
+
|
| 102 |
+
meta = self.distribution.metadata
|
| 103 |
+
data = {
|
| 104 |
+
# action
|
| 105 |
+
':action': 'file_upload',
|
| 106 |
+
'protocol_version': '1',
|
| 107 |
+
|
| 108 |
+
# identify release
|
| 109 |
+
'name': meta.get_name(),
|
| 110 |
+
'version': meta.get_version(),
|
| 111 |
+
|
| 112 |
+
# file content
|
| 113 |
+
'content': (os.path.basename(filename),content),
|
| 114 |
+
'filetype': command,
|
| 115 |
+
'pyversion': pyversion,
|
| 116 |
+
|
| 117 |
+
# additional meta-data
|
| 118 |
+
'metadata_version': '1.0',
|
| 119 |
+
'summary': meta.get_description(),
|
| 120 |
+
'home_page': meta.get_url(),
|
| 121 |
+
'author': meta.get_contact(),
|
| 122 |
+
'author_email': meta.get_contact_email(),
|
| 123 |
+
'license': meta.get_licence(),
|
| 124 |
+
'description': meta.get_long_description(),
|
| 125 |
+
'keywords': meta.get_keywords(),
|
| 126 |
+
'platform': meta.get_platforms(),
|
| 127 |
+
'classifiers': meta.get_classifiers(),
|
| 128 |
+
'download_url': meta.get_download_url(),
|
| 129 |
+
# PEP 314
|
| 130 |
+
'provides': meta.get_provides(),
|
| 131 |
+
'requires': meta.get_requires(),
|
| 132 |
+
'obsoletes': meta.get_obsoletes(),
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
data['comment'] = ''
|
| 136 |
+
|
| 137 |
+
# file content digests
|
| 138 |
+
for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
|
| 139 |
+
if digest_cons is None:
|
| 140 |
+
continue
|
| 141 |
+
try:
|
| 142 |
+
data[digest_name] = digest_cons(content).hexdigest()
|
| 143 |
+
except ValueError:
|
| 144 |
+
# hash digest not available or blocked by security policy
|
| 145 |
+
pass
|
| 146 |
+
|
| 147 |
+
if self.sign:
|
| 148 |
+
with open(filename + ".asc", "rb") as f:
|
| 149 |
+
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
|
| 150 |
+
f.read())
|
| 151 |
+
|
| 152 |
+
# set up the authentication
|
| 153 |
+
user_pass = (self.username + ":" + self.password).encode('ascii')
|
| 154 |
+
# The exact encoding of the authentication string is debated.
|
| 155 |
+
# Anyway PyPI only accepts ascii for both username or password.
|
| 156 |
+
auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
|
| 157 |
+
|
| 158 |
+
# Build up the MIME payload for the POST data
|
| 159 |
+
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
| 160 |
+
sep_boundary = b'\r\n--' + boundary.encode('ascii')
|
| 161 |
+
end_boundary = sep_boundary + b'--\r\n'
|
| 162 |
+
body = io.BytesIO()
|
| 163 |
+
for key, value in data.items():
|
| 164 |
+
title = '\r\nContent-Disposition: form-data; name="%s"' % key
|
| 165 |
+
# handle multiple entries for the same name
|
| 166 |
+
if not isinstance(value, list):
|
| 167 |
+
value = [value]
|
| 168 |
+
for value in value:
|
| 169 |
+
if type(value) is tuple:
|
| 170 |
+
title += '; filename="%s"' % value[0]
|
| 171 |
+
value = value[1]
|
| 172 |
+
else:
|
| 173 |
+
value = str(value).encode('utf-8')
|
| 174 |
+
body.write(sep_boundary)
|
| 175 |
+
body.write(title.encode('utf-8'))
|
| 176 |
+
body.write(b"\r\n\r\n")
|
| 177 |
+
body.write(value)
|
| 178 |
+
body.write(end_boundary)
|
| 179 |
+
body = body.getvalue()
|
| 180 |
+
|
| 181 |
+
msg = "Submitting %s to %s" % (filename, self.repository)
|
| 182 |
+
self.announce(msg, log.INFO)
|
| 183 |
+
|
| 184 |
+
# build the Request
|
| 185 |
+
headers = {
|
| 186 |
+
'Content-type': 'multipart/form-data; boundary=%s' % boundary,
|
| 187 |
+
'Content-length': str(len(body)),
|
| 188 |
+
'Authorization': auth,
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
request = Request(self.repository, data=body,
|
| 192 |
+
headers=headers)
|
| 193 |
+
# send the data
|
| 194 |
+
try:
|
| 195 |
+
result = urlopen(request)
|
| 196 |
+
status = result.getcode()
|
| 197 |
+
reason = result.msg
|
| 198 |
+
except HTTPError as e:
|
| 199 |
+
status = e.code
|
| 200 |
+
reason = e.msg
|
| 201 |
+
except OSError as e:
|
| 202 |
+
self.announce(str(e), log.ERROR)
|
| 203 |
+
raise
|
| 204 |
+
|
| 205 |
+
if status == 200:
|
| 206 |
+
self.announce('Server response (%s): %s' % (status, reason),
|
| 207 |
+
log.INFO)
|
| 208 |
+
if self.show_response:
|
| 209 |
+
text = self._read_pypi_response(result)
|
| 210 |
+
msg = '\n'.join(('-' * 75, text, '-' * 75))
|
| 211 |
+
self.announce(msg, log.INFO)
|
| 212 |
+
else:
|
| 213 |
+
msg = 'Upload failed (%s): %s' % (status, reason)
|
| 214 |
+
self.announce(msg, log.ERROR)
|
| 215 |
+
raise DistutilsError(msg)
|
evalkit_cambrian/lib/python3.10/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:236bcb61156d76c4b8a05821b988c7b8c35bf0da28a4b614e8d6ab5212c25c6f
|
| 3 |
+
size 2055563
|
evalkit_cambrian/lib/python3.10/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356
|
| 3 |
+
size 1232695
|
evalkit_cambrian/lib/python3.10/lib2to3/Grammar.txt
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Grammar for 2to3. This grammar supports Python 2.x and 3.x.
|
| 2 |
+
|
| 3 |
+
# NOTE WELL: You should also follow all the steps listed at
|
| 4 |
+
# https://devguide.python.org/grammar/
|
| 5 |
+
|
| 6 |
+
# Start symbols for the grammar:
|
| 7 |
+
# file_input is a module or sequence of commands read from an input file;
|
| 8 |
+
# single_input is a single interactive statement;
|
| 9 |
+
# eval_input is the input for the eval() and input() functions.
|
| 10 |
+
# NB: compound_stmt in single_input is followed by extra NEWLINE!
|
| 11 |
+
file_input: (NEWLINE | stmt)* ENDMARKER
|
| 12 |
+
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
|
| 13 |
+
eval_input: testlist NEWLINE* ENDMARKER
|
| 14 |
+
|
| 15 |
+
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
|
| 16 |
+
decorators: decorator+
|
| 17 |
+
decorated: decorators (classdef | funcdef | async_funcdef)
|
| 18 |
+
async_funcdef: ASYNC funcdef
|
| 19 |
+
funcdef: 'def' NAME parameters ['->' test] ':' suite
|
| 20 |
+
parameters: '(' [typedargslist] ')'
|
| 21 |
+
|
| 22 |
+
# The following definition for typedarglist is equivalent to this set of rules:
|
| 23 |
+
#
|
| 24 |
+
# arguments = argument (',' argument)*
|
| 25 |
+
# argument = tfpdef ['=' test]
|
| 26 |
+
# kwargs = '**' tname [',']
|
| 27 |
+
# args = '*' [tname]
|
| 28 |
+
# kwonly_kwargs = (',' argument)* [',' [kwargs]]
|
| 29 |
+
# args_kwonly_kwargs = args kwonly_kwargs | kwargs
|
| 30 |
+
# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
|
| 31 |
+
# typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
|
| 32 |
+
# typedarglist = arguments ',' '/' [',' [typedargslist_no_posonly]])|(typedargslist_no_posonly)"
|
| 33 |
+
#
|
| 34 |
+
# It needs to be fully expanded to allow our LL(1) parser to work on it.
|
| 35 |
+
|
| 36 |
+
typedargslist: tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [
|
| 37 |
+
',' [((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])*
|
| 38 |
+
[',' ['**' tname [',']]] | '**' tname [','])
|
| 39 |
+
| tfpdef ['=' test] (',' tfpdef ['=' test])* [','])]
|
| 40 |
+
] | ((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])*
|
| 41 |
+
[',' ['**' tname [',']]] | '**' tname [','])
|
| 42 |
+
| tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
|
| 43 |
+
|
| 44 |
+
tname: NAME [':' test]
|
| 45 |
+
tfpdef: tname | '(' tfplist ')'
|
| 46 |
+
tfplist: tfpdef (',' tfpdef)* [',']
|
| 47 |
+
|
| 48 |
+
# The following definition for varargslist is equivalent to this set of rules:
|
| 49 |
+
#
|
| 50 |
+
# arguments = argument (',' argument )*
|
| 51 |
+
# argument = vfpdef ['=' test]
|
| 52 |
+
# kwargs = '**' vname [',']
|
| 53 |
+
# args = '*' [vname]
|
| 54 |
+
# kwonly_kwargs = (',' argument )* [',' [kwargs]]
|
| 55 |
+
# args_kwonly_kwargs = args kwonly_kwargs | kwargs
|
| 56 |
+
# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
|
| 57 |
+
# vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
|
| 58 |
+
# varargslist = arguments ',' '/' [','[(vararglist_no_posonly)]] | (vararglist_no_posonly)
|
| 59 |
+
#
|
| 60 |
+
# It needs to be fully expanded to allow our LL(1) parser to work on it.
|
| 61 |
+
|
| 62 |
+
varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [
|
| 63 |
+
((vfpdef ['=' test] ',')* ('*' [vname] (',' vname ['=' test])*
|
| 64 |
+
[',' ['**' vname [',']]] | '**' vname [','])
|
| 65 |
+
| vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
|
| 66 |
+
]] | ((vfpdef ['=' test] ',')*
|
| 67 |
+
('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]]| '**' vname [','])
|
| 68 |
+
| vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
|
| 69 |
+
|
| 70 |
+
vname: NAME
|
| 71 |
+
vfpdef: vname | '(' vfplist ')'
|
| 72 |
+
vfplist: vfpdef (',' vfpdef)* [',']
|
| 73 |
+
|
| 74 |
+
stmt: simple_stmt | compound_stmt
|
| 75 |
+
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
|
| 76 |
+
small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
|
| 77 |
+
import_stmt | global_stmt | exec_stmt | assert_stmt)
|
| 78 |
+
expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
|
| 79 |
+
('=' (yield_expr|testlist_star_expr))*)
|
| 80 |
+
annassign: ':' test ['=' test]
|
| 81 |
+
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
|
| 82 |
+
augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
|
| 83 |
+
'<<=' | '>>=' | '**=' | '//=')
|
| 84 |
+
# For normal and annotated assignments, additional restrictions enforced by the interpreter
|
| 85 |
+
print_stmt: 'print' ( [ test (',' test)* [','] ] |
|
| 86 |
+
'>>' test [ (',' test)+ [','] ] )
|
| 87 |
+
del_stmt: 'del' exprlist
|
| 88 |
+
pass_stmt: 'pass'
|
| 89 |
+
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
|
| 90 |
+
break_stmt: 'break'
|
| 91 |
+
continue_stmt: 'continue'
|
| 92 |
+
return_stmt: 'return' [testlist_star_expr]
|
| 93 |
+
yield_stmt: yield_expr
|
| 94 |
+
raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]]
|
| 95 |
+
import_stmt: import_name | import_from
|
| 96 |
+
import_name: 'import' dotted_as_names
|
| 97 |
+
import_from: ('from' ('.'* dotted_name | '.'+)
|
| 98 |
+
'import' ('*' | '(' import_as_names ')' | import_as_names))
|
| 99 |
+
import_as_name: NAME ['as' NAME]
|
| 100 |
+
dotted_as_name: dotted_name ['as' NAME]
|
| 101 |
+
import_as_names: import_as_name (',' import_as_name)* [',']
|
| 102 |
+
dotted_as_names: dotted_as_name (',' dotted_as_name)*
|
| 103 |
+
dotted_name: NAME ('.' NAME)*
|
| 104 |
+
global_stmt: ('global' | 'nonlocal') NAME (',' NAME)*
|
| 105 |
+
exec_stmt: 'exec' expr ['in' test [',' test]]
|
| 106 |
+
assert_stmt: 'assert' test [',' test]
|
| 107 |
+
|
| 108 |
+
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
|
| 109 |
+
async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
|
| 110 |
+
if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite]
|
| 111 |
+
while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite]
|
| 112 |
+
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
|
| 113 |
+
try_stmt: ('try' ':' suite
|
| 114 |
+
((except_clause ':' suite)+
|
| 115 |
+
['else' ':' suite]
|
| 116 |
+
['finally' ':' suite] |
|
| 117 |
+
'finally' ':' suite))
|
| 118 |
+
with_stmt: 'with' with_item (',' with_item)* ':' suite
|
| 119 |
+
with_item: test ['as' expr]
|
| 120 |
+
with_var: 'as' expr
|
| 121 |
+
# NB compile.c makes sure that the default except clause is last
|
| 122 |
+
except_clause: 'except' [test [(',' | 'as') test]]
|
| 123 |
+
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
|
| 124 |
+
|
| 125 |
+
# Backward compatibility cruft to support:
|
| 126 |
+
# [ x for x in lambda: True, lambda: False if x() ]
|
| 127 |
+
# even while also allowing:
|
| 128 |
+
# lambda x: 5 if x else 2
|
| 129 |
+
# (But not a mix of the two)
|
| 130 |
+
testlist_safe: old_test [(',' old_test)+ [',']]
|
| 131 |
+
old_test: or_test | old_lambdef
|
| 132 |
+
old_lambdef: 'lambda' [varargslist] ':' old_test
|
| 133 |
+
|
| 134 |
+
namedexpr_test: test [':=' test]
|
| 135 |
+
test: or_test ['if' or_test 'else' test] | lambdef
|
| 136 |
+
or_test: and_test ('or' and_test)*
|
| 137 |
+
and_test: not_test ('and' not_test)*
|
| 138 |
+
not_test: 'not' not_test | comparison
|
| 139 |
+
comparison: expr (comp_op expr)*
|
| 140 |
+
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
|
| 141 |
+
star_expr: '*' expr
|
| 142 |
+
expr: xor_expr ('|' xor_expr)*
|
| 143 |
+
xor_expr: and_expr ('^' and_expr)*
|
| 144 |
+
and_expr: shift_expr ('&' shift_expr)*
|
| 145 |
+
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
|
| 146 |
+
arith_expr: term (('+'|'-') term)*
|
| 147 |
+
term: factor (('*'|'@'|'/'|'%'|'//') factor)*
|
| 148 |
+
factor: ('+'|'-'|'~') factor | power
|
| 149 |
+
power: [AWAIT] atom trailer* ['**' factor]
|
| 150 |
+
atom: ('(' [yield_expr|testlist_gexp] ')' |
|
| 151 |
+
'[' [listmaker] ']' |
|
| 152 |
+
'{' [dictsetmaker] '}' |
|
| 153 |
+
'`' testlist1 '`' |
|
| 154 |
+
NAME | NUMBER | STRING+ | '.' '.' '.')
|
| 155 |
+
listmaker: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
|
| 156 |
+
testlist_gexp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
|
| 157 |
+
lambdef: 'lambda' [varargslist] ':' test
|
| 158 |
+
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
|
| 159 |
+
subscriptlist: subscript (',' subscript)* [',']
|
| 160 |
+
subscript: test | [test] ':' [test] [sliceop]
|
| 161 |
+
sliceop: ':' [test]
|
| 162 |
+
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
|
| 163 |
+
testlist: test (',' test)* [',']
|
| 164 |
+
dictsetmaker: ( ((test ':' test | '**' expr)
|
| 165 |
+
(comp_for | (',' (test ':' test | '**' expr))* [','])) |
|
| 166 |
+
((test | star_expr)
|
| 167 |
+
(comp_for | (',' (test | star_expr))* [','])) )
|
| 168 |
+
|
| 169 |
+
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
|
| 170 |
+
|
| 171 |
+
arglist: argument (',' argument)* [',']
|
| 172 |
+
|
| 173 |
+
# "test '=' test" is really "keyword '=' test", but we have no such token.
|
| 174 |
+
# These need to be in a single rule to avoid grammar that is ambiguous
|
| 175 |
+
# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
|
| 176 |
+
# we explicitly match '*' here, too, to give it proper precedence.
|
| 177 |
+
# Illegal combinations and orderings are blocked in ast.c:
|
| 178 |
+
# multiple (test comp_for) arguments are blocked; keyword unpackings
|
| 179 |
+
# that precede iterable unpackings are blocked; etc.
|
| 180 |
+
argument: ( test [comp_for] |
|
| 181 |
+
test ':=' test |
|
| 182 |
+
test '=' test |
|
| 183 |
+
'**' test |
|
| 184 |
+
'*' test )
|
| 185 |
+
|
| 186 |
+
comp_iter: comp_for | comp_if
|
| 187 |
+
comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [comp_iter]
|
| 188 |
+
comp_if: 'if' old_test [comp_iter]
|
| 189 |
+
|
| 190 |
+
testlist1: test (',' test)*
|
| 191 |
+
|
| 192 |
+
# not used in grammar, but may appear in "node" passed from Parser to Compiler
|
| 193 |
+
encoding_decl: NAME
|
| 194 |
+
|
| 195 |
+
yield_expr: 'yield' [yield_arg]
|
| 196 |
+
yield_arg: 'from' test | testlist_star_expr
|
evalkit_cambrian/lib/python3.10/lib2to3/PatternGrammar.txt
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
# A grammar to describe tree matching patterns.
|
| 5 |
+
# Not shown here:
|
| 6 |
+
# - 'TOKEN' stands for any token (leaf node)
|
| 7 |
+
# - 'any' stands for any node (leaf or interior)
|
| 8 |
+
# With 'any' we can still specify the sub-structure.
|
| 9 |
+
|
| 10 |
+
# The start symbol is 'Matcher'.
|
| 11 |
+
|
| 12 |
+
Matcher: Alternatives ENDMARKER
|
| 13 |
+
|
| 14 |
+
Alternatives: Alternative ('|' Alternative)*
|
| 15 |
+
|
| 16 |
+
Alternative: (Unit | NegatedUnit)+
|
| 17 |
+
|
| 18 |
+
Unit: [NAME '='] ( STRING [Repeater]
|
| 19 |
+
| NAME [Details] [Repeater]
|
| 20 |
+
| '(' Alternatives ')' [Repeater]
|
| 21 |
+
| '[' Alternatives ']'
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')')
|
| 25 |
+
|
| 26 |
+
Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}'
|
| 27 |
+
|
| 28 |
+
Details: '<' Alternatives '>'
|
evalkit_cambrian/lib/python3.10/lib2to3/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import warnings
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
warnings.warn(
|
| 5 |
+
"lib2to3 package is deprecated and may not be able to parse Python 3.10+",
|
| 6 |
+
PendingDeprecationWarning,
|
| 7 |
+
stacklevel=2,
|
| 8 |
+
)
|
evalkit_cambrian/lib/python3.10/lib2to3/__main__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from .main import main
|
| 3 |
+
|
| 4 |
+
sys.exit(main("lib2to3.fixes"))
|
evalkit_cambrian/lib/python3.10/lib2to3/btm_matcher.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A bottom-up tree matching algorithm implementation meant to speed
|
| 2 |
+
up 2to3's matching process. After the tree patterns are reduced to
|
| 3 |
+
their rarest linear path, a linear Aho-Corasick automaton is
|
| 4 |
+
created. The linear automaton traverses the linear paths from the
|
| 5 |
+
leaves to the root of the AST and returns a set of nodes for further
|
| 6 |
+
matching. This reduces significantly the number of candidate nodes."""
|
| 7 |
+
|
| 8 |
+
__author__ = "George Boutsioukis <gboutsioukis@gmail.com>"
|
| 9 |
+
|
| 10 |
+
import logging
|
| 11 |
+
import itertools
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
|
| 14 |
+
from . import pytree
|
| 15 |
+
from .btm_utils import reduce_tree
|
| 16 |
+
|
| 17 |
+
class BMNode(object):
|
| 18 |
+
"""Class for a node of the Aho-Corasick automaton used in matching"""
|
| 19 |
+
count = itertools.count()
|
| 20 |
+
def __init__(self):
|
| 21 |
+
self.transition_table = {}
|
| 22 |
+
self.fixers = []
|
| 23 |
+
self.id = next(BMNode.count)
|
| 24 |
+
self.content = ''
|
| 25 |
+
|
| 26 |
+
class BottomMatcher(object):
|
| 27 |
+
"""The main matcher class. After instantiating the patterns should
|
| 28 |
+
be added using the add_fixer method"""
|
| 29 |
+
|
| 30 |
+
def __init__(self):
|
| 31 |
+
self.match = set()
|
| 32 |
+
self.root = BMNode()
|
| 33 |
+
self.nodes = [self.root]
|
| 34 |
+
self.fixers = []
|
| 35 |
+
self.logger = logging.getLogger("RefactoringTool")
|
| 36 |
+
|
| 37 |
+
def add_fixer(self, fixer):
|
| 38 |
+
"""Reduces a fixer's pattern tree to a linear path and adds it
|
| 39 |
+
to the matcher(a common Aho-Corasick automaton). The fixer is
|
| 40 |
+
appended on the matching states and called when they are
|
| 41 |
+
reached"""
|
| 42 |
+
self.fixers.append(fixer)
|
| 43 |
+
tree = reduce_tree(fixer.pattern_tree)
|
| 44 |
+
linear = tree.get_linear_subpattern()
|
| 45 |
+
match_nodes = self.add(linear, start=self.root)
|
| 46 |
+
for match_node in match_nodes:
|
| 47 |
+
match_node.fixers.append(fixer)
|
| 48 |
+
|
| 49 |
+
def add(self, pattern, start):
|
| 50 |
+
"Recursively adds a linear pattern to the AC automaton"
|
| 51 |
+
#print("adding pattern", pattern, "to", start)
|
| 52 |
+
if not pattern:
|
| 53 |
+
#print("empty pattern")
|
| 54 |
+
return [start]
|
| 55 |
+
if isinstance(pattern[0], tuple):
|
| 56 |
+
#alternatives
|
| 57 |
+
#print("alternatives")
|
| 58 |
+
match_nodes = []
|
| 59 |
+
for alternative in pattern[0]:
|
| 60 |
+
#add all alternatives, and add the rest of the pattern
|
| 61 |
+
#to each end node
|
| 62 |
+
end_nodes = self.add(alternative, start=start)
|
| 63 |
+
for end in end_nodes:
|
| 64 |
+
match_nodes.extend(self.add(pattern[1:], end))
|
| 65 |
+
return match_nodes
|
| 66 |
+
else:
|
| 67 |
+
#single token
|
| 68 |
+
#not last
|
| 69 |
+
if pattern[0] not in start.transition_table:
|
| 70 |
+
#transition did not exist, create new
|
| 71 |
+
next_node = BMNode()
|
| 72 |
+
start.transition_table[pattern[0]] = next_node
|
| 73 |
+
else:
|
| 74 |
+
#transition exists already, follow
|
| 75 |
+
next_node = start.transition_table[pattern[0]]
|
| 76 |
+
|
| 77 |
+
if pattern[1:]:
|
| 78 |
+
end_nodes = self.add(pattern[1:], start=next_node)
|
| 79 |
+
else:
|
| 80 |
+
end_nodes = [next_node]
|
| 81 |
+
return end_nodes
|
| 82 |
+
|
| 83 |
+
def run(self, leaves):
|
| 84 |
+
"""The main interface with the bottom matcher. The tree is
|
| 85 |
+
traversed from the bottom using the constructed
|
| 86 |
+
automaton. Nodes are only checked once as the tree is
|
| 87 |
+
retraversed. When the automaton fails, we give it one more
|
| 88 |
+
shot(in case the above tree matches as a whole with the
|
| 89 |
+
rejected leaf), then we break for the next leaf. There is the
|
| 90 |
+
special case of multiple arguments(see code comments) where we
|
| 91 |
+
recheck the nodes
|
| 92 |
+
|
| 93 |
+
Args:
|
| 94 |
+
The leaves of the AST tree to be matched
|
| 95 |
+
|
| 96 |
+
Returns:
|
| 97 |
+
A dictionary of node matches with fixers as the keys
|
| 98 |
+
"""
|
| 99 |
+
current_ac_node = self.root
|
| 100 |
+
results = defaultdict(list)
|
| 101 |
+
for leaf in leaves:
|
| 102 |
+
current_ast_node = leaf
|
| 103 |
+
while current_ast_node:
|
| 104 |
+
current_ast_node.was_checked = True
|
| 105 |
+
for child in current_ast_node.children:
|
| 106 |
+
# multiple statements, recheck
|
| 107 |
+
if isinstance(child, pytree.Leaf) and child.value == ";":
|
| 108 |
+
current_ast_node.was_checked = False
|
| 109 |
+
break
|
| 110 |
+
if current_ast_node.type == 1:
|
| 111 |
+
#name
|
| 112 |
+
node_token = current_ast_node.value
|
| 113 |
+
else:
|
| 114 |
+
node_token = current_ast_node.type
|
| 115 |
+
|
| 116 |
+
if node_token in current_ac_node.transition_table:
|
| 117 |
+
#token matches
|
| 118 |
+
current_ac_node = current_ac_node.transition_table[node_token]
|
| 119 |
+
for fixer in current_ac_node.fixers:
|
| 120 |
+
results[fixer].append(current_ast_node)
|
| 121 |
+
else:
|
| 122 |
+
#matching failed, reset automaton
|
| 123 |
+
current_ac_node = self.root
|
| 124 |
+
if (current_ast_node.parent is not None
|
| 125 |
+
and current_ast_node.parent.was_checked):
|
| 126 |
+
#the rest of the tree upwards has been checked, next leaf
|
| 127 |
+
break
|
| 128 |
+
|
| 129 |
+
#recheck the rejected node once from the root
|
| 130 |
+
if node_token in current_ac_node.transition_table:
|
| 131 |
+
#token matches
|
| 132 |
+
current_ac_node = current_ac_node.transition_table[node_token]
|
| 133 |
+
for fixer in current_ac_node.fixers:
|
| 134 |
+
results[fixer].append(current_ast_node)
|
| 135 |
+
|
| 136 |
+
current_ast_node = current_ast_node.parent
|
| 137 |
+
return results
|
| 138 |
+
|
| 139 |
+
def print_ac(self):
|
| 140 |
+
"Prints a graphviz diagram of the BM automaton(for debugging)"
|
| 141 |
+
print("digraph g{")
|
| 142 |
+
def print_node(node):
|
| 143 |
+
for subnode_key in node.transition_table.keys():
|
| 144 |
+
subnode = node.transition_table[subnode_key]
|
| 145 |
+
print("%d -> %d [label=%s] //%s" %
|
| 146 |
+
(node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))
|
| 147 |
+
if subnode_key == 1:
|
| 148 |
+
print(subnode.content)
|
| 149 |
+
print_node(subnode)
|
| 150 |
+
print_node(self.root)
|
| 151 |
+
print("}")
|
| 152 |
+
|
| 153 |
+
# taken from pytree.py for debugging; only used by print_ac
|
| 154 |
+
_type_reprs = {}
|
| 155 |
+
def type_repr(type_num):
|
| 156 |
+
global _type_reprs
|
| 157 |
+
if not _type_reprs:
|
| 158 |
+
from .pygram import python_symbols
|
| 159 |
+
# printing tokens is possible but not as useful
|
| 160 |
+
# from .pgen2 import token // token.__dict__.items():
|
| 161 |
+
for name, val in python_symbols.__dict__.items():
|
| 162 |
+
if type(val) == int: _type_reprs[val] = name
|
| 163 |
+
return _type_reprs.setdefault(type_num, type_num)
|
evalkit_cambrian/lib/python3.10/lib2to3/btm_utils.py
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Utility functions used by the btm_matcher module"
|
| 2 |
+
|
| 3 |
+
from . import pytree
|
| 4 |
+
from .pgen2 import grammar, token
|
| 5 |
+
from .pygram import pattern_symbols, python_symbols
|
| 6 |
+
|
| 7 |
+
syms = pattern_symbols
|
| 8 |
+
pysyms = python_symbols
|
| 9 |
+
tokens = grammar.opmap
|
| 10 |
+
token_labels = token
|
| 11 |
+
|
| 12 |
+
TYPE_ANY = -1
|
| 13 |
+
TYPE_ALTERNATIVES = -2
|
| 14 |
+
TYPE_GROUP = -3
|
| 15 |
+
|
| 16 |
+
class MinNode(object):
|
| 17 |
+
"""This class serves as an intermediate representation of the
|
| 18 |
+
pattern tree during the conversion to sets of leaf-to-root
|
| 19 |
+
subpatterns"""
|
| 20 |
+
|
| 21 |
+
def __init__(self, type=None, name=None):
|
| 22 |
+
self.type = type
|
| 23 |
+
self.name = name
|
| 24 |
+
self.children = []
|
| 25 |
+
self.leaf = False
|
| 26 |
+
self.parent = None
|
| 27 |
+
self.alternatives = []
|
| 28 |
+
self.group = []
|
| 29 |
+
|
| 30 |
+
def __repr__(self):
|
| 31 |
+
return str(self.type) + ' ' + str(self.name)
|
| 32 |
+
|
| 33 |
+
def leaf_to_root(self):
|
| 34 |
+
"""Internal method. Returns a characteristic path of the
|
| 35 |
+
pattern tree. This method must be run for all leaves until the
|
| 36 |
+
linear subpatterns are merged into a single"""
|
| 37 |
+
node = self
|
| 38 |
+
subp = []
|
| 39 |
+
while node:
|
| 40 |
+
if node.type == TYPE_ALTERNATIVES:
|
| 41 |
+
node.alternatives.append(subp)
|
| 42 |
+
if len(node.alternatives) == len(node.children):
|
| 43 |
+
#last alternative
|
| 44 |
+
subp = [tuple(node.alternatives)]
|
| 45 |
+
node.alternatives = []
|
| 46 |
+
node = node.parent
|
| 47 |
+
continue
|
| 48 |
+
else:
|
| 49 |
+
node = node.parent
|
| 50 |
+
subp = None
|
| 51 |
+
break
|
| 52 |
+
|
| 53 |
+
if node.type == TYPE_GROUP:
|
| 54 |
+
node.group.append(subp)
|
| 55 |
+
#probably should check the number of leaves
|
| 56 |
+
if len(node.group) == len(node.children):
|
| 57 |
+
subp = get_characteristic_subpattern(node.group)
|
| 58 |
+
node.group = []
|
| 59 |
+
node = node.parent
|
| 60 |
+
continue
|
| 61 |
+
else:
|
| 62 |
+
node = node.parent
|
| 63 |
+
subp = None
|
| 64 |
+
break
|
| 65 |
+
|
| 66 |
+
if node.type == token_labels.NAME and node.name:
|
| 67 |
+
#in case of type=name, use the name instead
|
| 68 |
+
subp.append(node.name)
|
| 69 |
+
else:
|
| 70 |
+
subp.append(node.type)
|
| 71 |
+
|
| 72 |
+
node = node.parent
|
| 73 |
+
return subp
|
| 74 |
+
|
| 75 |
+
def get_linear_subpattern(self):
|
| 76 |
+
"""Drives the leaf_to_root method. The reason that
|
| 77 |
+
leaf_to_root must be run multiple times is because we need to
|
| 78 |
+
reject 'group' matches; for example the alternative form
|
| 79 |
+
(a | b c) creates a group [b c] that needs to be matched. Since
|
| 80 |
+
matching multiple linear patterns overcomes the automaton's
|
| 81 |
+
capabilities, leaf_to_root merges each group into a single
|
| 82 |
+
choice based on 'characteristic'ity,
|
| 83 |
+
|
| 84 |
+
i.e. (a|b c) -> (a|b) if b more characteristic than c
|
| 85 |
+
|
| 86 |
+
Returns: The most 'characteristic'(as defined by
|
| 87 |
+
get_characteristic_subpattern) path for the compiled pattern
|
| 88 |
+
tree.
|
| 89 |
+
"""
|
| 90 |
+
|
| 91 |
+
for l in self.leaves():
|
| 92 |
+
subp = l.leaf_to_root()
|
| 93 |
+
if subp:
|
| 94 |
+
return subp
|
| 95 |
+
|
| 96 |
+
def leaves(self):
|
| 97 |
+
"Generator that returns the leaves of the tree"
|
| 98 |
+
for child in self.children:
|
| 99 |
+
yield from child.leaves()
|
| 100 |
+
if not self.children:
|
| 101 |
+
yield self
|
| 102 |
+
|
| 103 |
+
def reduce_tree(node, parent=None):
|
| 104 |
+
"""
|
| 105 |
+
Internal function. Reduces a compiled pattern tree to an
|
| 106 |
+
intermediate representation suitable for feeding the
|
| 107 |
+
automaton. This also trims off any optional pattern elements(like
|
| 108 |
+
[a], a*).
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
new_node = None
|
| 112 |
+
#switch on the node type
|
| 113 |
+
if node.type == syms.Matcher:
|
| 114 |
+
#skip
|
| 115 |
+
node = node.children[0]
|
| 116 |
+
|
| 117 |
+
if node.type == syms.Alternatives :
|
| 118 |
+
#2 cases
|
| 119 |
+
if len(node.children) <= 2:
|
| 120 |
+
#just a single 'Alternative', skip this node
|
| 121 |
+
new_node = reduce_tree(node.children[0], parent)
|
| 122 |
+
else:
|
| 123 |
+
#real alternatives
|
| 124 |
+
new_node = MinNode(type=TYPE_ALTERNATIVES)
|
| 125 |
+
#skip odd children('|' tokens)
|
| 126 |
+
for child in node.children:
|
| 127 |
+
if node.children.index(child)%2:
|
| 128 |
+
continue
|
| 129 |
+
reduced = reduce_tree(child, new_node)
|
| 130 |
+
if reduced is not None:
|
| 131 |
+
new_node.children.append(reduced)
|
| 132 |
+
elif node.type == syms.Alternative:
|
| 133 |
+
if len(node.children) > 1:
|
| 134 |
+
|
| 135 |
+
new_node = MinNode(type=TYPE_GROUP)
|
| 136 |
+
for child in node.children:
|
| 137 |
+
reduced = reduce_tree(child, new_node)
|
| 138 |
+
if reduced:
|
| 139 |
+
new_node.children.append(reduced)
|
| 140 |
+
if not new_node.children:
|
| 141 |
+
# delete the group if all of the children were reduced to None
|
| 142 |
+
new_node = None
|
| 143 |
+
|
| 144 |
+
else:
|
| 145 |
+
new_node = reduce_tree(node.children[0], parent)
|
| 146 |
+
|
| 147 |
+
elif node.type == syms.Unit:
|
| 148 |
+
if (isinstance(node.children[0], pytree.Leaf) and
|
| 149 |
+
node.children[0].value == '('):
|
| 150 |
+
#skip parentheses
|
| 151 |
+
return reduce_tree(node.children[1], parent)
|
| 152 |
+
if ((isinstance(node.children[0], pytree.Leaf) and
|
| 153 |
+
node.children[0].value == '[')
|
| 154 |
+
or
|
| 155 |
+
(len(node.children)>1 and
|
| 156 |
+
hasattr(node.children[1], "value") and
|
| 157 |
+
node.children[1].value == '[')):
|
| 158 |
+
#skip whole unit if its optional
|
| 159 |
+
return None
|
| 160 |
+
|
| 161 |
+
leaf = True
|
| 162 |
+
details_node = None
|
| 163 |
+
alternatives_node = None
|
| 164 |
+
has_repeater = False
|
| 165 |
+
repeater_node = None
|
| 166 |
+
has_variable_name = False
|
| 167 |
+
|
| 168 |
+
for child in node.children:
|
| 169 |
+
if child.type == syms.Details:
|
| 170 |
+
leaf = False
|
| 171 |
+
details_node = child
|
| 172 |
+
elif child.type == syms.Repeater:
|
| 173 |
+
has_repeater = True
|
| 174 |
+
repeater_node = child
|
| 175 |
+
elif child.type == syms.Alternatives:
|
| 176 |
+
alternatives_node = child
|
| 177 |
+
if hasattr(child, 'value') and child.value == '=': # variable name
|
| 178 |
+
has_variable_name = True
|
| 179 |
+
|
| 180 |
+
#skip variable name
|
| 181 |
+
if has_variable_name:
|
| 182 |
+
#skip variable name, '='
|
| 183 |
+
name_leaf = node.children[2]
|
| 184 |
+
if hasattr(name_leaf, 'value') and name_leaf.value == '(':
|
| 185 |
+
# skip parenthesis
|
| 186 |
+
name_leaf = node.children[3]
|
| 187 |
+
else:
|
| 188 |
+
name_leaf = node.children[0]
|
| 189 |
+
|
| 190 |
+
#set node type
|
| 191 |
+
if name_leaf.type == token_labels.NAME:
|
| 192 |
+
#(python) non-name or wildcard
|
| 193 |
+
if name_leaf.value == 'any':
|
| 194 |
+
new_node = MinNode(type=TYPE_ANY)
|
| 195 |
+
else:
|
| 196 |
+
if hasattr(token_labels, name_leaf.value):
|
| 197 |
+
new_node = MinNode(type=getattr(token_labels, name_leaf.value))
|
| 198 |
+
else:
|
| 199 |
+
new_node = MinNode(type=getattr(pysyms, name_leaf.value))
|
| 200 |
+
|
| 201 |
+
elif name_leaf.type == token_labels.STRING:
|
| 202 |
+
#(python) name or character; remove the apostrophes from
|
| 203 |
+
#the string value
|
| 204 |
+
name = name_leaf.value.strip("'")
|
| 205 |
+
if name in tokens:
|
| 206 |
+
new_node = MinNode(type=tokens[name])
|
| 207 |
+
else:
|
| 208 |
+
new_node = MinNode(type=token_labels.NAME, name=name)
|
| 209 |
+
elif name_leaf.type == syms.Alternatives:
|
| 210 |
+
new_node = reduce_tree(alternatives_node, parent)
|
| 211 |
+
|
| 212 |
+
#handle repeaters
|
| 213 |
+
if has_repeater:
|
| 214 |
+
if repeater_node.children[0].value == '*':
|
| 215 |
+
#reduce to None
|
| 216 |
+
new_node = None
|
| 217 |
+
elif repeater_node.children[0].value == '+':
|
| 218 |
+
#reduce to a single occurrence i.e. do nothing
|
| 219 |
+
pass
|
| 220 |
+
else:
|
| 221 |
+
#TODO: handle {min, max} repeaters
|
| 222 |
+
raise NotImplementedError
|
| 223 |
+
pass
|
| 224 |
+
|
| 225 |
+
#add children
|
| 226 |
+
if details_node and new_node is not None:
|
| 227 |
+
for child in details_node.children[1:-1]:
|
| 228 |
+
#skip '<', '>' markers
|
| 229 |
+
reduced = reduce_tree(child, new_node)
|
| 230 |
+
if reduced is not None:
|
| 231 |
+
new_node.children.append(reduced)
|
| 232 |
+
if new_node:
|
| 233 |
+
new_node.parent = parent
|
| 234 |
+
return new_node
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def get_characteristic_subpattern(subpatterns):
|
| 238 |
+
"""Picks the most characteristic from a list of linear patterns
|
| 239 |
+
Current order used is:
|
| 240 |
+
names > common_names > common_chars
|
| 241 |
+
"""
|
| 242 |
+
if not isinstance(subpatterns, list):
|
| 243 |
+
return subpatterns
|
| 244 |
+
if len(subpatterns)==1:
|
| 245 |
+
return subpatterns[0]
|
| 246 |
+
|
| 247 |
+
# first pick out the ones containing variable names
|
| 248 |
+
subpatterns_with_names = []
|
| 249 |
+
subpatterns_with_common_names = []
|
| 250 |
+
common_names = ['in', 'for', 'if' , 'not', 'None']
|
| 251 |
+
subpatterns_with_common_chars = []
|
| 252 |
+
common_chars = "[]().,:"
|
| 253 |
+
for subpattern in subpatterns:
|
| 254 |
+
if any(rec_test(subpattern, lambda x: type(x) is str)):
|
| 255 |
+
if any(rec_test(subpattern,
|
| 256 |
+
lambda x: isinstance(x, str) and x in common_chars)):
|
| 257 |
+
subpatterns_with_common_chars.append(subpattern)
|
| 258 |
+
elif any(rec_test(subpattern,
|
| 259 |
+
lambda x: isinstance(x, str) and x in common_names)):
|
| 260 |
+
subpatterns_with_common_names.append(subpattern)
|
| 261 |
+
|
| 262 |
+
else:
|
| 263 |
+
subpatterns_with_names.append(subpattern)
|
| 264 |
+
|
| 265 |
+
if subpatterns_with_names:
|
| 266 |
+
subpatterns = subpatterns_with_names
|
| 267 |
+
elif subpatterns_with_common_names:
|
| 268 |
+
subpatterns = subpatterns_with_common_names
|
| 269 |
+
elif subpatterns_with_common_chars:
|
| 270 |
+
subpatterns = subpatterns_with_common_chars
|
| 271 |
+
# of the remaining subpatterns pick out the longest one
|
| 272 |
+
return max(subpatterns, key=len)
|
| 273 |
+
|
| 274 |
+
def rec_test(sequence, test_func):
|
| 275 |
+
"""Tests test_func on all items of sequence and items of included
|
| 276 |
+
sub-iterables"""
|
| 277 |
+
for x in sequence:
|
| 278 |
+
if isinstance(x, (list, tuple)):
|
| 279 |
+
yield from rec_test(x, test_func)
|
| 280 |
+
else:
|
| 281 |
+
yield test_func(x)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixer_base.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Base class for fixers (optional, but recommended)."""
|
| 5 |
+
|
| 6 |
+
# Python imports
|
| 7 |
+
import itertools
|
| 8 |
+
|
| 9 |
+
# Local imports
|
| 10 |
+
from .patcomp import PatternCompiler
|
| 11 |
+
from . import pygram
|
| 12 |
+
from .fixer_util import does_tree_import
|
| 13 |
+
|
| 14 |
+
class BaseFix(object):
|
| 15 |
+
|
| 16 |
+
"""Optional base class for fixers.
|
| 17 |
+
|
| 18 |
+
The subclass name must be FixFooBar where FooBar is the result of
|
| 19 |
+
removing underscores and capitalizing the words of the fix name.
|
| 20 |
+
For example, the class name for a fixer named 'has_key' should be
|
| 21 |
+
FixHasKey.
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
PATTERN = None # Most subclasses should override with a string literal
|
| 25 |
+
pattern = None # Compiled pattern, set by compile_pattern()
|
| 26 |
+
pattern_tree = None # Tree representation of the pattern
|
| 27 |
+
options = None # Options object passed to initializer
|
| 28 |
+
filename = None # The filename (set by set_filename)
|
| 29 |
+
numbers = itertools.count(1) # For new_name()
|
| 30 |
+
used_names = set() # A set of all used NAMEs
|
| 31 |
+
order = "post" # Does the fixer prefer pre- or post-order traversal
|
| 32 |
+
explicit = False # Is this ignored by refactor.py -f all?
|
| 33 |
+
run_order = 5 # Fixers will be sorted by run order before execution
|
| 34 |
+
# Lower numbers will be run first.
|
| 35 |
+
_accept_type = None # [Advanced and not public] This tells RefactoringTool
|
| 36 |
+
# which node type to accept when there's not a pattern.
|
| 37 |
+
|
| 38 |
+
keep_line_order = False # For the bottom matcher: match with the
|
| 39 |
+
# original line order
|
| 40 |
+
BM_compatible = False # Compatibility with the bottom matching
|
| 41 |
+
# module; every fixer should set this
|
| 42 |
+
# manually
|
| 43 |
+
|
| 44 |
+
# Shortcut for access to Python grammar symbols
|
| 45 |
+
syms = pygram.python_symbols
|
| 46 |
+
|
| 47 |
+
def __init__(self, options, log):
|
| 48 |
+
"""Initializer. Subclass may override.
|
| 49 |
+
|
| 50 |
+
Args:
|
| 51 |
+
options: a dict containing the options passed to RefactoringTool
|
| 52 |
+
that could be used to customize the fixer through the command line.
|
| 53 |
+
log: a list to append warnings and other messages to.
|
| 54 |
+
"""
|
| 55 |
+
self.options = options
|
| 56 |
+
self.log = log
|
| 57 |
+
self.compile_pattern()
|
| 58 |
+
|
| 59 |
+
def compile_pattern(self):
|
| 60 |
+
"""Compiles self.PATTERN into self.pattern.
|
| 61 |
+
|
| 62 |
+
Subclass may override if it doesn't want to use
|
| 63 |
+
self.{pattern,PATTERN} in .match().
|
| 64 |
+
"""
|
| 65 |
+
if self.PATTERN is not None:
|
| 66 |
+
PC = PatternCompiler()
|
| 67 |
+
self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
|
| 68 |
+
with_tree=True)
|
| 69 |
+
|
| 70 |
+
def set_filename(self, filename):
|
| 71 |
+
"""Set the filename.
|
| 72 |
+
|
| 73 |
+
The main refactoring tool should call this.
|
| 74 |
+
"""
|
| 75 |
+
self.filename = filename
|
| 76 |
+
|
| 77 |
+
def match(self, node):
|
| 78 |
+
"""Returns match for a given parse tree node.
|
| 79 |
+
|
| 80 |
+
Should return a true or false object (not necessarily a bool).
|
| 81 |
+
It may return a non-empty dict of matching sub-nodes as
|
| 82 |
+
returned by a matching pattern.
|
| 83 |
+
|
| 84 |
+
Subclass may override.
|
| 85 |
+
"""
|
| 86 |
+
results = {"node": node}
|
| 87 |
+
return self.pattern.match(node, results) and results
|
| 88 |
+
|
| 89 |
+
def transform(self, node, results):
|
| 90 |
+
"""Returns the transformation for a given parse tree node.
|
| 91 |
+
|
| 92 |
+
Args:
|
| 93 |
+
node: the root of the parse tree that matched the fixer.
|
| 94 |
+
results: a dict mapping symbolic names to part of the match.
|
| 95 |
+
|
| 96 |
+
Returns:
|
| 97 |
+
None, or a node that is a modified copy of the
|
| 98 |
+
argument node. The node argument may also be modified in-place to
|
| 99 |
+
effect the same change.
|
| 100 |
+
|
| 101 |
+
Subclass *must* override.
|
| 102 |
+
"""
|
| 103 |
+
raise NotImplementedError()
|
| 104 |
+
|
| 105 |
+
def new_name(self, template="xxx_todo_changeme"):
|
| 106 |
+
"""Return a string suitable for use as an identifier
|
| 107 |
+
|
| 108 |
+
The new name is guaranteed not to conflict with other identifiers.
|
| 109 |
+
"""
|
| 110 |
+
name = template
|
| 111 |
+
while name in self.used_names:
|
| 112 |
+
name = template + str(next(self.numbers))
|
| 113 |
+
self.used_names.add(name)
|
| 114 |
+
return name
|
| 115 |
+
|
| 116 |
+
def log_message(self, message):
|
| 117 |
+
if self.first_log:
|
| 118 |
+
self.first_log = False
|
| 119 |
+
self.log.append("### In file %s ###" % self.filename)
|
| 120 |
+
self.log.append(message)
|
| 121 |
+
|
| 122 |
+
def cannot_convert(self, node, reason=None):
|
| 123 |
+
"""Warn the user that a given chunk of code is not valid Python 3,
|
| 124 |
+
but that it cannot be converted automatically.
|
| 125 |
+
|
| 126 |
+
First argument is the top-level node for the code in question.
|
| 127 |
+
Optional second argument is why it can't be converted.
|
| 128 |
+
"""
|
| 129 |
+
lineno = node.get_lineno()
|
| 130 |
+
for_output = node.clone()
|
| 131 |
+
for_output.prefix = ""
|
| 132 |
+
msg = "Line %d: could not convert: %s"
|
| 133 |
+
self.log_message(msg % (lineno, for_output))
|
| 134 |
+
if reason:
|
| 135 |
+
self.log_message(reason)
|
| 136 |
+
|
| 137 |
+
def warning(self, node, reason):
|
| 138 |
+
"""Used for warning the user about possible uncertainty in the
|
| 139 |
+
translation.
|
| 140 |
+
|
| 141 |
+
First argument is the top-level node for the code in question.
|
| 142 |
+
Optional second argument is why it can't be converted.
|
| 143 |
+
"""
|
| 144 |
+
lineno = node.get_lineno()
|
| 145 |
+
self.log_message("Line %d: %s" % (lineno, reason))
|
| 146 |
+
|
| 147 |
+
    def start_tree(self, tree, filename):
        """Some fixers need to maintain tree-wide state.
        This method is called once, at the start of tree fix-up.

        tree - the root node of the tree to be processed.
        filename - the name of the file the tree came from.
        """
        # Names already bound in the tree; consulted by new_name().
        self.used_names = tree.used_names
        self.set_filename(filename)
        # Counter used by new_name() to disambiguate generated identifiers.
        self.numbers = itertools.count(1)
        # Reset so log_message() re-emits the per-file header.
        self.first_log = True
|
| 158 |
+
|
| 159 |
+
    def finish_tree(self, tree, filename):
        """Some fixers need to maintain tree-wide state.
        This method is called once, at the conclusion of tree fix-up.

        tree - the root node of the tree to be processed.
        filename - the name of the file the tree came from.
        """
        # Default implementation: nothing to clean up.
        pass
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
class ConditionalFix(BaseFix):
    """ Base class for fixers which not execute if an import is found. """

    # Dotted name of the import which, if found at the top level of the
    # tree, causes this fixer to skip its transformation.
    skip_on = None

    def start_tree(self, *args):
        super(ConditionalFix, self).start_tree(*args)
        # Lazily computed by should_skip(); None means "not checked yet".
        self._should_skip = None

    def should_skip(self, node):
        """Return True if the guarding import is present (cached per tree)."""
        if self._should_skip is None:
            parts = self.skip_on.split(".")
            package = ".".join(parts[:-1])
            self._should_skip = does_tree_import(package, parts[-1], node)
        return self._should_skip
|
evalkit_cambrian/lib/python3.10/lib2to3/fixer_util.py
ADDED
|
@@ -0,0 +1,453 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions, node construction macros, etc."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .pgen2 import token
|
| 6 |
+
from .pytree import Leaf, Node
|
| 7 |
+
from .pygram import python_symbols as syms
|
| 8 |
+
from . import patcomp
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
###########################################################
|
| 12 |
+
### Common node-construction "macros"
|
| 13 |
+
###########################################################
|
| 14 |
+
|
| 15 |
+
def KeywordArg(keyword, value):
    """Build a `keyword=value` argument node."""
    return Node(syms.argument,
                [keyword, Leaf(token.EQUAL, "="), value])
|
| 18 |
+
|
| 19 |
+
def LParen():
    """A left-parenthesis leaf."""
    return Leaf(token.LPAR, "(")
|
| 21 |
+
|
| 22 |
+
def RParen():
    """A right-parenthesis leaf."""
    return Leaf(token.RPAR, ")")
|
| 24 |
+
|
| 25 |
+
def Assign(target, source):
    """Build an assignment statement"""
    targets = target if isinstance(target, list) else [target]
    if isinstance(source, list):
        sources = source
    else:
        # A single source gets a leading space: `target = source`.
        source.prefix = " "
        sources = [source]
    equals = Leaf(token.EQUAL, "=", prefix=" ")
    return Node(syms.atom, targets + [equals] + sources)
|
| 35 |
+
|
| 36 |
+
def Name(name, prefix=None):
    """Return a NAME leaf"""
    return Leaf(token.NAME, name, prefix=prefix)
|
| 39 |
+
|
| 40 |
+
def Attr(obj, attr):
    """A node tuple for obj.attr"""
    # Returned as a list so callers can splice it into a power node.
    return [obj, Node(syms.trailer, [Dot(), attr])]
|
| 43 |
+
|
| 44 |
+
def Comma():
    """A comma leaf"""
    return Leaf(token.COMMA, ",")
|
| 47 |
+
|
| 48 |
+
def Dot():
    """A period (.) leaf"""
    return Leaf(token.DOT, ".")
|
| 51 |
+
|
| 52 |
+
def ArgList(args, lparen=None, rparen=None):
    """A parenthesised argument list, used by Call().

    Args:
        args: list of argument nodes placed between the parentheses,
            or a falsy value for an empty call.
        lparen, rparen: optional template leaves for the parentheses;
            clones of them are used (preserving their prefixes).  When
            omitted, fresh "(" / ")" leaves are created.
    """
    # The original signature used shared module-level default leaves
    # (`lparen=LParen()` evaluated once at import time) -- a mutable
    # default-argument anti-pattern.  Creating them per call is
    # behaviorally identical because the defaults were always cloned.
    lparen = LParen() if lparen is None else lparen
    rparen = RParen() if rparen is None else rparen
    node = Node(syms.trailer, [lparen.clone(), rparen.clone()])
    if args:
        node.insert_child(1, Node(syms.arglist, args))
    return node
|
| 58 |
+
|
| 59 |
+
def Call(func_name, args=None, prefix=None):
    """A function call"""
    call = Node(syms.power, [func_name, ArgList(args)])
    if prefix is not None:
        # Attach leading whitespace/comments to the whole call node.
        call.prefix = prefix
    return call
|
| 65 |
+
|
| 66 |
+
def Newline():
    """A newline literal"""
    return Leaf(token.NEWLINE, "\n")
|
| 69 |
+
|
| 70 |
+
def BlankLine():
    """A blank line"""
    # A NEWLINE token with empty value renders as nothing.
    return Leaf(token.NEWLINE, "")
|
| 73 |
+
|
| 74 |
+
def Number(n, prefix=None):
    """A numeric literal leaf; *n* is the literal's source text."""
    return Leaf(token.NUMBER, n, prefix=prefix)
|
| 76 |
+
|
| 77 |
+
def Subscript(index_node):
    """A numeric or string subscript"""
    # NOTE: lib2to3's tokenizer uses LBRACE/RBRACE for "[" and "]" here.
    return Node(syms.trailer, [Leaf(token.LBRACE, "["),
                               index_node,
                               Leaf(token.RBRACE, "]")])
|
| 82 |
+
|
| 83 |
+
def String(string, prefix=None):
    """A string leaf; *string* must include its own quotes."""
    return Leaf(token.STRING, string, prefix=prefix)
|
| 86 |
+
|
| 87 |
+
def ListComp(xp, fp, it, test=None):
    """A list comprehension of the form [xp for fp in it if test].

    If test is None, the "if test" part is omitted.
    """
    # Normalize spacing: no space after "[", single spaces elsewhere.
    xp.prefix = ""
    fp.prefix = " "
    it.prefix = " "
    comp_parts = [Leaf(token.NAME, "for", prefix=" "),
                  fp,
                  Leaf(token.NAME, "in", prefix=" "),
                  it]
    if test:
        test.prefix = " "
        comp_parts.append(
            Node(syms.comp_if,
                 [Leaf(token.NAME, "if", prefix=" "), test]))
    body = Node(syms.listmaker, [xp, Node(syms.comp_for, comp_parts)])
    return Node(syms.atom,
                [Leaf(token.LBRACE, "["),
                 body,
                 Leaf(token.RBRACE, "]")])
|
| 110 |
+
|
| 111 |
+
def FromImport(package_name, name_leafs):
    """ Return an import statement in the form:
    from package import name_leafs"""
    # XXX: May not handle dotted imports properly (eg, package_name='foo.bar')
    # Detach each name leaf from whatever tree it currently lives in.
    for leaf in name_leafs:
        leaf.remove()

    return Node(syms.import_from,
                [Leaf(token.NAME, "from"),
                 Leaf(token.NAME, package_name, prefix=" "),
                 Leaf(token.NAME, "import", prefix=" "),
                 Node(syms.import_as_names, name_leafs)])
|
| 129 |
+
|
| 130 |
+
def ImportAndCall(node, results, names):
    """Returns an import statement and calls a method
    of the module:

    import module
    module.name()

    *names* is a (module, attribute) pair; *results* must contain
    "obj" (the argument expression), "lpar"/"rpar" (the original
    parentheses) and "after" (trailing trailers, possibly empty).
    """
    obj = results["obj"].clone()
    # Wrap a single argument in an arglist; keep an existing arglist as-is.
    if obj.type == syms.arglist:
        newarglist = obj.clone()
    else:
        newarglist = Node(syms.arglist, [obj.clone()])
    after = results["after"]
    if after:
        after = [n.clone() for n in after]
    # Build `module.name(<args>)<after>` as a power node.
    new = Node(syms.power,
               Attr(Name(names[0]), Name(names[1])) +
               [Node(syms.trailer,
                     [results["lpar"].clone(),
                      newarglist,
                      results["rpar"].clone()])] + after)
    # Preserve the original call's leading whitespace.
    new.prefix = node.prefix
    return new
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
###########################################################
|
| 155 |
+
### Determine whether a node represents a given literal
|
| 156 |
+
###########################################################
|
| 157 |
+
|
| 158 |
+
def is_tuple(node):
    """Does the node represent a tuple literal?"""
    if not isinstance(node, Node):
        return False
    kids = node.children
    # Empty tuple: exactly "(" ")" (Leaf equality compares type+value).
    if kids == [LParen(), RParen()]:
        return True
    # Non-empty tuple: "(" <testlist node> ")".
    if len(kids) != 3:
        return False
    first, middle, last = kids
    return (isinstance(first, Leaf)
            and isinstance(middle, Node)
            and isinstance(last, Leaf)
            and first.value == "("
            and last.value == ")")
|
| 169 |
+
|
| 170 |
+
def is_list(node):
    """Does the node represent a list literal?"""
    if not isinstance(node, Node) or len(node.children) <= 1:
        return False
    first = node.children[0]
    last = node.children[-1]
    # Bracketed on both ends: "[" ... "]".
    return (isinstance(first, Leaf) and first.value == "["
            and isinstance(last, Leaf) and last.value == "]")
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
###########################################################
|
| 181 |
+
### Misc
|
| 182 |
+
###########################################################
|
| 183 |
+
|
| 184 |
+
def parenthesize(node):
    """Wrap *node* in parentheses: `( node )` as an atom."""
    return Node(syms.atom, [LParen(), node, RParen()])
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum",
|
| 189 |
+
"min", "max", "enumerate"}
|
| 190 |
+
|
| 191 |
+
def attr_chain(obj, attr):
    """Follow an attribute chain.

    If you have a chain of objects where a.foo -> b, b.foo-> c, etc,
    use this to iterate over all objects in the chain. Iteration is
    terminated by getattr(x, attr) is None.

    Args:
        obj: the starting object
        attr: the name of the chaining attribute

    Yields:
        Each successive object in the chain.
    """
    # Renamed the loop variable from `next` to avoid shadowing the
    # builtin of the same name.
    link = getattr(obj, attr)
    while link:
        yield link
        link = getattr(link, attr)
|
| 209 |
+
|
| 210 |
+
p0 = """for_stmt< 'for' any 'in' node=any ':' any* >
|
| 211 |
+
| comp_for< 'for' any 'in' node=any any* >
|
| 212 |
+
"""
|
| 213 |
+
p1 = """
|
| 214 |
+
power<
|
| 215 |
+
( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
|
| 216 |
+
'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) )
|
| 217 |
+
trailer< '(' node=any ')' >
|
| 218 |
+
any*
|
| 219 |
+
>
|
| 220 |
+
"""
|
| 221 |
+
p2 = """
|
| 222 |
+
power<
|
| 223 |
+
( 'sorted' | 'enumerate' )
|
| 224 |
+
trailer< '(' arglist<node=any any*> ')' >
|
| 225 |
+
any*
|
| 226 |
+
>
|
| 227 |
+
"""
|
| 228 |
+
pats_built = False
|
| 229 |
+
def in_special_context(node):
    """ Returns true if node is in an environment where all that is required
    of it is being iterable (ie, it doesn't matter if it returns a list
    or an iterator).
    See test_map_nochange in test_fixers.py for some examples and tests.
    """
    global p0, p1, p2, pats_built
    # Lazily compile the module-level pattern strings in place the first
    # time this is called; afterwards p0/p1/p2 hold compiled patterns.
    if not pats_built:
        p0 = patcomp.compile_pattern(p0)
        p1 = patcomp.compile_pattern(p1)
        p2 = patcomp.compile_pattern(p2)
        pats_built = True
    patterns = [p0, p1, p2]
    # Walk up at most three ancestors, pairing each with its pattern:
    # p0 vs parent (for/comp_for), p1 vs grandparent (consuming call),
    # p2 vs great-grandparent (sorted/enumerate with extra args).
    for pattern, parent in zip(patterns, attr_chain(node, "parent")):
        results = {}
        if pattern.match(parent, results) and results["node"] is node:
            return True
    return False
|
| 247 |
+
|
| 248 |
+
def is_probably_builtin(node):
    """
    Check that something isn't an attribute or function name etc.
    """
    prev = node.prev_sibling
    if prev is not None and prev.type == token.DOT:
        # Attribute lookup (`x.name`), not a builtin use.
        return False
    parent = node.parent
    if parent.type in (syms.funcdef, syms.classdef):
        # The name being defined by a def/class statement.
        return False
    if parent.type == syms.expr_stmt and parent.children[0] is node:
        # Left-hand side of an assignment.
        return False
    first_typedarg = (parent.type == syms.typedargslist and
                      ((prev is not None and prev.type == token.COMMA) or
                       parent.children[0] is node))
    if parent.type == syms.parameters or first_typedarg:
        # The name of a function argument.
        return False
    return True
|
| 270 |
+
|
| 271 |
+
def find_indentation(node):
    """Find the indentation of *node*."""
    current = node
    # Walk up until we hit a suite that actually carries an INDENT token.
    while current is not None:
        if current.type == syms.suite and len(current.children) > 2:
            candidate = current.children[1]
            if candidate.type == token.INDENT:
                return candidate.value
        current = current.parent
    # Top-level code: no indentation.
    return ""
|
| 280 |
+
|
| 281 |
+
###########################################################
|
| 282 |
+
### The following functions are to find bindings in a suite
|
| 283 |
+
###########################################################
|
| 284 |
+
|
| 285 |
+
def make_suite(node):
    """Return *node* as a suite; wrap a non-suite node in a fresh suite.

    The wrapper suite is given the original node's parent so upward
    traversal from the suite still works.
    """
    if node.type == syms.suite:
        return node
    # Clone first so the original tree is left untouched.
    node = node.clone()
    parent, node.parent = node.parent, None
    suite = Node(syms.suite, [node])
    suite.parent = parent
    return suite
|
| 293 |
+
|
| 294 |
+
def find_root(node):
    """Find the top level namespace."""
    # Scamper up to the top level namespace
    current = node
    while current.type != syms.file_input:
        current = current.parent
        if not current:
            raise ValueError("root found before file_input node was found.")
    return current
|
| 302 |
+
|
| 303 |
+
def does_tree_import(package, name, node):
    """ Returns true if name is imported from package at the
    top level of the tree which node belongs to.
    To cover the case of an import like 'import foo', use
    None for the package and 'foo' for the name. """
    binding = find_binding(name, find_root(node), package)
    return bool(binding)
|
| 310 |
+
|
| 311 |
+
def is_import(node):
    """Returns true if the node is an import statement."""
    # Covers both `import x` (import_name) and `from x import y`
    # (import_from).
    return node.type in (syms.import_name, syms.import_from)
|
| 314 |
+
|
| 315 |
+
def touch_import(package, name, node):
    """ Works like `does_tree_import` but adds an import statement
    if it was not imported. """
    def is_import_stmt(stmt):
        return (stmt.type == syms.simple_stmt and stmt.children and
                is_import(stmt.children[0]))

    root = find_root(node)

    if does_tree_import(package, name, root):
        # Already imported; nothing to do.
        return

    # Figure out where to insert the new import.  First try to find
    # the first import and then skip to the last one.
    insert_pos = offset = 0
    for idx, child in enumerate(root.children):
        if not is_import_stmt(child):
            continue
        for offset, sibling in enumerate(root.children[idx:]):
            if not is_import_stmt(sibling):
                break
        insert_pos = idx + offset
        break

    # If there are no imports where we can insert, find the docstring.
    # If that also fails, we stick to the beginning of the file.
    if insert_pos == 0:
        for idx, child in enumerate(root.children):
            if (child.type == syms.simple_stmt and child.children and
                    child.children[0].type == token.STRING):
                insert_pos = idx + 1
                break

    if package is None:
        # Plain `import name`.
        import_ = Node(syms.import_name, [
            Leaf(token.NAME, "import"),
            Leaf(token.NAME, name, prefix=" ")
        ])
    else:
        # `from package import name`.
        import_ = FromImport(package, [Leaf(token.NAME, name, prefix=" ")])

    root.insert_child(insert_pos,
                      Node(syms.simple_stmt, [import_, Newline()]))
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
_def_syms = {syms.classdef, syms.funcdef}
|
| 361 |
+
def find_binding(name, node, package=None):
    """ Returns the node which binds variable name, otherwise None.
    If optional argument package is supplied, only imports will
    be returned.
    See test cases for examples."""
    for child in node.children:
        ret = None
        if child.type == syms.for_stmt:
            # `for name in ...` binds name directly...
            if _find(name, child.children[1]):
                return child
            # ...otherwise look inside the loop body.
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n: ret = n
        elif child.type in (syms.if_stmt, syms.while_stmt):
            # Only the final suite (the last child) is searched.
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n: ret = n
        elif child.type == syms.try_stmt:
            # children[2] is the `try:` suite.
            n = find_binding(name, make_suite(child.children[2]), package)
            if n:
                ret = n
            else:
                # Search each except/else/finally suite; a suite always
                # follows a COLON token.
                for i, kid in enumerate(child.children[3:]):
                    if kid.type == token.COLON and kid.value == ":":
                        # i+3 is the colon, i+4 is the suite
                        n = find_binding(name, make_suite(child.children[i+4]), package)
                        if n: ret = n
        elif child.type in _def_syms and child.children[1].value == name:
            # `def name(...)` / `class name(...)`.
            ret = child
        elif _is_import_binding(child, name, package):
            ret = child
        elif child.type == syms.simple_stmt:
            ret = find_binding(name, child, package)
        elif child.type == syms.expr_stmt:
            # Assignment statement: check the left-hand side.
            if _find(name, child.children[0]):
                ret = child

        if ret:
            # With a package filter, only import bindings count.
            if not package:
                return ret
            if is_import(ret):
                return ret
    return None
|
| 402 |
+
|
| 403 |
+
_block_syms = {syms.funcdef, syms.classdef, syms.trailer}
|
| 404 |
+
def _find(name, node):
    """Depth-first search for a NAME leaf with value *name* under *node*.

    Does not descend into nested scopes (funcdef/classdef) or trailers.
    Returns the leaf, or None.
    """
    stack = [node]
    while stack:
        current = stack.pop()
        if current.type > 256 and current.type not in _block_syms:
            # Non-terminal outside a new scope: keep descending.
            stack.extend(current.children)
        elif current.type == token.NAME and current.value == name:
            return current
    return None
|
| 413 |
+
|
| 414 |
+
def _is_import_binding(node, name, package=None):
    """ Will return node if node will import name, or node
    will import * from package. None is returned otherwise.
    See test cases for examples. """

    if node.type == syms.import_name and not package:
        # `import a, b as c, ...` -- children[1] is the imported spec.
        imp = node.children[1]
        if imp.type == syms.dotted_as_names:
            # Several comma-separated imports.
            for child in imp.children:
                if child.type == syms.dotted_as_name:
                    # `x as y`: the bound name is children[2] (y).
                    if child.children[2].value == name:
                        return node
                elif child.type == token.NAME and child.value == name:
                    return node
        elif imp.type == syms.dotted_as_name:
            # Single `x as y` import.
            last = imp.children[-1]
            if last.type == token.NAME and last.value == name:
                return node
        elif imp.type == token.NAME and imp.value == name:
            # Single plain `import name`.
            return node
    elif node.type == syms.import_from:
        # str(...) is used to make life easier here, because
        # from a.b import parses to ['import', ['a', '.', 'b'], ...]
        if package and str(node.children[1]).strip() != package:
            return None
        n = node.children[3]
        if package and _find("as", n):
            # See test_from_import_as for explanation
            return None
        elif n.type == syms.import_as_names and _find(name, n):
            return node
        elif n.type == syms.import_as_name:
            # `from pkg import x as y`: bound name is children[2] (y).
            child = n.children[2]
            if child.type == token.NAME and child.value == name:
                return node
        elif n.type == token.NAME and n.value == name:
            return node
        elif package and n.type == token.STAR:
            # `from package import *` binds everything.
            return node
    return None
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_basestring.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for basestring -> str."""
|
| 2 |
+
# Author: Christian Heimes
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
class FixBasestring(fixer_base.BaseFix):
    """Replace every bare `basestring` name with `str`."""
    BM_compatible = True

    # Matches the NAME leaf 'basestring' anywhere it occurs.
    PATTERN = "'basestring'"

    def transform(self, node, results):
        # Keep the original leading whitespace/comments.
        return Name("str", prefix=node.prefix)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_buffer.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes buffer(...) into memoryview(...)."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixBuffer(fixer_base.BaseFix):
    """Rewrite `buffer(...)` calls to `memoryview(...)`."""

    BM_compatible = True

    explicit = True  # The user must ask for this fixer

    PATTERN = """
              power< name='buffer' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        buffer_name = results["name"]
        replacement = Name("memoryview", prefix=buffer_name.prefix)
        buffer_name.replace(replacement)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_dict.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for dict methods.
|
| 5 |
+
|
| 6 |
+
d.keys() -> list(d.keys())
|
| 7 |
+
d.items() -> list(d.items())
|
| 8 |
+
d.values() -> list(d.values())
|
| 9 |
+
|
| 10 |
+
d.iterkeys() -> iter(d.keys())
|
| 11 |
+
d.iteritems() -> iter(d.items())
|
| 12 |
+
d.itervalues() -> iter(d.values())
|
| 13 |
+
|
| 14 |
+
d.viewkeys() -> d.keys()
|
| 15 |
+
d.viewitems() -> d.items()
|
| 16 |
+
d.viewvalues() -> d.values()
|
| 17 |
+
|
| 18 |
+
Except in certain very specific contexts: the iter() can be dropped
|
| 19 |
+
when the context is list(), sorted(), iter() or for...in; the list()
|
| 20 |
+
can be dropped when the context is list() or sorted() (but not iter()
|
| 21 |
+
or for...in!). Special contexts that apply to both: list(), sorted(), tuple()
|
| 22 |
+
set(), any(), all(), sum().
|
| 23 |
+
|
| 24 |
+
Note: iter(d.keys()) could be written as iter(d) but since the
|
| 25 |
+
original d.iterkeys() was also redundant we don't fix this. And there
|
| 26 |
+
are (rare) contexts where it makes a difference (e.g. when passing it
|
| 27 |
+
as an argument to a function that introspects the argument).
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
# Local imports
|
| 31 |
+
from .. import pytree
|
| 32 |
+
from .. import patcomp
|
| 33 |
+
from .. import fixer_base
|
| 34 |
+
from ..fixer_util import Name, Call, Dot
|
| 35 |
+
from .. import fixer_util
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
iter_exempt = fixer_util.consuming_calls | {"iter"}
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class FixDict(fixer_base.BaseFix):
    """Rewrite dict view/iterator methods for Python 3.

    keys/items/values are wrapped in list(), iter*/view* variants are
    mapped to the plain methods (wrapped in iter() for iter*), except
    in contexts that only need an iterable.
    """
    BM_compatible = True

    PATTERN = """
    power< head=any+
         trailer< '.' method=('keys'|'items'|'values'|
                              'iterkeys'|'iteritems'|'itervalues'|
                              'viewkeys'|'viewitems'|'viewvalues') >
          parens=trailer< '(' ')' >
         tail=any*
    >
    """

    def transform(self, node, results):
        head = results["head"]
        method = results["method"][0] # Extract node for method name
        tail = results["tail"]
        syms = self.syms
        method_name = method.value
        isiter = method_name.startswith("iter")
        isview = method_name.startswith("view")
        if isiter or isview:
            # Strip the "iter"/"view" prefix: iterkeys -> keys, etc.
            method_name = method_name[4:]
        assert method_name in ("keys", "items", "values"), repr(method)
        head = [n.clone() for n in head]
        tail = [n.clone() for n in tail]
        # No wrapping needed when the result is only iterated (and there
        # are no trailing trailers that would consume the wrapper).
        special = not tail and self.in_special_context(node, isiter)
        args = head + [pytree.Node(syms.trailer,
                                   [Dot(),
                                    Name(method_name,
                                         prefix=method.prefix)]),
                       results["parens"].clone()]
        new = pytree.Node(syms.power, args)
        if not (special or isview):
            # Wrap in iter(...) for iter* methods, list(...) otherwise.
            new.prefix = ""
            new = Call(Name("iter" if isiter else "list"), [new])
        if tail:
            new = pytree.Node(syms.power, [new] + tail)
        new.prefix = node.prefix
        return new

    # Call with one argument: e.g. list(d.keys()).
    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    # Iteration context: for ... in d.keys() / comprehension for-clause.
    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node, isiter):
        """Return True if the call's context only needs an iterable."""
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
               self.p1.match(node.parent.parent, results) and
               results["node"] is node):
            if isiter:
                # iter(d.iterkeys()) -> iter(d.keys()), etc.
                return results["func"].value in iter_exempt
            else:
                # list(d.keys()) -> list(d.keys()), etc.
                return results["func"].value in fixer_util.consuming_calls
        if not isiter:
            return False
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_execfile.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for execfile.
|
| 5 |
+
|
| 6 |
+
This converts usages of the execfile function into calls to the built-in
|
| 7 |
+
exec() function.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
|
| 12 |
+
ArgList, String, syms)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FixExecfile(fixer_base.BaseFix):
    """Rewrite execfile(f[, globals[, locals]]) into
    exec(compile(open(f, "rb").read(), f, 'exec')[, globals[, locals]]).
    """
    BM_compatible = True

    PATTERN = """
    power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
    |
    power< 'execfile' trailer< '(' filename=any ')' > >
    """

    def transform(self, node, results):
        assert results
        filename = results["filename"]
        globals = results.get("globals")
        locals = results.get("locals")

        # Copy over the prefix from the right parentheses end of the execfile
        # call.
        execfile_paren = node.children[-1].children[-1].clone()
        # Construct open().read().
        open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')],
                            rparen=execfile_paren)
        open_call = Node(syms.power, [Name("open"), open_args])
        read = [Node(syms.trailer, [Dot(), Name('read')]),
                Node(syms.trailer, [LParen(), RParen()])]
        open_expr = [open_call] + read
        # Wrap the open call in a compile call. This is so the filename will be
        # preserved in the execed code.
        filename_arg = filename.clone()
        filename_arg.prefix = " "
        exec_str = String("'exec'", " ")
        compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
        compile_call = Call(Name("compile"), compile_args, "")
        # Finally, replace the execfile call with an exec call.
        args = [compile_call]
        if globals is not None:
            args.extend([Comma(), globals.clone()])
        if locals is not None:
            args.extend([Comma(), locals.clone()])
        return Call(Name("exec"), args, prefix=node.prefix)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_filter.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes filter(F, X) into list(filter(F, X)).
|
| 5 |
+
|
| 6 |
+
We avoid the transformation if the filter() call is directly contained
|
| 7 |
+
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
|
| 8 |
+
for V in <>:.
|
| 9 |
+
|
| 10 |
+
NOTE: This is still not correct if the original code was depending on
|
| 11 |
+
filter(F, X) to return a string if X is a string and a tuple if X is a
|
| 12 |
+
tuple. That would require type inference, which we don't do. Let
|
| 13 |
+
Python 2.6 figure it out.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
# Local imports
|
| 17 |
+
from .. import fixer_base
|
| 18 |
+
from ..pytree import Node
|
| 19 |
+
from ..pygram import python_symbols as syms
|
| 20 |
+
from ..fixer_util import Name, ArgList, ListComp, in_special_context, parenthesize
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class FixFilter(fixer_base.ConditionalFix):
    """Change ``filter(F, X)`` into ``list(filter(F, X))``.

    Special cases:
    * ``filter(lambda x: e, it)`` -> list comprehension ``[x for x in it if e]``;
    * ``filter(None, seq)``       -> ``[_f for _f in seq if _f]``;
    * skipped entirely when ``from future_builtins import filter`` is in
      effect, or (for the generic form) when the call already sits in a
      consuming context (see ``in_special_context``).
    """

    BM_compatible = True

    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        if self.should_skip(node):
            return

        # Preserve any trailers following the filter(...) call, e.g.
        # filter(...)[0] or filter(...).method().
        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if "filter_lambda" in results:
            xp = results.get("xp").clone()
            if xp.type == syms.test:
                # Parenthesize conditional expressions so the generated
                # comprehension stays syntactically valid.
                xp.prefix = ""
                xp = parenthesize(xp)

            new = ListComp(results.get("fp").clone(),
                           results.get("fp").clone(),
                           results.get("it").clone(), xp)
            new = Node(syms.power, [new] + trailers, prefix="")

        elif "none" in results:
            # filter(None, seq) keeps only truthy elements.
            new = ListComp(Name("_f"),
                           Name("_f"),
                           results["seq"].clone(),
                           Name("_f"))
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if in_special_context(node):
                # Already consumed by list()/iter()/for-loop etc.: no wrap.
                return None

            args = results['args'].clone()
            new = Node(syms.power, [Name("filter"), args], prefix="")
            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""
        # Carry over the original node's prefix in every branch.
        new.prefix = node.prefix
        return new
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_getcwdu.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Fixer that changes os.getcwdu() to os.getcwd().
|
| 3 |
+
"""
|
| 4 |
+
# Author: Victor Stinner
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
class FixGetcwdu(fixer_base.BaseFix):
    """Fixer that changes os.getcwdu() to os.getcwd()."""

    BM_compatible = True

    PATTERN = """
              power< 'os' trailer< dot='.' name='getcwdu' > any* >
              """

    def transform(self, node, results):
        # Swap only the attribute name, keeping its whitespace prefix.
        old = results["name"]
        replacement = Name("getcwd", prefix=old.prefix)
        old.replace(replacement)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_idioms.py
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Adjust some old Python 2 idioms to their modern counterparts.
|
| 2 |
+
|
| 3 |
+
* Change some type comparisons to isinstance() calls:
|
| 4 |
+
type(x) == T -> isinstance(x, T)
|
| 5 |
+
type(x) is T -> isinstance(x, T)
|
| 6 |
+
type(x) != T -> not isinstance(x, T)
|
| 7 |
+
type(x) is not T -> not isinstance(x, T)
|
| 8 |
+
|
| 9 |
+
* Change "while 1:" into "while True:".
|
| 10 |
+
|
| 11 |
+
* Change both
|
| 12 |
+
|
| 13 |
+
v = list(EXPR)
|
| 14 |
+
v.sort()
|
| 15 |
+
foo(v)
|
| 16 |
+
|
| 17 |
+
and the more general
|
| 18 |
+
|
| 19 |
+
v = EXPR
|
| 20 |
+
v.sort()
|
| 21 |
+
foo(v)
|
| 22 |
+
|
| 23 |
+
into
|
| 24 |
+
|
| 25 |
+
v = sorted(EXPR)
|
| 26 |
+
foo(v)
|
| 27 |
+
"""
|
| 28 |
+
# Author: Jacques Frechet, Collin Winter
|
| 29 |
+
|
| 30 |
+
# Local imports
|
| 31 |
+
from .. import fixer_base
|
| 32 |
+
from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms
|
| 33 |
+
|
| 34 |
+
# Sub-patterns shared by the two isinstance alternatives below.
CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >"

class FixIdioms(fixer_base.BaseFix):
    """Modernize old Python 2 idioms.

    Handles three shapes (see module docstring):
    type(x) ==/is T comparisons -> isinstance(), ``while 1:`` -> ``while
    True:``, and the assign-then-.sort() pair -> ``sorted()``.
    """

    explicit = True # The user must ask for this fixer

    PATTERN = r"""
        isinstance=comparison< %s %s T=any >
        |
        isinstance=comparison< T=any %s %s >
        |
        while_stmt< 'while' while='1' ':' any+ >
        |
        sorted=any<
            any*
            simple_stmt<
              expr_stmt< id1=any '='
                         power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
              >
              '\n'
            >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
        |
        sorted=any<
            any*
            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
    """ % (TYPE, CMP, CMP, TYPE)

    def match(self, node):
        r = super(FixIdioms, self).match(node)
        # If we've matched one of the sort/sorted subpatterns above, we
        # want to reject matches where the initial assignment and the
        # subsequent .sort() call involve different identifiers.
        if r and "sorted" in r:
            if r["id1"] == r["id2"]:
                return r
            return None
        return r

    def transform(self, node, results):
        # Dispatch on which alternative of PATTERN matched.
        if "isinstance" in results:
            return self.transform_isinstance(node, results)
        elif "while" in results:
            return self.transform_while(node, results)
        elif "sorted" in results:
            return self.transform_sort(node, results)
        else:
            raise RuntimeError("Invalid match")

    def transform_isinstance(self, node, results):
        """Rewrite ``type(x) ==/is [not] T`` as ``[not] isinstance(x, T)``."""
        x = results["x"].clone() # The thing inside of type()
        T = results["T"].clone() # The type being compared against
        x.prefix = ""
        T.prefix = " "
        test = Call(Name("isinstance"), [x, Comma(), T])
        if "n" in results:
            # A negated comparison ('!=' or 'is not') was captured as "n".
            test.prefix = " "
            test = Node(syms.not_test, [Name("not"), test])
        test.prefix = node.prefix
        return test

    def transform_while(self, node, results):
        """Replace the literal ``1`` in ``while 1:`` with ``True``."""
        one = results["while"]
        one.replace(Name("True", prefix=one.prefix))

    def transform_sort(self, node, results):
        """Collapse ``v = list(E)`` / ``v = E`` followed by ``v.sort()``
        into ``v = sorted(E)``, then drop the .sort() statement."""
        sort_stmt = results["sort"]
        next_stmt = results["next"]
        list_call = results.get("list")
        simple_expr = results.get("expr")

        if list_call:
            list_call.replace(Name("sorted", prefix=list_call.prefix))
        elif simple_expr:
            new = simple_expr.clone()
            new.prefix = ""
            simple_expr.replace(Call(Name("sorted"), [new],
                                     prefix=simple_expr.prefix))
        else:
            raise RuntimeError("should not have reached here")
        sort_stmt.remove()

        btwn = sort_stmt.prefix
        # Keep any prefix lines between the sort_stmt and the list_call and
        # shove them right after the sorted() call.
        if "\n" in btwn:
            if next_stmt:
                # The new prefix should be everything from the sort_stmt's
                # prefix up to the last newline, then the old prefix after a new
                # line.
                prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix)
                next_stmt[0].prefix = "\n".join(prefix_lines)
            else:
                assert list_call.parent
                assert list_call.next_sibling is None
                # Put a blank line after list_call and set its prefix.
                end_line = BlankLine()
                list_call.parent.append_child(end_line)
                assert list_call.next_sibling is end_line
                # The new prefix should be everything up to the first new line
                # of sort_stmt's prefix.
                end_line.prefix = btwn.rpartition("\n")[0]
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_imports.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible imports and module references."""
|
| 2 |
+
# Authors: Collin Winter, Nick Edds
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name, attr_chain
|
| 7 |
+
|
| 8 |
+
# Mapping of Python 2 module names to their Python 3 replacements.
# Keys are matched both in import statements and (for plain "import x"
# forms) in attribute lookups like ``thread.foo``.
MAPPING = {'StringIO':  'io',
           'cStringIO': 'io',
           'cPickle': 'pickle',
           '__builtin__' : 'builtins',
           'copy_reg': 'copyreg',
           'Queue': 'queue',
           'SocketServer': 'socketserver',
           'ConfigParser': 'configparser',
           'repr': 'reprlib',
           'FileDialog': 'tkinter.filedialog',
           'tkFileDialog': 'tkinter.filedialog',
           'SimpleDialog': 'tkinter.simpledialog',
           'tkSimpleDialog': 'tkinter.simpledialog',
           'tkColorChooser': 'tkinter.colorchooser',
           'tkCommonDialog': 'tkinter.commondialog',
           'Dialog': 'tkinter.dialog',
           'Tkdnd': 'tkinter.dnd',
           'tkFont': 'tkinter.font',
           'tkMessageBox': 'tkinter.messagebox',
           'ScrolledText': 'tkinter.scrolledtext',
           'Tkconstants': 'tkinter.constants',
           'Tix': 'tkinter.tix',
           'ttk': 'tkinter.ttk',
           'Tkinter': 'tkinter',
           'markupbase': '_markupbase',
           '_winreg': 'winreg',
           'thread': '_thread',
           'dummy_thread': '_dummy_thread',
           # anydbm and whichdb are handled by fix_imports2
           'dbhash': 'dbm.bsd',
           'dumbdbm': 'dbm.dumb',
           'dbm': 'dbm.ndbm',
           'gdbm': 'dbm.gnu',
           'xmlrpclib': 'xmlrpc.client',
           'DocXMLRPCServer': 'xmlrpc.server',
           'SimpleXMLRPCServer': 'xmlrpc.server',
           'httplib': 'http.client',
           'htmlentitydefs' : 'html.entities',
           'HTMLParser' : 'html.parser',
           'Cookie': 'http.cookies',
           'cookielib': 'http.cookiejar',
           'BaseHTTPServer': 'http.server',
           'SimpleHTTPServer': 'http.server',
           'CGIHTTPServer': 'http.server',
           #'test.test_support': 'test.support',
           'commands': 'subprocess',
           'UserString' : 'collections',
           'UserList' : 'collections',
           'urlparse' : 'urllib.parse',
           'robotparser' : 'urllib.robotparser',
}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def alternates(members):
    """Return a pattern alternation such as ``('a'|'b')`` for *members*."""
    choices = "|".join(repr(member) for member in members)
    return "(%s)" % choices
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def build_pattern(mapping=MAPPING):
    """Yield the fixer pattern alternatives for the names in *mapping*.

    Produces, in order: plain/multiple imports, from-imports,
    import-as forms, and bare ``module.attr`` usages.
    """
    mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
    bare_names = alternates(mapping.keys())

    yield """name_import=import_name< 'import' ((%s) |
               multiple_imports=dotted_as_names< any* (%s) any* >) >
          """ % (mod_list, mod_list)
    yield """import_from< 'from' (%s) 'import' ['(']
              ( any | import_as_name< any 'as' any > |
                import_as_names< any* >)  [')'] >
          """ % mod_list
    yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
               multiple_imports=dotted_as_names<
                 any* dotted_as_name< (%s) 'as' any > any* >) >
          """ % (mod_list, mod_list)

    # Find usages of module members in code e.g. thread.foo(bar)
    yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class FixImports(fixer_base.BaseFix):
    """Rename Python 2 stdlib module imports (and bare usages of the
    renamed modules) to their Python 3 names, per ``self.mapping``."""

    BM_compatible = True
    keep_line_order = True
    # This is overridden in fix_imports2.
    mapping = MAPPING

    # We want to run this fixer late, so fix_import doesn't try to make stdlib
    # renames into relative imports.
    run_order = 6

    def build_pattern(self):
        # Combine all pattern alternatives for this fixer's mapping.
        return "|".join(build_pattern(self.mapping))

    def compile_pattern(self):
        # We override this, so MAPPING can be pragmatically altered and the
        # changes will be reflected in PATTERN.
        self.PATTERN = self.build_pattern()
        super(FixImports, self).compile_pattern()

    # Don't match the node if it's within another match.
    def match(self, node):
        match = super(FixImports, self).match
        results = match(node)
        if results:
            # Module usage could be in the trailer of an attribute lookup, so we
            # might have nested matches when "bare_with_attr" is present.
            if "bare_with_attr" not in results and \
               any(match(obj) for obj in attr_chain(node, "parent")):
                return False
            return results
        return False

    def start_tree(self, tree, filename):
        super(FixImports, self).start_tree(tree, filename)
        # mod_name -> new_name for modules whose bare usages must be renamed.
        self.replace = {}

    def transform(self, node, results):
        import_mod = results.get("module_name")
        if import_mod:
            mod_name = import_mod.value
            new_name = self.mapping[mod_name]
            import_mod.replace(Name(new_name, prefix=import_mod.prefix))
            if "name_import" in results:
                # If it's not a "from x import x, y" or "import x as y" import,
                # marked its usage to be replaced.
                self.replace[mod_name] = new_name
            if "multiple_imports" in results:
                # This is a nasty hack to fix multiple imports on a line (e.g.,
                # "import StringIO, urlparse"). The problem is that I can't
                # figure out an easy way to make a pattern recognize the keys of
                # MAPPING randomly sprinkled in an import statement.
                results = self.match(node)
                if results:
                    self.transform(node, results)
        else:
            # Replace usage of the module.
            bare_name = results["bare_with_attr"][0]
            new_name = self.replace.get(bare_name.value)
            if new_name:
                bare_name.replace(Name(new_name, prefix=bare_name.prefix))
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_intern.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Georg Brandl.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for intern().
|
| 5 |
+
|
| 6 |
+
intern(s) -> sys.intern(s)"""
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
from ..fixer_util import ImportAndCall, touch_import
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class FixIntern(fixer_base.BaseFix):
    """Rewrite ``intern(s)`` as ``sys.intern(s)``, adding the ``sys``
    import if needed."""

    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'intern'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        if results:
            # I feel like we should be able to express this logic in the
            # PATTERN above but I don't know how to do it so...
            obj = results['obj']
            if obj:
                # Leave star-arg calls like intern(*args) untouched.
                if (obj.type == self.syms.argument and
                    obj.children[0].value in {'**', '*'}):
                    return  # Make no change.
        names = ('sys', 'intern')
        new = ImportAndCall(node, results, names)
        touch_import(None, 'sys', node)
        return new
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_itertools.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
|
| 2 |
+
itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
|
| 3 |
+
|
| 4 |
+
imports from itertools are fixed in fix_itertools_import.py
|
| 5 |
+
|
| 6 |
+
If itertools is imported as something else (ie: import itertools as it;
|
| 7 |
+
it.izip(spam, eggs)) method calls will not get fixed.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# Local imports
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name
|
| 13 |
+
|
| 14 |
+
class FixItertools(fixer_base.BaseFix):
    """Rewrite itertools.(imap|ifilter|izip)(...) to the builtins
    map/filter/zip, and i(filterfalse|zip_longest) to the renamed
    itertools functions (the leading 'i' is stripped)."""

    BM_compatible = True
    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    PATTERN = """
              power< it='itertools'
                  trailer<
                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
              |
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())

    # Needs to be run after fix_(map|zip|filter)
    run_order = 6

    def transform(self, node, results):
        prefix = None
        func = results['func'][0]
        # Drop the 'itertools.' qualifier only for the functions that
        # moved to builtins; filterfalse/zip_longest stay in itertools.
        if ('it' in results and
            func.value not in ('ifilterfalse', 'izip_longest')):
            dot, it = (results['dot'], results['it'])
            # Remove the 'itertools'
            prefix = it.prefix
            it.remove()
            # Replace the node which contains ('.', 'function') with the
            # function (to be consistent with the second part of the pattern)
            dot.remove()
            func.parent.replace(func)

        prefix = prefix or func.prefix
        # Strip the leading 'i' from the function name.
        func.replace(Name(func.value[1:], prefix=prefix))
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_itertools_imports.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
|
| 2 |
+
|
| 3 |
+
# Local imports
|
| 4 |
+
from lib2to3 import fixer_base
|
| 5 |
+
from lib2to3.fixer_util import BlankLine, syms, token
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FixItertoolsImports(fixer_base.BaseFix):
    """Fix ``from itertools import ...`` lists: drop imap/izip/ifilter
    (now builtins), rename ifilterfalse/izip_longest, and remove the
    statement entirely when nothing is left."""

    BM_compatible = True
    PATTERN = """
              import_from< 'from' 'itertools' 'import' imports=any >
              """ %(locals())

    def transform(self, node, results):
        imports = results['imports']
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        # Imported names sit at even indices; commas at odd ones.
        for child in children[::2]:
            if child.type == token.NAME:
                member = child.value
                name_node = child
            elif child.type == token.STAR:
                # Just leave the import as is.
                return
            else:
                assert child.type == syms.import_as_name
                name_node = child.children[0]
            member_name = name_node.value
            if member_name in ('imap', 'izip', 'ifilter'):
                # These are builtins in Python 3: drop them from the list.
                child.value = None
                child.remove()
            elif member_name in ('ifilterfalse', 'izip_longest'):
                node.changed()
                name_node.value = ('filterfalse' if member_name[1] == 'f'
                                   else 'zip_longest')

        # Make sure the import statement is still sane
        children = imports.children[:] or [imports]
        remove_comma = True
        for child in children:
            if remove_comma and child.type == token.COMMA:
                child.remove()
            else:
                remove_comma ^= True

        while children and children[-1].type == token.COMMA:
            children.pop().remove()

        # If there are no imports left, just get rid of the entire statement
        if (not (imports.children or getattr(imports, 'value', None)) or
            imports.parent is None):
            p = node.prefix
            node = BlankLine()
            node.prefix = p
            return node
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_map.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
|
| 5 |
+
exists a 'from future_builtins import map' statement in the top-level
|
| 6 |
+
namespace.
|
| 7 |
+
|
| 8 |
+
As a special case, map(None, X) is changed into list(X). (This is
|
| 9 |
+
necessary because the semantics are changed in this case -- the new
|
| 10 |
+
map(None, X) is equivalent to [(x,) for x in X].)
|
| 11 |
+
|
| 12 |
+
We avoid the transformation (except for the special case mentioned
|
| 13 |
+
above) if the map() call is directly contained in iter(<>), list(<>),
|
| 14 |
+
tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
|
| 15 |
+
|
| 16 |
+
NOTE: This is still not correct if the original code was depending on
|
| 17 |
+
map(F, X, Y, ...) to go on until the longest argument is exhausted,
|
| 18 |
+
substituting None for missing values -- like zip(), it now stops as
|
| 19 |
+
soon as the shortest argument is exhausted.
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
# Local imports
|
| 23 |
+
from ..pgen2 import token
|
| 24 |
+
from .. import fixer_base
|
| 25 |
+
from ..fixer_util import Name, ArgList, Call, ListComp, in_special_context
|
| 26 |
+
from ..pygram import python_symbols as syms
|
| 27 |
+
from ..pytree import Node
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class FixMap(fixer_base.ConditionalFix):
    """Change ``map(F, ...)`` into ``list(map(F, ...))``.

    Special cases (see module docstring): ``map(None, X)`` -> ``list(X)``,
    ``map(lambda x: e, it)`` -> list comprehension, statement-level calls
    get a warning, and the fixer is skipped under
    ``from future_builtins import map`` or in consuming contexts.
    """

    BM_compatible = True

    PATTERN = """
    map_none=power<
        'map'
        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
        [extra_trailers=trailer*]
    >
    |
    map_lambda=power<
        'map'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'map' args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    skip_on = 'future_builtins.map'

    def transform(self, node, results):
        if self.should_skip(node):
            return

        # Preserve trailers that follow the map(...) call itself.
        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if node.parent.type == syms.simple_stmt:
            # A bare map(...) statement relied on eager evaluation for its
            # side effects; wrapping in list() preserves that, with a warning.
            self.warning(node, "You should use a for loop here")
            new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        elif "map_lambda" in results:
            new = ListComp(results["xp"].clone(),
                           results["fp"].clone(),
                           results["it"].clone())
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if "map_none" in results:
                # map(None, X) with one sequence is just list(X).
                new = results["arg"].clone()
                new.prefix = ""
            else:
                if "args" in results:
                    args = results["args"]
                    if args.type == syms.trailer and \
                       args.children[1].type == syms.arglist and \
                       args.children[1].children[0].type == token.NAME and \
                       args.children[1].children[0].value == "None":
                        self.warning(node, "cannot convert map(None, ...) "
                                     "with multiple arguments because map() "
                                     "now truncates to the shortest sequence")
                        return

                    new = Node(syms.power, [Name("map"), args.clone()])
                    new.prefix = ""

                if in_special_context(node):
                    # Already consumed by list()/iter()/for-loop: no wrap.
                    return None

            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""

        new.prefix = node.prefix
        return new
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_ne.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer that turns <> into !=."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import pytree
|
| 8 |
+
from ..pgen2 import token
|
| 9 |
+
from .. import fixer_base
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixNe(fixer_base.BaseFix):
    """Fixer that turns the legacy ``<>`` operator into ``!=``."""

    # Token-level match; the pattern compiler is unnecessary here.
    _accept_type = token.NOTEQUAL

    def match(self, node):
        # Only the old spelling needs rewriting; "!=" tokens are left alone.
        return node.value == "<>"

    def transform(self, node, results):
        return pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_next.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for it.next() -> next(it), per PEP 3114."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Things that currently aren't covered:
|
| 5 |
+
# - listcomp "next" names aren't warned
|
| 6 |
+
# - "with" statement targets aren't checked
|
| 7 |
+
|
| 8 |
+
# Local imports
|
| 9 |
+
from ..pgen2 import token
|
| 10 |
+
from ..pygram import python_symbols as syms
|
| 11 |
+
from .. import fixer_base
|
| 12 |
+
from ..fixer_util import Name, Call, find_binding
|
| 13 |
+
|
| 14 |
+
bind_warning = "Calls to builtin next() possibly shadowed by global binding"
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class FixNext(fixer_base.BaseFix):
    """Rewrite uses of the Python 2 iterator-protocol name ``next``.

    Handled forms (one PATTERN alternative each):
      * ``it.next()``      -> ``next(it)`` (or ``it.__next__()`` when the
        builtin ``next`` is shadowed by a module-level binding)
      * bare ``obj.next``  -> ``obj.__next__`` (skipped for assignment
        targets; warned when obj is ``__builtin__``)
      * ``def next(self)`` inside a class -> ``def __next__(self)``
      * ``global next``    -> warning only
    """
    BM_compatible = True
    PATTERN = """
    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
    |
    power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
    |
    classdef< 'class' any+ ':'
            suite< any*
                   funcdef< 'def'
                            name='next'
                            parameters< '(' NAME ')' > any+ >
                   any* > >
    |
    global=global_stmt< 'global' any* 'next' any* >
    """

    order = "pre" # Pre-order tree traversal

    def start_tree(self, tree, filename):
        """Check once per module whether ``next`` is shadowed by a binding."""
        super(FixNext, self).start_tree(tree, filename)

        n = find_binding('next', tree)
        if n:
            # A local binding of "next" makes calls to the builtin
            # ambiguous; warn and rewrite attribute-style instead.
            self.warning(n, bind_warning)
            self.shadowed_next = True
        else:
            self.shadowed_next = False

    def transform(self, node, results):
        assert results

        base = results.get("base")
        attr = results.get("attr")
        name = results.get("name")

        if base:
            # Matched "it.next()".
            if self.shadowed_next:
                # Builtin next() is shadowed: keep the method-call form.
                attr.replace(Name("__next__", prefix=attr.prefix))
            else:
                # Replace the whole call with next(it); the receiver keeps
                # no leading whitespace, the call inherits the node's.
                base = [n.clone() for n in base]
                base[0].prefix = ""
                node.replace(Call(Name("next", prefix=node.prefix), base))
        elif name:
            # Matched "def next(self)" in a class body.
            n = Name("__next__", prefix=name.prefix)
            name.replace(n)
        elif attr:
            # We don't do this transformation if we're assigning to "x.next".
            # Unfortunately, it doesn't seem possible to do this in PATTERN,
            # so it's being done here.
            if is_assign_target(node):
                head = results["head"]
                if "".join([str(n) for n in head]).strip() == '__builtin__':
                    self.warning(node, bind_warning)
                return
            attr.replace(Name("__next__"))
        elif "global" in results:
            # "global next": treat the builtin as shadowed from here on.
            self.warning(node, bind_warning)
            self.shadowed_next = True
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
### The following functions help test if node is part of an assignment
|
| 79 |
+
### target.
|
| 80 |
+
|
| 81 |
+
def is_assign_target(node):
    """Return True if *node* is part of an assignment's left-hand side."""
    assign = find_assign(node)
    if assign is None:
        return False

    for child in assign.children:
        if child.type == token.EQUAL:
            # Everything from the first '=' onward is the assigned value,
            # so node can no longer be a target.
            return False
        if is_subtree(child, node):
            return True
    return False
|
| 92 |
+
|
| 93 |
+
def find_assign(node):
    """Return the enclosing expr_stmt of *node*, or None at a statement
    boundary (simple_stmt) or the tree root."""
    current = node
    while current is not None:
        if current.type == syms.expr_stmt:
            return current
        if current.type == syms.simple_stmt:
            return None
        current = current.parent
    return None
|
| 99 |
+
|
| 100 |
+
def is_subtree(root, node):
    """Return True if *node* occurs in the tree rooted at *root*."""
    if root == node:
        return True
    for child in root.children:
        if is_subtree(child, node):
            return True
    return False
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_nonzero.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for __nonzero__ -> __bool__ methods."""
|
| 2 |
+
# Author: Collin Winter
|
| 3 |
+
|
| 4 |
+
# Local imports
|
| 5 |
+
from .. import fixer_base
|
| 6 |
+
from ..fixer_util import Name
|
| 7 |
+
|
| 8 |
+
class FixNonzero(fixer_base.BaseFix):
    """Rename a class's ``__nonzero__`` method to ``__bool__``."""

    BM_compatible = True
    PATTERN = """
    classdef< 'class' any+ ':'
            suite< any*
                   funcdef< 'def' name='__nonzero__'
                            parameters< '(' NAME ')' > any+ >
                   any* > >
    """

    def transform(self, node, results):
        # Only the method name changes; signature and body are untouched.
        old_name = results["name"]
        replacement = Name("__bool__", prefix=old_name.prefix)
        old_name.replace(replacement)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_numliterals.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer that turns 1L into 1, 0755 into 0o755.
|
| 2 |
+
"""
|
| 3 |
+
# Copyright 2007 Georg Brandl.
|
| 4 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from ..pgen2 import token
|
| 8 |
+
from .. import fixer_base
|
| 9 |
+
from ..fixer_util import Number
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FixNumliterals(fixer_base.BaseFix):
    """Rewrite long-integer suffixes (``1L`` -> ``1``) and old-style octal
    literals (``0755`` -> ``0o755``)."""

    # Token-level matching; the pattern compiler is unnecessary here.
    _accept_type = token.NUMBER

    def match(self, node):
        # Override: candidates either start with "0" (possible octal) or
        # carry an L/l long suffix.
        value = node.value
        return value.startswith("0") or value[-1] in "Ll"

    def transform(self, node, results):
        value = node.value
        if value[-1] in 'Ll':
            # Drop the long suffix.
            value = value[:-1]
        elif value.startswith('0') and value.isdigit() and len(set(value)) > 1:
            # Old octal form; the set check leaves "0", "00", ... unchanged.
            value = "0o" + value[1:]

        return Number(value, prefix=node.prefix)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_reduce.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2008 Armin Ronacher.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for reduce().
|
| 5 |
+
|
| 6 |
+
Makes sure reduce() is imported from the functools module if reduce is
|
| 7 |
+
used in that module.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from lib2to3 import fixer_base
|
| 11 |
+
from lib2to3.fixer_util import touch_import
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FixReduce(fixer_base.BaseFix):
    """Ensure ``functools.reduce`` is imported wherever ``reduce()`` is
    called with two or three positional arguments.

    The call itself is left unchanged; only the import is added.
    """

    BM_compatible = True
    order = "pre"

    # Matches reduce(f, seq) and reduce(f, seq, init); the not(argument<...>)
    # guards exclude keyword arguments, which builtin reduce never took.
    PATTERN = """
    power< 'reduce'
        trailer< '('
            arglist< (
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any) |
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any)
            ) >
        ')' >
    >
    """

    def transform(self, node, results):
        # Idempotently add "from functools import reduce" to the module.
        touch_import('functools', 'reduce', node)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_reload.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fixer for reload().
|
| 2 |
+
|
| 3 |
+
reload(s) -> importlib.reload(s)"""
|
| 4 |
+
|
| 5 |
+
# Local imports
|
| 6 |
+
from .. import fixer_base
|
| 7 |
+
from ..fixer_util import ImportAndCall, touch_import
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class FixReload(fixer_base.BaseFix):
    """Rewrite ``reload(obj)`` as ``importlib.reload(obj)``."""
    BM_compatible = True
    order = "pre"

    # Accept exactly one positional argument (optionally with a trailing
    # comma); keyword arguments are rejected in the pattern itself.
    PATTERN = """
    power< 'reload'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        if results:
            # I feel like we should be able to express this logic in the
            # PATTERN above but I don't know how to do it so...
            obj = results['obj']
            if obj:
                # Star-args make the call shape ambiguous; leave it alone.
                if (obj.type == self.syms.argument and
                    obj.children[0].value in {'**', '*'}):
                    return  # Make no change.
            names = ('importlib', 'reload')
            new = ImportAndCall(node, results, names)
            # Make sure "import importlib" exists at module level.
            touch_import(None, 'importlib', node)
            return new
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_renames.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix incompatible renames
|
| 2 |
+
|
| 3 |
+
Fixes:
|
| 4 |
+
* sys.maxint -> sys.maxsize
|
| 5 |
+
"""
|
| 6 |
+
# Author: Christian Heimes
|
| 7 |
+
# based on Collin Winter's fix_import
|
| 8 |
+
|
| 9 |
+
# Local imports
|
| 10 |
+
from .. import fixer_base
|
| 11 |
+
from ..fixer_util import Name, attr_chain
|
| 12 |
+
|
| 13 |
+
MAPPING = {"sys": {"maxint" : "maxsize"},
|
| 14 |
+
}
|
| 15 |
+
LOOKUP = {}
|
| 16 |
+
|
| 17 |
+
def alternates(members):
    """Build a pattern alternation such as ``('a'|'b')`` from *members*."""
    body = "|".join(repr(member) for member in members)
    return "(" + body + ")"
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def build_pattern():
    """Yield one match pattern per renamed (module, attribute) pair.

    Side effect: fills the module-level LOOKUP table mapping
    (module, old_attribute) -> new_attribute, which transform() consults.
    """
    for module, renames in list(MAPPING.items()):
        for old_attr, new_attr in list(renames.items()):
            LOOKUP[(module, old_attr)] = new_attr
            # "from sys import maxint" / "from sys import maxint as m"
            yield """
                  import_from< 'from' module_name=%r 'import'
                      ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
                  """ % (module, old_attr, old_attr)
            # "sys.maxint"
            yield """
                  power< module_name=%r trailer< '.' attr_name=%r > any* >
                  """ % (module, old_attr)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class FixRenames(fixer_base.BaseFix):
    """Apply attribute renames such as ``sys.maxint`` -> ``sys.maxsize``."""

    BM_compatible = True
    PATTERN = "|".join(build_pattern())

    order = "pre" # Pre-order tree traversal

    def match(self, node):
        # Reject a match nested inside another matching node so only the
        # outermost occurrence is transformed.
        matcher = super(FixRenames, self).match
        results = matcher(node)
        if not results:
            return False
        if any(matcher(ancestor) for ancestor in attr_chain(node, "parent")):
            return False
        return results

    def transform(self, node, results):
        mod_name = results.get("module_name")
        attr_name = results.get("attr_name")

        if mod_name and attr_name:
            # build_pattern() guarantees the pair is present in LOOKUP.
            replacement = LOOKUP[(mod_name.value, attr_name.value)]
            attr_name.replace(Name(replacement, prefix=attr_name.prefix))
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_standarderror.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for StandardError -> Exception."""
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixStandarderror(fixer_base.BaseFix):
    """Replace the removed ``StandardError`` builtin with ``Exception``."""

    BM_compatible = True
    PATTERN = """
              'StandardError'
              """

    def transform(self, node, results):
        # A bare name swap; whitespace prefix is carried over.
        return Name("Exception", prefix=node.prefix)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_types.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Fixer for removing uses of the types module.
|
| 5 |
+
|
| 6 |
+
These work for only the known names in the types module. The forms above
|
| 7 |
+
can include types. or not. ie, It is assumed the module is imported either as:
|
| 8 |
+
|
| 9 |
+
import types
|
| 10 |
+
from types import ... # either * or specific types
|
| 11 |
+
|
| 12 |
+
The import statements are not modified.
|
| 13 |
+
|
| 14 |
+
There should be another fixer that handles at least the following constants:
|
| 15 |
+
|
| 16 |
+
type([]) -> list
|
| 17 |
+
type(()) -> tuple
|
| 18 |
+
type('') -> str
|
| 19 |
+
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
# Local imports
|
| 23 |
+
from .. import fixer_base
|
| 24 |
+
from ..fixer_util import Name
|
| 25 |
+
|
| 26 |
+
_TYPE_MAPPING = {
|
| 27 |
+
'BooleanType' : 'bool',
|
| 28 |
+
'BufferType' : 'memoryview',
|
| 29 |
+
'ClassType' : 'type',
|
| 30 |
+
'ComplexType' : 'complex',
|
| 31 |
+
'DictType': 'dict',
|
| 32 |
+
'DictionaryType' : 'dict',
|
| 33 |
+
'EllipsisType' : 'type(Ellipsis)',
|
| 34 |
+
#'FileType' : 'io.IOBase',
|
| 35 |
+
'FloatType': 'float',
|
| 36 |
+
'IntType': 'int',
|
| 37 |
+
'ListType': 'list',
|
| 38 |
+
'LongType': 'int',
|
| 39 |
+
'ObjectType' : 'object',
|
| 40 |
+
'NoneType': 'type(None)',
|
| 41 |
+
'NotImplementedType' : 'type(NotImplemented)',
|
| 42 |
+
'SliceType' : 'slice',
|
| 43 |
+
'StringType': 'bytes', # XXX ?
|
| 44 |
+
'StringTypes' : '(str,)', # XXX ?
|
| 45 |
+
'TupleType': 'tuple',
|
| 46 |
+
'TypeType' : 'type',
|
| 47 |
+
'UnicodeType': 'str',
|
| 48 |
+
'XRangeType' : 'range',
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]
|
| 52 |
+
|
| 53 |
+
class FixTypes(fixer_base.BaseFix):
    """Replace ``types.XxxType`` attribute accesses with builtin names."""

    BM_compatible = True
    PATTERN = '|'.join(_pats)

    def transform(self, node, results):
        replacement = _TYPE_MAPPING.get(results["name"].value)
        if not replacement:
            # Unknown attribute: leave the node unchanged.
            return None
        return Name(replacement, prefix=node.prefix)
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_unicode.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Fixer for unicode.
|
| 2 |
+
|
| 3 |
+
* Changes unicode to str and unichr to chr.
|
| 4 |
+
|
| 5 |
+
* If "...\u..." is not unicode literal change it into "...\\u...".
|
| 6 |
+
|
| 7 |
+
* Change u"..." into "...".
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from ..pgen2 import token
|
| 12 |
+
from .. import fixer_base
|
| 13 |
+
|
| 14 |
+
_mapping = {"unichr" : "chr", "unicode" : "str"}
|
| 15 |
+
|
| 16 |
+
class FixUnicode(fixer_base.BaseFix):
    """Rename ``unicode``/``unichr`` and normalize string literals.

    * ``unicode`` -> ``str``, ``unichr`` -> ``chr``
    * In non-unicode-literal modules, double ``\\u``/``\\U`` escapes that
      were literal text in Python 2 byte strings.
    * Strip a leading ``u``/``U`` string prefix.
    """
    BM_compatible = True
    PATTERN = "STRING | 'unicode' | 'unichr'"

    def start_tree(self, tree, filename):
        super(FixUnicode, self).start_tree(tree, filename)
        # Under "from __future__ import unicode_literals" plain strings are
        # already unicode, so the escape doubling below must be skipped.
        self.unicode_literals = 'unicode_literals' in tree.future_features

    def transform(self, node, results):
        if node.type == token.NAME:
            # unicode -> str, unichr -> chr (see _mapping).
            new = node.clone()
            new.value = _mapping[node.value]
            return new
        elif node.type == token.STRING:
            val = node.value
            if not self.unicode_literals and val[0] in '\'"' and '\\' in val:
                # Double \u and \U escapes.  Splitting on a literal '\\'
                # first protects backslashes that are already escaped.
                val = r'\\'.join([
                    v.replace('\\u', r'\\u').replace('\\U', r'\\U')
                    for v in val.split(r'\\')
                ])
            if val[0] in 'uU':
                # Drop the now-redundant unicode prefix.
                val = val[1:]
            if val == node.value:
                # Nothing changed; keep the original leaf untouched.
                return node
            new = node.clone()
            new.value = val
            return new
|
evalkit_cambrian/lib/python3.10/lib2to3/fixes/fix_xreadlines.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fix "for x in f.xreadlines()" -> "for x in f".
|
| 2 |
+
|
| 3 |
+
This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
|
| 4 |
+
# Author: Collin Winter
|
| 5 |
+
|
| 6 |
+
# Local imports
|
| 7 |
+
from .. import fixer_base
|
| 8 |
+
from ..fixer_util import Name
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class FixXreadlines(fixer_base.BaseFix):
    """Rewrite ``f.xreadlines()`` to ``f`` and bare ``f.xreadlines`` to
    ``f.__iter__``."""

    BM_compatible = True
    PATTERN = """
    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
    |
    power< any+ trailer< '.' no_call='xreadlines' > >
    """

    def transform(self, node, results):
        bare_attr = results.get("no_call")

        if bare_attr is None:
            # "f.xreadlines()" -> "f": keep just the receiver expression.
            node.replace([piece.clone() for piece in results["call"]])
        else:
            # "g(f.xreadlines)" -> "g(f.__iter__)"
            bare_attr.replace(Name("__iter__", prefix=bare_attr.prefix))
|
evalkit_cambrian/lib/python3.10/lib2to3/main.py
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Main program for 2to3.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import with_statement, print_function
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
import difflib
|
| 10 |
+
import logging
|
| 11 |
+
import shutil
|
| 12 |
+
import optparse
|
| 13 |
+
|
| 14 |
+
from . import refactor
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def diff_texts(a, b, filename):
    """Return a unified diff of two strings."""
    return difflib.unified_diff(
        a.splitlines(), b.splitlines(),
        filename, filename,
        "(original)", "(refactored)",
        lineterm="")
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
    """
    A refactoring tool that can avoid overwriting its input files.
    Prints output to stdout.

    Output files can optionally be written to a different directory and or
    have an extra file suffix appended to their name for use in situations
    where you do not want to replace the input files.
    """

    def __init__(self, fixers, options, explicit, nobackups, show_diffs,
                 input_base_dir='', output_dir='', append_suffix=''):
        """
        Args:
            fixers: A list of fixers to import.
            options: A dict with RefactoringTool configuration.
            explicit: A list of fixers to run even if they are explicit.
            nobackups: If true no backup '.bak' files will be created for those
                files that are being refactored.
            show_diffs: Should diffs of the refactoring be printed to stdout?
            input_base_dir: The base directory for all input files.  This class
                will strip this path prefix off of filenames before substituting
                it with output_dir.  Only meaningful if output_dir is supplied.
                All files processed by refactor() must start with this path.
            output_dir: If supplied, all converted files will be written into
                this directory tree instead of input_base_dir.
            append_suffix: If supplied, all files output by this tool will have
                this appended to their filename.  Useful for changing .py to
                .py3 for example by passing append_suffix='3'.
        """
        self.nobackups = nobackups
        self.show_diffs = show_diffs
        # Normalize to a trailing separator so the startswith() check in
        # write_file() cannot match a partial path component.
        if input_base_dir and not input_base_dir.endswith(os.sep):
            input_base_dir += os.sep
        self._input_base_dir = input_base_dir
        self._output_dir = output_dir
        self._append_suffix = append_suffix
        super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)

    def log_error(self, msg, *args, **kwargs):
        # Remember the error (main() turns a non-empty list into exit
        # status 1) and also log it normally.
        self.errors.append((msg, args, kwargs))
        self.logger.error(msg, *args, **kwargs)

    def write_file(self, new_text, filename, old_text, encoding):
        orig_filename = filename
        if self._output_dir:
            if filename.startswith(self._input_base_dir):
                # Mirror the input layout underneath the output directory.
                filename = os.path.join(self._output_dir,
                                        filename[len(self._input_base_dir):])
            else:
                raise ValueError('filename %s does not start with the '
                                 'input_base_dir %s' % (
                                         filename, self._input_base_dir))
        if self._append_suffix:
            filename += self._append_suffix
        if orig_filename != filename:
            # Writing to a redirected location: make sure its directory
            # exists and tell the user where the output went.
            output_dir = os.path.dirname(filename)
            if not os.path.isdir(output_dir) and output_dir:
                os.makedirs(output_dir)
            self.log_message('Writing converted %s to %s.', orig_filename,
                             filename)
        if not self.nobackups:
            # Make backup
            backup = filename + ".bak"
            if os.path.lexists(backup):
                try:
                    os.remove(backup)
                except OSError:
                    self.log_message("Can't remove backup %s", backup)
            try:
                os.rename(filename, backup)
            except OSError:
                self.log_message("Can't rename %s to %s", filename, backup)
        # Actually write the new file
        write = super(StdoutRefactoringTool, self).write_file
        write(new_text, filename, old_text, encoding)
        if not self.nobackups:
            # Restore the original mode bits from the backup we just made.
            shutil.copymode(backup, filename)
        if orig_filename != filename:
            # Preserve the file mode in the new output directory.
            shutil.copymode(orig_filename, filename)

    def print_output(self, old, new, filename, equal):
        if equal:
            self.log_message("No changes to %s", filename)
        else:
            self.log_message("Refactored %s", filename)
            if self.show_diffs:
                diff_lines = diff_texts(old, new, filename)
                try:
                    if self.output_lock is not None:
                        # Serialize diff output when running with -j so
                        # diffs from worker processes don't interleave.
                        with self.output_lock:
                            for line in diff_lines:
                                print(line)
                            sys.stdout.flush()
                    else:
                        for line in diff_lines:
                            print(line)
                except UnicodeEncodeError:
                    warn("couldn't encode %s's diff for your terminal" %
                         (filename,))
                    return
|
| 128 |
+
|
| 129 |
+
def warn(msg):
    """Emit a warning line on standard error."""
    sys.stderr.write("WARNING: %s\n" % (msg,))
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def main(fixer_pkg, args=None):
    """Main program.

    Args:
        fixer_pkg: the name of a package where the fixers are located.
        args: optional; a list of command line arguments. If omitted,
              sys.argv[1:] is used.

    Returns a suggested exit status (0, 1, 2).
    """
    # Set up option parser
    parser = optparse.OptionParser(usage="2to3 [options] file|dir ...")
    parser.add_option("-d", "--doctests_only", action="store_true",
                      help="Fix up doctests only")
    parser.add_option("-f", "--fix", action="append", default=[],
                      help="Each FIX specifies a transformation; default: all")
    parser.add_option("-j", "--processes", action="store", default=1,
                      type="int", help="Run 2to3 concurrently")
    parser.add_option("-x", "--nofix", action="append", default=[],
                      help="Prevent a transformation from being run")
    parser.add_option("-l", "--list-fixes", action="store_true",
                      help="List available transformations")
    parser.add_option("-p", "--print-function", action="store_true",
                      help="Modify the grammar so that print() is a function")
    parser.add_option("-e", "--exec-function", action="store_true",
                      help="Modify the grammar so that exec() is a function")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="More verbose logging")
    parser.add_option("--no-diffs", action="store_true",
                      help="Don't show diffs of the refactoring")
    parser.add_option("-w", "--write", action="store_true",
                      help="Write back modified files")
    parser.add_option("-n", "--nobackups", action="store_true", default=False,
                      help="Don't write backups for modified files")
    parser.add_option("-o", "--output-dir", action="store", type="str",
                      default="", help="Put output files in this directory "
                      "instead of overwriting the input files.  Requires -n.")
    parser.add_option("-W", "--write-unchanged-files", action="store_true",
                      help="Also write files even if no changes were required"
                      " (useful with --output-dir); implies -w.")
    parser.add_option("--add-suffix", action="store", type="str", default="",
                      help="Append this string to all output filenames."
                      " Requires -n if non-empty. "
                      "ex: --add-suffix='3' will generate .py3 files.")

    # Parse command line arguments
    refactor_stdin = False
    flags = {}
    options, args = parser.parse_args(args)
    if options.write_unchanged_files:
        flags["write_unchanged_files"] = True
        if not options.write:
            warn("--write-unchanged-files/-W implies -w.")
        options.write = True
    # If we allowed these, the original files would be renamed to backup names
    # but not replaced.
    if options.output_dir and not options.nobackups:
        parser.error("Can't use --output-dir/-o without -n.")
    if options.add_suffix and not options.nobackups:
        parser.error("Can't use --add-suffix without -n.")

    if not options.write and options.no_diffs:
        warn("not writing files and not printing diffs; that's not very useful")
    if not options.write and options.nobackups:
        parser.error("Can't use -n without -w")
    if options.list_fixes:
        print("Available transformations for the -f/--fix option:")
        for fixname in refactor.get_all_fix_names(fixer_pkg):
            print(fixname)
        # "-l" with no file arguments is a successful listing-only run.
        if not args:
            return 0
    if not args:
        print("At least one file or directory argument required.", file=sys.stderr)
        print("Use --help to show usage.", file=sys.stderr)
        return 2
    if "-" in args:
        # "-" means refactor stdin to stdout; writing back is meaningless.
        refactor_stdin = True
        if options.write:
            print("Can't write to stdin.", file=sys.stderr)
            return 2
    if options.print_function:
        flags["print_function"] = True

    if options.exec_function:
        flags["exec_function"] = True

    # Set up logging handler
    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(name)s: %(message)s', level=level)
    logger = logging.getLogger('lib2to3.main')

    # Initialize the refactoring tool
    avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
    unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
    explicit = set()
    if options.fix:
        all_present = False
        for fix in options.fix:
            if fix == "all":
                all_present = True
            else:
                explicit.add(fixer_pkg + ".fix_" + fix)
        requested = avail_fixes.union(explicit) if all_present else explicit
    else:
        requested = avail_fixes.union(explicit)
    fixer_names = requested.difference(unwanted_fixes)
    input_base_dir = os.path.commonprefix(args)
    if (input_base_dir and not input_base_dir.endswith(os.sep)
        and not os.path.isdir(input_base_dir)):
        # One or more similar names were passed, their directory is the base.
        # os.path.commonprefix() is ignorant of path elements, this corrects
        # for that weird API.
        input_base_dir = os.path.dirname(input_base_dir)
    if options.output_dir:
        input_base_dir = input_base_dir.rstrip(os.sep)
        logger.info('Output in %r will mirror the input directory %r layout.',
                    options.output_dir, input_base_dir)
    rt = StdoutRefactoringTool(
            sorted(fixer_names), flags, sorted(explicit),
            options.nobackups, not options.no_diffs,
            input_base_dir=input_base_dir,
            output_dir=options.output_dir,
            append_suffix=options.add_suffix)

    # Refactor all files and directories passed as arguments
    if not rt.errors:
        if refactor_stdin:
            rt.refactor_stdin()
        else:
            try:
                rt.refactor(args, options.write, options.doctests_only,
                            options.processes)
            except refactor.MultiprocessingUnsupported:
                assert options.processes > 1
                print("Sorry, -j isn't supported on this platform.",
                      file=sys.stderr)
                return 1
        rt.summarize()

    # Return error status (0 if rt.errors is zero)
    return int(bool(rt.errors))
|
evalkit_cambrian/lib/python3.10/lib2to3/patcomp.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2006 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Pattern compiler.
|
| 5 |
+
|
| 6 |
+
The grammar is taken from PatternGrammar.txt.
|
| 7 |
+
|
| 8 |
+
The compiler compiles a pattern to a pytree.*Pattern instance.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__author__ = "Guido van Rossum <guido@python.org>"
|
| 12 |
+
|
| 13 |
+
# Python imports
|
| 14 |
+
import io
|
| 15 |
+
|
| 16 |
+
# Fairly local imports
|
| 17 |
+
from .pgen2 import driver, literals, token, tokenize, parse, grammar
|
| 18 |
+
|
| 19 |
+
# Really local imports
|
| 20 |
+
from . import pytree
|
| 21 |
+
from . import pygram
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class PatternSyntaxError(Exception):
    """Raised when a pattern string cannot be parsed against the pattern grammar."""
    pass
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def tokenize_wrapper(input):
    """Yield tokens for *input*, dropping whitespace-structure tokens.

    NEWLINE, INDENT and DEDENT tokens are filtered out; pattern syntax
    is whitespace-insensitive, so only the remaining tokens matter.
    """
    insignificant = (token.NEWLINE, token.INDENT, token.DEDENT)
    for quintuple in tokenize.generate_tokens(io.StringIO(input).readline):
        if quintuple[0] not in insignificant:
            yield quintuple
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class PatternCompiler(object):
    # Compiles pattern strings into nested pytree.*Pattern matcher objects.

    def __init__(self, grammar_file=None):
        """Initializer.

        Takes an optional alternative filename for the pattern grammar.
        """
        if grammar_file is None:
            # No file given: use the prebuilt pattern grammar from pygram.
            self.grammar = pygram.pattern_grammar
            self.syms = pygram.pattern_symbols
        else:
            self.grammar = driver.load_grammar(grammar_file)
            self.syms = pygram.Symbols(self.grammar)
        # Python grammar/symbols: compile_basic() resolves lowercase
        # pattern NAMEs to symbol numbers via self.pysyms.
        self.pygrammar = pygram.python_grammar
        self.pysyms = pygram.python_symbols
        self.driver = driver.Driver(self.grammar, convert=pattern_convert)

    def compile_pattern(self, input, debug=False, with_tree=False):
        """Compiles a pattern string to a nested pytree.*Pattern object.

        Raises PatternSyntaxError if the string does not parse.  When
        with_tree is true, the raw parse tree is returned as a second
        result alongside the compiled pattern.
        """
        tokens = tokenize_wrapper(input)
        try:
            root = self.driver.parse_tokens(tokens, debug=debug)
        except parse.ParseError as e:
            # Re-raise as our own exception type; "from None" suppresses
            # the internal parser traceback.
            raise PatternSyntaxError(str(e)) from None
        if with_tree:
            return self.compile_node(root), root
        else:
            return self.compile_node(root)

    def compile_node(self, node):
        """Compiles a node, recursively.

        This is one big switch on the node type.
        """
        # XXX Optimize certain Wildcard-containing-Wildcard patterns
        # that can be merged
        if node.type == self.syms.Matcher:
            node = node.children[0] # Avoid unneeded recursion

        if node.type == self.syms.Alternatives:
            # Skip the odd children since they are just '|' tokens
            alts = [self.compile_node(ch) for ch in node.children[::2]]
            if len(alts) == 1:
                return alts[0]
            p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1)
            return p.optimize()

        if node.type == self.syms.Alternative:
            units = [self.compile_node(ch) for ch in node.children]
            if len(units) == 1:
                return units[0]
            # min=1, max=1: the whole sequence must match exactly once.
            p = pytree.WildcardPattern([units], min=1, max=1)
            return p.optimize()

        if node.type == self.syms.NegatedUnit:
            # children[0] (the negation marker) is skipped; compile the rest.
            pattern = self.compile_basic(node.children[1:])
            p = pytree.NegatedPattern(pattern)
            return p.optimize()

        assert node.type == self.syms.Unit

        name = None
        nodes = node.children
        if len(nodes) >= 3 and nodes[1].type == token.EQUAL:
            # "name=..." prefix: bind the match result to this name.
            name = nodes[0].value
            nodes = nodes[2:]
        repeat = None
        if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater:
            # Trailing repeater: "*", "+", or a "{...}" count.
            repeat = nodes[-1]
            nodes = nodes[:-1]

        # Now we've reduced it to: STRING | NAME [Details] | (...) | [...]
        pattern = self.compile_basic(nodes, repeat)

        if repeat is not None:
            assert repeat.type == self.syms.Repeater
            children = repeat.children
            child = children[0]
            # NOTE: min/max deliberately shadow the builtins in this scope.
            if child.type == token.STAR:
                min = 0
                max = pytree.HUGE
            elif child.type == token.PLUS:
                min = 1
                max = pytree.HUGE
            elif child.type == token.LBRACE:
                # "{n}" (3 children) or "{n,m}" (5 children) forms.
                assert children[-1].type == token.RBRACE
                assert len(children) in (3, 5)
                min = max = self.get_int(children[1])
                if len(children) == 5:
                    max = self.get_int(children[3])
            else:
                assert False
            if min != 1 or max != 1:
                # Only wrap in a WildcardPattern when the repetition actually
                # differs from "exactly once".
                pattern = pattern.optimize()
                pattern = pytree.WildcardPattern([[pattern]], min=min, max=max)

        if name is not None:
            pattern.name = name
        return pattern.optimize()

    def compile_basic(self, nodes, repeat=None):
        # Compile STRING | NAME [Details] | (...) | [...]
        assert len(nodes) >= 1
        node = nodes[0]
        if node.type == token.STRING:
            # String literal: match a leaf with this exact value.
            value = str(literals.evalString(node.value))
            return pytree.LeafPattern(_type_of_literal(value), value)
        elif node.type == token.NAME:
            value = node.value
            if value.isupper():
                # Uppercase names denote token types (NAME, STRING, ...).
                if value not in TOKEN_MAP:
                    raise PatternSyntaxError("Invalid token: %r" % value)
                if nodes[1:]:
                    raise PatternSyntaxError("Can't have details for token")
                return pytree.LeafPattern(TOKEN_MAP[value])
            else:
                # Lowercase names denote grammar symbols; "any" matches any
                # node type (type stays None).
                if value == "any":
                    type = None
                elif not value.startswith("_"):
                    type = getattr(self.pysyms, value, None)
                    if type is None:
                        raise PatternSyntaxError("Invalid symbol: %r" % value)
                if nodes[1:]: # Details present
                    content = [self.compile_node(nodes[1].children[1])]
                else:
                    content = None
                return pytree.NodePattern(type, content)
        elif node.value == "(":
            # Parenthesized subpattern: just compile the inner node.
            return self.compile_node(nodes[1])
        elif node.value == "[":
            # Bracketed subpattern is optional: match zero or one times.
            assert repeat is None
            subpattern = self.compile_node(nodes[1])
            return pytree.WildcardPattern([[subpattern]], min=0, max=1)
        assert False, node

    def get_int(self, node):
        # Parse a NUMBER token into an int (used for {min,max} repeaters).
        assert node.type == token.NUMBER
        return int(node.value)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
# Map named tokens to the type value for a LeafPattern
# NOTE(review): "TOKEN" maps to None — presumably LeafPattern treats a
# None type as matching any token; confirm against pytree.LeafPattern.
TOKEN_MAP = {"NAME": token.NAME,
             "STRING": token.STRING,
             "NUMBER": token.NUMBER,
             "TOKEN": None}
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _type_of_literal(value):
    """Return the token type for a literal pattern string.

    Literals that start with a letter are NAME tokens; operator literals
    are looked up in the grammar's operator map; anything else has no
    specific type (None).
    """
    if value[0].isalpha():
        return token.NAME
    # dict.get returns None for operators not in the map, matching the
    # explicit membership test it replaces.
    return grammar.opmap.get(value)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def pattern_convert(grammar, raw_node_info):
    """Convert raw parser node information to a Node or Leaf instance."""
    node_type, value, context, children = raw_node_info
    # Anything with children, or whose type is a grammar nonterminal,
    # becomes an interior Node; everything else is a Leaf.
    is_interior = bool(children) or node_type in grammar.number2symbol
    if is_interior:
        return pytree.Node(node_type, children, context=context)
    return pytree.Leaf(node_type, value, context=context)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def compile_pattern(pattern):
    """Compile *pattern* with a freshly constructed default PatternCompiler."""
    compiler = PatternCompiler()
    return compiler.compile_pattern(pattern)
|