Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- evalkit_cambrian/lib/python3.10/_aix_support.py +89 -0
- evalkit_cambrian/lib/python3.10/_compat_pickle.py +251 -0
- evalkit_cambrian/lib/python3.10/_osx_support.py +574 -0
- evalkit_cambrian/lib/python3.10/_strptime.py +579 -0
- evalkit_cambrian/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py.orig +986 -0
- evalkit_cambrian/lib/python3.10/_threading_local.py +242 -0
- evalkit_cambrian/lib/python3.10/abc.py +188 -0
- evalkit_cambrian/lib/python3.10/ast.py +1709 -0
- evalkit_cambrian/lib/python3.10/binhex.py +502 -0
- evalkit_cambrian/lib/python3.10/bz2.py +344 -0
- evalkit_cambrian/lib/python3.10/cProfile.py +191 -0
- evalkit_cambrian/lib/python3.10/cgi.py +1004 -0
- evalkit_cambrian/lib/python3.10/cgitb.py +321 -0
- evalkit_cambrian/lib/python3.10/chunk.py +169 -0
- evalkit_cambrian/lib/python3.10/code.py +315 -0
- evalkit_cambrian/lib/python3.10/codecs.py +1127 -0
- evalkit_cambrian/lib/python3.10/codeop.py +153 -0
- evalkit_cambrian/lib/python3.10/contextvars.py +4 -0
- evalkit_cambrian/lib/python3.10/copyreg.py +219 -0
- evalkit_cambrian/lib/python3.10/decimal.py +11 -0
- evalkit_cambrian/lib/python3.10/doctest.py +0 -0
- evalkit_cambrian/lib/python3.10/enum.py +1053 -0
- evalkit_cambrian/lib/python3.10/fileinput.py +462 -0
- evalkit_cambrian/lib/python3.10/fnmatch.py +199 -0
- evalkit_cambrian/lib/python3.10/functools.py +992 -0
- evalkit_cambrian/lib/python3.10/genericpath.py +155 -0
- evalkit_cambrian/lib/python3.10/hashlib.py +269 -0
- evalkit_cambrian/lib/python3.10/heapq.py +601 -0
- evalkit_cambrian/lib/python3.10/imghdr.py +168 -0
- evalkit_cambrian/lib/python3.10/lzma.py +356 -0
- evalkit_cambrian/lib/python3.10/mailcap.py +298 -0
- evalkit_cambrian/lib/python3.10/opcode.py +216 -0
- evalkit_cambrian/lib/python3.10/pdb.py +1750 -0
- evalkit_cambrian/lib/python3.10/poplib.py +483 -0
- evalkit_cambrian/lib/python3.10/pprint.py +670 -0
- evalkit_cambrian/lib/python3.10/profile.py +611 -0
- evalkit_cambrian/lib/python3.10/pty.py +187 -0
- evalkit_cambrian/lib/python3.10/pyclbr.py +314 -0
- evalkit_cambrian/lib/python3.10/pydoc.py +0 -0
- evalkit_cambrian/lib/python3.10/random.py +930 -0
- evalkit_cambrian/lib/python3.10/runpy.py +321 -0
- evalkit_cambrian/lib/python3.10/sched.py +167 -0
- evalkit_cambrian/lib/python3.10/signal.py +92 -0
- evalkit_cambrian/lib/python3.10/sndhdr.py +257 -0
- evalkit_cambrian/lib/python3.10/socketserver.py +844 -0
- evalkit_cambrian/lib/python3.10/string.py +280 -0
- evalkit_cambrian/lib/python3.10/textwrap.py +494 -0
- evalkit_cambrian/lib/python3.10/this.py +28 -0
- evalkit_cambrian/lib/python3.10/token.py +137 -0
- evalkit_cambrian/lib/python3.10/turtle.py +0 -0
evalkit_cambrian/lib/python3.10/_aix_support.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Shared AIX support functions."""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
import sysconfig
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
import subprocess
|
| 8 |
+
except ImportError: # pragma: no cover
|
| 9 |
+
# _aix_support is used in distutils by setup.py to build C extensions,
|
| 10 |
+
# before subprocess dependencies like _posixsubprocess are available.
|
| 11 |
+
import _bootsubprocess as subprocess
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _aix_tag(vrtl, bd):
    # type: (List[int], int) -> str
    """Build the AIX platform tag from [version, release, TL] and builddate."""
    # Word size of the running interpreter: 32-bit iff maxsize is 2**31 - 1
    # (64 bit is assumed as the default).
    bits = 64 if sys.maxsize > 2**31 - 1 else 32
    version, release, tech_level = vrtl
    return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(
        version, release, tech_level, bd, bits)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# Extract version, release and technology level from a VRMF string.
def _aix_vrtl(vrmf):
    # type: (str) -> List[int]
    """Parse a 'V.R.M.F' string into [V, R, TL] ints (last digit of V only)."""
    fields = vrmf.split(".")
    version, release, tech_level = fields[0], fields[1], fields[2]
    # Only the final digit of the version field is significant here.
    return [int(version[-1]), int(release), int(tech_level)]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _aix_bosmp64():
    # type: () -> Tuple[str, int]
    """
    Return a Tuple[str, int] e.g., ['7.1.4.34', 1806]
    The fileset bos.mp64 is the AIX kernel. It's VRMF and builddate
    reflect the current ABI levels of the runtime environment.
    """
    # We expect all AIX systems to have lslpp installed in this location
    out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.mp64"])
    out = out.decode("utf-8")
    # lslpp -Lqc emits one colon-separated record; field 2 is the VRMF
    # string and the last field is the builddate.
    out = out.strip().split(":")  # type: ignore
    # Use str() and int() to help mypy see types
    return (str(out[2]), int(out[-1]))
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def aix_platform():
    # type: () -> str
    """
    AIX filesets are identified by four decimal values: V.R.M.F.
    V (version) and R (release) can be retrieved using ``uname``
    Since 2007, starting with AIX 5.3 TL7, the M value has been
    included with the fileset bos.mp64 and represents the Technology
    Level (TL) of AIX. The F (Fix) value also increases, but is not
    relevant for comparing releases and binary compatibility.
    For binary compatibility the so-called builddate is needed.
    Again, the builddate of an AIX release is associated with bos.mp64.
    AIX ABI compatibility is described as guaranteed at: https://www.ibm.com/\
support/knowledgecenter/en/ssw_aix_72/install/binary_compatability.html

    For pep425 purposes the AIX platform tag becomes:
    "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bitsize)
    e.g., "aix-6107-1415-32" for AIX 6.1 TL7 bd 1415, 32-bit
    and, "aix-6107-1415-64" for AIX 6.1 TL7 bd 1415, 64-bit
    """
    # Query the running kernel fileset and format the tag from it.
    vrmf, bd = _aix_bosmp64()
    return _aix_tag(_aix_vrtl(vrmf), bd)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# Extract vrtl from the BUILD_GNU_TYPE as an int list.
def _aix_bgt():
    # type: () -> List[int]
    """Return [V, R, TL] parsed from sysconfig's BUILD_GNU_TYPE value."""
    gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
    if gnu_type:
        return _aix_vrtl(vrmf=gnu_type)
    # Without this config var the build-time platform cannot be determined.
    raise ValueError("BUILD_GNU_TYPE is not defined")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def aix_buildtag():
    # type: () -> str
    """
    Return the platform_tag of the system Python was built on.
    """
    # AIX_BUILDDATE is defined by configure with:
    # lslpp -Lcq bos.mp64 | awk -F: '{ print $NF }'
    build_date = sysconfig.get_config_var("AIX_BUILDDATE")
    try:
        # int() raises TypeError when the var is missing (None) and
        # ValueError when it is present but not numeric.
        build_date = int(build_date)
    except (ValueError, TypeError):
        raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
                         f"{build_date!r}")
    return _aix_tag(_aix_bgt(), build_date)
|
evalkit_cambrian/lib/python3.10/_compat_pickle.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is used to map the old Python 2 names to the new names used in
# Python 3 for the pickle module.  This needed to make pickle streams
# generated with Python 2 loadable by Python 3.

# This is a copy of lib2to3.fixes.fix_imports.MAPPING.  We cannot import
# lib2to3 and use the mapping defined there, because lib2to3 uses pickle.
# Thus, this could cause the module to be imported recursively.
#
# Keys are Python 2 module names; values are the Python 3 replacement.
IMPORT_MAPPING = {
    '__builtin__' : 'builtins',
    'copy_reg': 'copyreg',
    'Queue': 'queue',
    'SocketServer': 'socketserver',
    'ConfigParser': 'configparser',
    'repr': 'reprlib',
    'tkFileDialog': 'tkinter.filedialog',
    'tkSimpleDialog': 'tkinter.simpledialog',
    'tkColorChooser': 'tkinter.colorchooser',
    'tkCommonDialog': 'tkinter.commondialog',
    'Dialog': 'tkinter.dialog',
    'Tkdnd': 'tkinter.dnd',
    'tkFont': 'tkinter.font',
    'tkMessageBox': 'tkinter.messagebox',
    'ScrolledText': 'tkinter.scrolledtext',
    'Tkconstants': 'tkinter.constants',
    'Tix': 'tkinter.tix',
    'ttk': 'tkinter.ttk',
    'Tkinter': 'tkinter',
    'markupbase': '_markupbase',
    '_winreg': 'winreg',
    'thread': '_thread',
    'dummy_thread': '_dummy_thread',
    'dbhash': 'dbm.bsd',
    'dumbdbm': 'dbm.dumb',
    'dbm': 'dbm.ndbm',
    'gdbm': 'dbm.gnu',
    'xmlrpclib': 'xmlrpc.client',
    'SimpleXMLRPCServer': 'xmlrpc.server',
    'httplib': 'http.client',
    'htmlentitydefs' : 'html.entities',
    'HTMLParser' : 'html.parser',
    'Cookie': 'http.cookies',
    'cookielib': 'http.cookiejar',
    'BaseHTTPServer': 'http.server',
    'test.test_support': 'test.support',
    'commands': 'subprocess',
    'urlparse' : 'urllib.parse',
    'robotparser' : 'urllib.robotparser',
    'urllib2': 'urllib.request',
    'anydbm': 'dbm',
    '_abcoll' : 'collections.abc',
}
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# This contains rename rules that are easy to handle.  We ignore the more
# complex stuff (e.g. mapping the names in the urllib and types modules).
# These rules should be run before import names are fixed.
#
# Keys and values are (module, attribute) pairs: Python 2 -> Python 3.
NAME_MAPPING = {
    ('__builtin__', 'xrange'): ('builtins', 'range'),
    ('__builtin__', 'reduce'): ('functools', 'reduce'),
    ('__builtin__', 'intern'): ('sys', 'intern'),
    ('__builtin__', 'unichr'): ('builtins', 'chr'),
    ('__builtin__', 'unicode'): ('builtins', 'str'),
    ('__builtin__', 'long'): ('builtins', 'int'),
    ('itertools', 'izip'): ('builtins', 'zip'),
    ('itertools', 'imap'): ('builtins', 'map'),
    ('itertools', 'ifilter'): ('builtins', 'filter'),
    ('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'),
    ('itertools', 'izip_longest'): ('itertools', 'zip_longest'),
    ('UserDict', 'IterableUserDict'): ('collections', 'UserDict'),
    ('UserList', 'UserList'): ('collections', 'UserList'),
    ('UserString', 'UserString'): ('collections', 'UserString'),
    ('whichdb', 'whichdb'): ('dbm', 'whichdb'),
    ('_socket', 'fromfd'): ('socket', 'fromfd'),
    ('_multiprocessing', 'Connection'): ('multiprocessing.connection', 'Connection'),
    ('multiprocessing.process', 'Process'): ('multiprocessing.context', 'Process'),
    ('multiprocessing.forking', 'Popen'): ('multiprocessing.popen_fork', 'Popen'),
    ('urllib', 'ContentTooShortError'): ('urllib.error', 'ContentTooShortError'),
    ('urllib', 'getproxies'): ('urllib.request', 'getproxies'),
    ('urllib', 'pathname2url'): ('urllib.request', 'pathname2url'),
    ('urllib', 'quote_plus'): ('urllib.parse', 'quote_plus'),
    ('urllib', 'quote'): ('urllib.parse', 'quote'),
    ('urllib', 'unquote_plus'): ('urllib.parse', 'unquote_plus'),
    ('urllib', 'unquote'): ('urllib.parse', 'unquote'),
    ('urllib', 'url2pathname'): ('urllib.request', 'url2pathname'),
    ('urllib', 'urlcleanup'): ('urllib.request', 'urlcleanup'),
    ('urllib', 'urlencode'): ('urllib.parse', 'urlencode'),
    ('urllib', 'urlopen'): ('urllib.request', 'urlopen'),
    ('urllib', 'urlretrieve'): ('urllib.request', 'urlretrieve'),
    ('urllib2', 'HTTPError'): ('urllib.error', 'HTTPError'),
    ('urllib2', 'URLError'): ('urllib.error', 'URLError'),
}
|
| 92 |
+
|
| 93 |
+
# Python 2 exception names that exist (with the same name) in Python 3's
# builtins; each is mapped from the old 'exceptions' module below.
PYTHON2_EXCEPTIONS = (
    "ArithmeticError",
    "AssertionError",
    "AttributeError",
    "BaseException",
    "BufferError",
    "BytesWarning",
    "DeprecationWarning",
    "EOFError",
    "EnvironmentError",
    "Exception",
    "FloatingPointError",
    "FutureWarning",
    "GeneratorExit",
    "IOError",
    "ImportError",
    "ImportWarning",
    "IndentationError",
    "IndexError",
    "KeyError",
    "KeyboardInterrupt",
    "LookupError",
    "MemoryError",
    "NameError",
    "NotImplementedError",
    "OSError",
    "OverflowError",
    "PendingDeprecationWarning",
    "ReferenceError",
    "RuntimeError",
    "RuntimeWarning",
    # StandardError is gone in Python 3, so we map it to Exception
    "StopIteration",
    "SyntaxError",
    "SyntaxWarning",
    "SystemError",
    "SystemExit",
    "TabError",
    "TypeError",
    "UnboundLocalError",
    "UnicodeDecodeError",
    "UnicodeEncodeError",
    "UnicodeError",
    "UnicodeTranslateError",
    "UnicodeWarning",
    "UserWarning",
    "ValueError",
    "Warning",
    "ZeroDivisionError",
)

# WindowsError only exists on Windows builds; include it when present.
try:
    WindowsError
except NameError:
    pass
else:
    PYTHON2_EXCEPTIONS += ("WindowsError",)

for excname in PYTHON2_EXCEPTIONS:
    NAME_MAPPING[("exceptions", excname)] = ("builtins", excname)

MULTIPROCESSING_EXCEPTIONS = (
    'AuthenticationError',
    'BufferTooShort',
    'ProcessError',
    'TimeoutError',
)

for excname in MULTIPROCESSING_EXCEPTIONS:
    NAME_MAPPING[("multiprocessing", excname)] = ("multiprocessing.context", excname)

# Same, but for 3.x to 2.x
# The asserts check that both mappings are one-to-one (safely invertible).
REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items())
assert len(REVERSE_IMPORT_MAPPING) == len(IMPORT_MAPPING)
REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items())
assert len(REVERSE_NAME_MAPPING) == len(NAME_MAPPING)
|
| 169 |
+
|
| 170 |
+
# Non-mutual mappings.
# Entries added below are intentionally one-directional: several Python 2
# names collapse onto the same Python 3 name (or vice versa), so they must
# not participate in the invertibility asserts above.

IMPORT_MAPPING.update({
    'cPickle': 'pickle',
    '_elementtree': 'xml.etree.ElementTree',
    'FileDialog': 'tkinter.filedialog',
    'SimpleDialog': 'tkinter.simpledialog',
    'DocXMLRPCServer': 'xmlrpc.server',
    'SimpleHTTPServer': 'http.server',
    'CGIHTTPServer': 'http.server',
    # For compatibility with broken pickles saved in old Python 3 versions
    'UserDict': 'collections',
    'UserList': 'collections',
    'UserString': 'collections',
    'whichdb': 'dbm',
    'StringIO': 'io',
    'cStringIO': 'io',
})

REVERSE_IMPORT_MAPPING.update({
    '_bz2': 'bz2',
    '_dbm': 'dbm',
    '_functools': 'functools',
    '_gdbm': 'gdbm',
    '_pickle': 'pickle',
})

NAME_MAPPING.update({
    ('__builtin__', 'basestring'): ('builtins', 'str'),
    ('exceptions', 'StandardError'): ('builtins', 'Exception'),
    ('UserDict', 'UserDict'): ('collections', 'UserDict'),
    ('socket', '_socketobject'): ('socket', 'SocketType'),
})

REVERSE_NAME_MAPPING.update({
    ('_functools', 'reduce'): ('__builtin__', 'reduce'),
    ('tkinter.filedialog', 'FileDialog'): ('FileDialog', 'FileDialog'),
    ('tkinter.filedialog', 'LoadFileDialog'): ('FileDialog', 'LoadFileDialog'),
    ('tkinter.filedialog', 'SaveFileDialog'): ('FileDialog', 'SaveFileDialog'),
    ('tkinter.simpledialog', 'SimpleDialog'): ('SimpleDialog', 'SimpleDialog'),
    ('xmlrpc.server', 'ServerHTMLDoc'): ('DocXMLRPCServer', 'ServerHTMLDoc'),
    ('xmlrpc.server', 'XMLRPCDocGenerator'):
        ('DocXMLRPCServer', 'XMLRPCDocGenerator'),
    ('xmlrpc.server', 'DocXMLRPCRequestHandler'):
        ('DocXMLRPCServer', 'DocXMLRPCRequestHandler'),
    ('xmlrpc.server', 'DocXMLRPCServer'):
        ('DocXMLRPCServer', 'DocXMLRPCServer'),
    ('xmlrpc.server', 'DocCGIXMLRPCRequestHandler'):
        ('DocXMLRPCServer', 'DocCGIXMLRPCRequestHandler'),
    ('http.server', 'SimpleHTTPRequestHandler'):
        ('SimpleHTTPServer', 'SimpleHTTPRequestHandler'),
    ('http.server', 'CGIHTTPRequestHandler'):
        ('CGIHTTPServer', 'CGIHTTPRequestHandler'),
    ('_socket', 'socket'): ('socket', '_socketobject'),
})

# Python 3 split OSError into many specialized subclasses that Python 2
# does not know; map them all back to plain OSError when downgrading.
PYTHON3_OSERROR_EXCEPTIONS = (
    'BrokenPipeError',
    'ChildProcessError',
    'ConnectionAbortedError',
    'ConnectionError',
    'ConnectionRefusedError',
    'ConnectionResetError',
    'FileExistsError',
    'FileNotFoundError',
    'InterruptedError',
    'IsADirectoryError',
    'NotADirectoryError',
    'PermissionError',
    'ProcessLookupError',
    'TimeoutError',
)

for excname in PYTHON3_OSERROR_EXCEPTIONS:
    REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'OSError')

# Likewise, ModuleNotFoundError (3.6+) degrades to plain ImportError.
PYTHON3_IMPORTERROR_EXCEPTIONS = (
    'ModuleNotFoundError',
)

for excname in PYTHON3_IMPORTERROR_EXCEPTIONS:
    REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'ImportError')
|
evalkit_cambrian/lib/python3.10/_osx_support.py
ADDED
|
@@ -0,0 +1,574 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Shared OS X support functions."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
'compiler_fixup',
|
| 9 |
+
'customize_config_vars',
|
| 10 |
+
'customize_compiler',
|
| 11 |
+
'get_platform_osx',
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
# configuration variables that may contain universal build flags,
|
| 15 |
+
# like "-arch" or "-isdkroot", that may need customization for
|
| 16 |
+
# the user environment
|
| 17 |
+
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
|
| 18 |
+
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
|
| 19 |
+
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
|
| 20 |
+
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
|
| 21 |
+
|
| 22 |
+
# configuration variables that may contain compiler calls
|
| 23 |
+
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
|
| 24 |
+
|
| 25 |
+
# prefix added to original configuration variable names
|
| 26 |
+
_INITPRE = '_OSX_SUPPORT_INITIAL_'
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    A string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH'].  Returns the complete filename or None if not found.
    """
    if path is None:
        path = os.environ['PATH']

    _, ext = os.path.splitext(executable)
    # On Windows an executable name without extension needs '.exe' appended.
    if sys.platform == 'win32' and ext != '.exe':
        executable = executable + '.exe'

    # An existing (absolute or relative) path wins without a PATH search.
    if os.path.isfile(executable):
        return executable

    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _read_output(commandstring, capture_stderr=False):
    """Output from successful command execution or None"""
    # Similar to os.popen(commandstring, "r").read(), but without actually
    # using os.popen because that function is not usable during the Python
    # bootstrap (tempfile is also not available then, hence the fallback).
    import contextlib
    try:
        import tempfile
        fp = tempfile.NamedTemporaryFile()
    except ImportError:
        fp = open("/tmp/_osx_support.%s"%(
            os.getpid(),), "w+b")

    with contextlib.closing(fp) as fp:
        if capture_stderr:
            cmd = "%s >'%s' 2>&1" % (commandstring, fp.name)
        else:
            cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
        status = os.system(cmd)
        if status:
            # Non-zero exit: treat the command as failed.
            return None
        return fp.read().decode('utf-8').strip()
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _find_build_tool(toolname):
    """Find a build tool on current path or using xcrun"""
    # Prefer a tool found on PATH; otherwise ask Xcode's xcrun to locate
    # it.  An empty string signals "not found" to callers.
    return (_find_executable(toolname)
                or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
                or ''
            )
|
| 83 |
+
|
| 84 |
+
# Cached result of _get_system_version(); '' means "could not determine".
_SYSTEM_VERSION = None

def _get_system_version():
    """Return the OS X system version as a string"""
    # Reading this plist is a documented way to get the system
    # version (see the documentation for the Gestalt Manager)
    # We avoid using platform.mac_ver to avoid possible bootstrap issues during
    # the build of Python itself (distutils is used to build standard library
    # extensions).

    global _SYSTEM_VERSION

    if _SYSTEM_VERSION is None:
        _SYSTEM_VERSION = ''
        try:
            f = open('/System/Library/CoreServices/SystemVersion.plist', encoding="utf-8")
        except OSError:
            # We're on a plain darwin box, fall back to the default
            # behaviour.
            pass
        else:
            try:
                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                              r'<string>(.*?)</string>', f.read())
            finally:
                f.close()
            if m is not None:
                # Keep only the major.minor part of the version string.
                _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
            # else: fall back to the default behaviour

    return _SYSTEM_VERSION
|
| 115 |
+
|
| 116 |
+
# Cached result of _get_system_version_tuple(); () means "unknown version".
_SYSTEM_VERSION_TUPLE = None
def _get_system_version_tuple():
    """
    Return the macOS system version as a tuple

    The return value is safe to use to compare
    two version numbers.
    """
    global _SYSTEM_VERSION_TUPLE
    if _SYSTEM_VERSION_TUPLE is None:
        osx_version = _get_system_version()
        if osx_version:
            try:
                _SYSTEM_VERSION_TUPLE = tuple(int(i) for i in osx_version.split('.'))
            except ValueError:
                # Unparseable version string: cache an empty tuple so
                # callers can tell "unknown" apart from a real version.
                _SYSTEM_VERSION_TUPLE = ()

    return _SYSTEM_VERSION_TUPLE
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def _remove_original_values(_config_vars):
    """Remove original unmodified values for testing"""
    # This is needed for higher-level cross-platform tests of get_platform.
    # Collect the saved-original keys first so we never mutate the dict
    # while iterating over it.
    saved_keys = [key for key in _config_vars if key.startswith(_INITPRE)]
    for key in saved_keys:
        del _config_vars[key]
|
| 142 |
+
|
| 143 |
+
def _save_modified_value(_config_vars, cv, newvalue):
    """Save modified and original unmodified value of configuration var"""
    # Remember the original only once: the first time the value changes.
    backup_key = _INITPRE + cv
    current = _config_vars.get(cv, '')
    if current != newvalue and backup_key not in _config_vars:
        _config_vars[backup_key] = current
    _config_vars[cv] = newvalue
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
# Cached result of _default_sysroot(); populated on first call.
_cache_default_sysroot = None
def _default_sysroot(cc):
    """ Returns the root of the default SDK for this system, or '/' """
    global _cache_default_sysroot

    if _cache_default_sysroot is not None:
        return _cache_default_sysroot

    # Ask the compiler for its default include search list (stderr is
    # captured because that is where the list is printed) and infer the
    # SDK root from where /usr/include lives.
    contents = _read_output('%s -c -E -v - </dev/null' % (cc,), True)
    in_incdirs = False
    for line in contents.splitlines():
        if line.startswith("#include <...>"):
            in_incdirs = True
        elif line.startswith("End of search list"):
            in_incdirs = False
        elif in_incdirs:
            line = line.strip()
            if line == '/usr/include':
                _cache_default_sysroot = '/'
            elif line.endswith(".sdk/usr/include"):
                # Drop the trailing "/usr/include" (12 chars) to keep the
                # "...sdk" directory as the sysroot.
                _cache_default_sysroot = line[:-12]
    if _cache_default_sysroot is None:
        _cache_default_sysroot = '/'

    return _cache_default_sysroot
|
| 177 |
+
|
| 178 |
+
def _supports_universal_builds():
    """Returns True if universal builds are supported on this system"""
    # As an approximation, we assume that if we are running on 10.4 or above,
    # then we are running with an Xcode environment that supports universal
    # builds, in particular -isysroot and -arch arguments to the compiler. This
    # is in support of allowing 10.4 universal builds to run on 10.3.x systems.

    osx_version = _get_system_version_tuple()
    # An empty tuple means the version is unknown; be conservative then.
    return bool(osx_version >= (10, 4)) if osx_version else False
|
| 187 |
+
|
| 188 |
+
def _supports_arm64_builds():
    """Returns True if arm64 builds are supported on this system"""
    # There are two sets of systems supporting macOS/arm64 builds:
    # 1. macOS 11 and later, unconditionally
    # 2. macOS 10.15 with Xcode 12.2 or later
    # For now the second category is ignored.
    osx_version = _get_system_version_tuple()
    # Unknown version (empty tuple) is treated as "not supported".
    return osx_version >= (11, 0) if osx_version else False
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _find_appropriate_compiler(_config_vars):
    """Find appropriate C compiler for extension module builds

    Mutates *_config_vars* in place (through _save_modified_value) and
    returns it.  No-op when the user has set CC in the environment.
    """

    # Issue #13590:
    #    The OSX location for the compiler varies between OSX
    #    (or rather Xcode) releases. With older releases (up-to 10.5)
    #    the compiler is in /usr/bin, with newer releases the compiler
    #    can only be found inside Xcode.app if the "Command Line Tools"
    #    are not installed.
    #
    #    Furthermore, the compiler that can be used varies between
    #    Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
    #    as the compiler, after that 'clang' should be used because
    #    gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
    #    miscompiles Python.

    # skip checks if the compiler was overridden with a CC env variable
    if 'CC' in os.environ:
        return _config_vars

    # The CC config var might contain additional arguments.
    # Ignore them while searching.
    cc = oldcc = _config_vars['CC'].split()[0]
    if not _find_executable(cc):
        # Compiler is not found on the shell search PATH.
        # Now search for clang, first on PATH (if the Command LIne
        # Tools have been installed in / or if the user has provided
        # another location via CC).  If not found, try using xcrun
        # to find an uninstalled clang (within a selected Xcode).

        # NOTE: Cannot use subprocess here because of bootstrap
        # issues when building Python itself (and os.popen is
        # implemented on top of subprocess and is therefore not
        # usable as well)

        cc = _find_build_tool('clang')

    elif os.path.basename(cc).startswith('gcc'):
        # Compiler is GCC, check if it is LLVM-GCC
        # (the replace() escapes any single quotes in the path so the
        # command line survives the shell)
        data = _read_output("'%s' --version"
                             % (cc.replace("'", "'\"'\"'"),))
        if data and 'llvm-gcc' in data:
            # Found LLVM-GCC, fall back to clang
            cc = _find_build_tool('clang')

    if not cc:
        raise SystemError(
               "Cannot locate working compiler")

    if cc != oldcc:
        # Found a replacement compiler.
        # Modify config vars using new compiler, if not already explicitly
        # overridden by an env variable, preserving additional arguments.
        for cv in _COMPILER_CONFIG_VARS:
            if cv in _config_vars and cv not in os.environ:
                cv_split = _config_vars[cv].split()
                # CXX gets the C++ driver ('clang++'); all others get 'clang'.
                cv_split[0] = cc if cv != 'CXX' else cc + '++'
                _save_modified_value(_config_vars, cv, ' '.join(cv_split))

    return _config_vars
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def _remove_universal_flags(_config_vars):
    """Remove all universal build arguments from config vars"""
    for name in _UNIVERSAL_CONFIG_VARS:
        # Leave alone any config var explicitly overridden by an env var.
        if name not in _config_vars or name in os.environ:
            continue
        value = _config_vars[name]
        value = re.sub(r'-arch\s+\w+\s', ' ', value, flags=re.ASCII)
        value = re.sub(r'-isysroot\s*\S+', ' ', value)
        _save_modified_value(_config_vars, name, value)
    return _config_vars
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def _remove_unsupported_archs(_config_vars):
    """Remove any unsupported archs from config vars

    Mutates *_config_vars* in place (through _save_modified_value) and
    returns it.  No-op when the user has set CC in the environment.
    """
    # Different Xcode releases support different sets for '-arch'
    # flags. In particular, Xcode 4.x no longer supports the
    # PPC architectures.
    #
    # This code automatically removes '-arch ppc' and '-arch ppc64'
    # when these are not supported. That makes it possible to
    # build extensions on OSX 10.7 and later with the prebuilt
    # 32-bit installer on the python.org website.

    # skip checks if the compiler was overridden with a CC env variable
    if 'CC' in os.environ:
        return _config_vars

    if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
        # NOTE: Cannot use subprocess here because of bootstrap
        # issues when building Python itself
        #
        # Probe: try to compile a trivial translation unit for ppc; the
        # replace() escapes single quotes in the compiler path for the shell.
        status = os.system(
            """echo 'int main{};' | """
            """'%s' -c -arch ppc -x c -o /dev/null /dev/null 2>/dev/null"""
            %(_config_vars['CC'].replace("'", "'\"'\"'"),))
        if status:
            # The compile failed for some reason.  Because of differences
            # across Xcode and compiler versions, there is no reliable way
            # to be sure why it failed.  Assume here it was due to lack of
            # PPC support and remove the related '-arch' flags from each
            # config variables not explicitly overridden by an environment
            # variable.  If the error was for some other reason, we hope the
            # failure will show up again when trying to compile an extension
            # module.
            for cv in _UNIVERSAL_CONFIG_VARS:
                if cv in _config_vars and cv not in os.environ:
                    flags = _config_vars[cv]
                    flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
                    _save_modified_value(_config_vars, cv, flags)

    return _config_vars
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def _override_all_archs(_config_vars):
    """Allow override of all archs with ARCHFLAGS env var"""
    # NOTE: ARCHFLAGS was introduced by Apple in OSX 10.5 and is honoured
    # by several scripting languages distributed with that OS release.
    if 'ARCHFLAGS' in os.environ:
        archflags = os.environ['ARCHFLAGS']
        for name in _UNIVERSAL_CONFIG_VARS:
            # Only rewrite vars that actually carry -arch selections.
            if name in _config_vars and '-arch' in _config_vars[name]:
                stripped = re.sub(r'-arch\s+\w+\s', ' ', _config_vars[name])
                _save_modified_value(_config_vars, name,
                                     stripped + ' ' + archflags)
    return _config_vars
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def _check_for_unavailable_sdk(_config_vars):
    """Remove references to any SDKs not available"""
    # On OSX 10.5 and later, compiling an extension against an SDK that is
    # not installed is worse than compiling with no SDK at all.  This is
    # particularly important for the standalone Command Line Tools (as
    # opposed to a full-blown Xcode install), which place headers and dev
    # libs under /usr and /System/Library but ship no SDK directories.
    # So if the configured SDK is missing, strip every -isysroot reference
    # from config vars the user has not explicitly overridden.
    match = re.search(r'-isysroot\s*(\S+)', _config_vars.get('CFLAGS', ''))
    if match is None:
        return _config_vars
    if os.path.exists(match.group(1)):
        return _config_vars
    for name in _UNIVERSAL_CONFIG_VARS:
        # Do not alter a config var explicitly overridden by env var.
        if name in _config_vars and name not in os.environ:
            cleaned = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ',
                             _config_vars[name])
            _save_modified_value(_config_vars, name, cleaned)
    return _config_vars
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one them in extra_compile_flags.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.

    Returns a new (possibly modified) copy of *compiler_so*; *cc_args*
    is only inspected, never changed.
    """
    stripArch = stripSysroot = False

    compiler_so = list(compiler_so)

    if not _supports_universal_builds():
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        # '-isysroot' may appear either as its own argument or fused with
        # its path ('-isysroot/some/path'), hence startswith().
        stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))

    if stripArch or 'ARCHFLAGS' in os.environ:
        while True:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break

    elif not _supports_arm64_builds():
        # Look for "-arch arm64" and drop that
        # (iterate in reverse so deletions don't shift unvisited indices)
        for idx in reversed(range(len(compiler_so))):
            if compiler_so[idx] == '-arch' and compiler_so[idx+1] == "arm64":
                del compiler_so[idx:idx+2]

    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also distutils.sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()

    if stripSysroot:
        while True:
            indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
            if not indices:
                break
            index = indices[0]
            if compiler_so[index] == '-isysroot':
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            else:
                # It's '-isysroot/some/path' in one arg
                del compiler_so[index:index+1]

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    # cc_args takes precedence over compiler_so when locating the sysroot.
    sysroot = None
    argvar = cc_args
    indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
    if not indices:
        argvar = compiler_so
        indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]

    for idx in indices:
        if argvar[idx] == '-isysroot':
            sysroot = argvar[idx+1]
            break
        else:
            sysroot = argvar[idx][len('-isysroot'):]
            break

    if sysroot and not os.path.isdir(sysroot):
        # Warn but do not fail; the compile itself will surface any error.
        sys.stderr.write(f"Compiling with an SDK that doesn't seem to exist: {sysroot}\n")
        sys.stderr.write("Please check your Xcode installation\n")
        sys.stderr.flush()

    return compiler_so
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
def customize_config_vars(_config_vars):
    """Customize Python build configuration variables.

    Called internally from sysconfig with a mutable mapping of name/value
    pairs parsed from the Makefile used to build this interpreter, and
    returns the mapping updated to reflect the environment the interpreter
    is actually running in.  For a Python from a binary installer, that
    environment can differ substantially from the build environment:
    different OS levels, build tools, or available CPU architectures.

    This runs the first time distutils.sysconfig.get_config_vars() is
    called, possibly in an environment with no compilers at all (e.g.
    installing pure Python dists).  Compiler-path customization and
    unavailable-arch detection are therefore deferred until the first
    extension module build (distutils.sysconfig.customize_compiler).

    Currently called from distutils.sysconfig
    """
    if not _supports_universal_builds():
        # Pre-10.4 Mac OS X cannot handle -arch/-isysroot at all; drop any
        # such flags a universal build of Python left in CFLAGS/LDFLAGS so
        # extensions still build on a 10.3 system.
        _remove_universal_flags(_config_vars)

    # Let an ARCHFLAGS environment variable override all archs.
    _override_all_archs(_config_vars)

    # Drop -isysroot references to SDKs that are not actually installed.
    _check_for_unavailable_sdk(_config_vars)

    return _config_vars
|
| 477 |
+
|
| 478 |
+
|
| 479 |
+
def customize_compiler(_config_vars):
    """Customize compiler path and configuration variables.

    Performed when the first extension-module build is requested
    (in distutils.sysconfig.customize_compiler).  Mutates and
    returns *_config_vars*.
    """
    # Pick a working C compiler for building extension modules.
    _find_appropriate_compiler(_config_vars)

    # Strip ppc arch flags when the toolchain no longer supports them.
    _remove_unsupported_archs(_config_vars)

    # Let an ARCHFLAGS environment variable override all archs.
    _override_all_archs(_config_vars)

    return _config_vars
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
def get_platform_osx(_config_vars, osname, release, machine):
    """Filter values for get_platform()

    Returns a possibly-adjusted (osname, release, machine) triple.
    """
    # called from get_platform() in sysconfig and distutils.util
    #
    # For our purposes, we'll assume that the system version from
    # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
    # to. This makes the compatibility story a bit more sane because the
    # machine is going to compile and link as if it were
    # MACOSX_DEPLOYMENT_TARGET.

    macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
    macrelease = _get_system_version() or macver
    macver = macver or macrelease

    if macver:
        release = macver
        osname = "macosx"

        # Use the original CFLAGS value, if available, so that we
        # return the same machine type for the platform string.
        # Otherwise, distutils may consider this a cross-compiling
        # case and disallow installs.
        cflags = _config_vars.get(_INITPRE+'CFLAGS',
                                    _config_vars.get('CFLAGS', ''))
        if macrelease:
            try:
                # Keep only the (major, minor) components, e.g. '10.15.7'
                # becomes (10, 15).
                macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
            except ValueError:
                macrelease = (10, 3)
        else:
            # assume no universal support
            macrelease = (10, 3)

        if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
            # The universal build will build fat binaries, but not on
            # systems before 10.4

            machine = 'fat'

            archs = re.findall(r'-arch\s+(\S+)', cflags)
            archs = tuple(sorted(set(archs)))

            # Map the sorted, de-duplicated arch set to the conventional
            # macOS platform-tag name.
            if len(archs) == 1:
                machine = archs[0]
            elif archs == ('arm64', 'x86_64'):
                machine = 'universal2'
            elif archs == ('i386', 'ppc'):
                machine = 'fat'
            elif archs == ('i386', 'x86_64'):
                machine = 'intel'
            elif archs == ('i386', 'ppc', 'x86_64'):
                machine = 'fat3'
            elif archs == ('ppc64', 'x86_64'):
                machine = 'fat64'
            elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                machine = 'universal'
            else:
                raise ValueError(
                   "Don't know machine value for archs=%r" % (archs,))

        elif machine == 'i386':
            # On OSX the machine type returned by uname is always the
            # 32-bit variant, even if the executable architecture is
            # the 64-bit variant
            if sys.maxsize >= 2**32:
                machine = 'x86_64'

        elif machine in ('PowerPC', 'Power_Macintosh'):
            # Pick a sane name for the PPC architecture.
            # See 'i386' case
            if sys.maxsize >= 2**32:
                machine = 'ppc64'
            else:
                machine = 'ppc'

    return (osname, release, machine)
|
evalkit_cambrian/lib/python3.10/_strptime.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Strptime-related classes and functions.
|
| 2 |
+
|
| 3 |
+
CLASSES:
|
| 4 |
+
LocaleTime -- Discovers and stores locale-specific time information
|
| 5 |
+
TimeRE -- Creates regexes for pattern matching a string of text containing
|
| 6 |
+
time information
|
| 7 |
+
|
| 8 |
+
FUNCTIONS:
|
| 9 |
+
_getlang -- Figure out what language is being used for the locale
|
| 10 |
+
strptime -- Calculates the time struct represented by the passed-in string
|
| 11 |
+
|
| 12 |
+
"""
|
| 13 |
+
import time
|
| 14 |
+
import locale
|
| 15 |
+
import calendar
|
| 16 |
+
from re import compile as re_compile
|
| 17 |
+
from re import IGNORECASE
|
| 18 |
+
from re import escape as re_escape
|
| 19 |
+
from datetime import (date as datetime_date,
|
| 20 |
+
timedelta as datetime_timedelta,
|
| 21 |
+
timezone as datetime_timezone)
|
| 22 |
+
from _thread import allocate_lock as _thread_allocate_lock
|
| 23 |
+
|
| 24 |
+
__all__ = []
|
| 25 |
+
|
| 26 |
+
def _getlang():
|
| 27 |
+
# Figure out what the current language is set to.
|
| 28 |
+
return locale.getlocale(locale.LC_TIME)
|
| 29 |
+
|
| 30 |
+
class LocaleTime(object):
    """Stores and handles locale-specific information related to time.

    ATTRIBUTES:
        f_weekday -- full weekday names (7-item list)
        a_weekday -- abbreviated weekday names (7-item list)
        f_month -- full month names (13-item list; dummy value in [0], which
                    is added by code)
        a_month -- abbreviated month names (13-item list, dummy value in
                    [0], which is added by code)
        am_pm -- AM/PM representation (2-item list)
        LC_date_time -- format string for date/time representation (string)
        LC_date -- format string for date representation (string)
        LC_time -- format string for time representation (string)
        timezone -- daylight- and non-daylight-savings timezone representation
                    (2-item list of sets)
        lang -- Language used by instance (2-item tuple)
    """

    def __init__(self):
        """Set all attributes.

        Order of methods called matters for dependency reasons.

        The locale language is set at the offset and then checked again before
        exiting.  This is to make sure that the attributes were not set with a
        mix of information from more than one locale.  This would most likely
        happen when using threads where one thread calls a locale-dependent
        function while another thread changes the locale while the function in
        the other thread is still running.  Proper coding would call for
        locks to prevent changing the locale while locale-dependent code is
        running.  The check here is done in case someone does not think about
        doing this.

        Only other possible issue is if someone changed the timezone and did
        not call tz.tzset .  That is an issue for the programmer, though,
        since changing the timezone is worthless without that call.

        """
        self.lang = _getlang()
        self.__calc_weekday()
        self.__calc_month()
        self.__calc_am_pm()
        self.__calc_timezone()
        self.__calc_date_time()
        # Re-check: fail loudly if another thread changed the locale or
        # timezone while the locale data was being read above.
        if _getlang() != self.lang:
            raise ValueError("locale changed during initialization")
        if time.tzname != self.tzname or time.daylight != self.daylight:
            raise ValueError("timezone changed during initialization")

    def __calc_weekday(self):
        # Set self.a_weekday and self.f_weekday using the calendar
        # module.
        a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
        f_weekday = [calendar.day_name[i].lower() for i in range(7)]
        self.a_weekday = a_weekday
        self.f_weekday = f_weekday

    def __calc_month(self):
        # Set self.f_month and self.a_month using the calendar module.
        a_month = [calendar.month_abbr[i].lower() for i in range(13)]
        f_month = [calendar.month_name[i].lower() for i in range(13)]
        self.a_month = a_month
        self.f_month = f_month

    def __calc_am_pm(self):
        # Set self.am_pm by using time.strftime().

        # The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
        # magical; just happened to have used it everywhere else where a
        # static date was needed.
        am_pm = []
        for hour in (1, 22):
            time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
            am_pm.append(time.strftime("%p", time_tuple).lower())
        self.am_pm = am_pm

    def __calc_date_time(self):
        # Set self.date_time, self.date, & self.time by using
        # time.strftime().

        # Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
        # overloaded numbers is minimized.  The order in which searches for
        # values within the format string is very important; it eliminates
        # possible ambiguity for what something represents.
        time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
        date_time = [None, None, None]
        date_time[0] = time.strftime("%c", time_tuple).lower()
        date_time[1] = time.strftime("%x", time_tuple).lower()
        date_time[2] = time.strftime("%X", time_tuple).lower()
        # Substitute each known rendered value back with its directive to
        # recover the locale's format strings from the formatted output.
        replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
                    (self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
                    (self.a_month[3], '%b'), (self.am_pm[1], '%p'),
                    ('1999', '%Y'), ('99', '%y'), ('22', '%H'),
                    ('44', '%M'), ('55', '%S'), ('76', '%j'),
                    ('17', '%d'), ('03', '%m'), ('3', '%m'),
                    # '3' needed for when no leading zero.
                    ('2', '%w'), ('10', '%I')]
        replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
                                                for tz in tz_values])
        for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
            current_format = date_time[offset]
            for old, new in replacement_pairs:
                # Must deal with possible lack of locale info
                # manifesting itself as the empty string (e.g., Swedish's
                # lack of AM/PM info) or a platform returning a tuple of empty
                # strings (e.g., MacOS 9 having timezone as ('','')).
                if old:
                    current_format = current_format.replace(old, new)
            # If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
            # 2005-01-03 occurs before the first Monday of the year.  Otherwise
            # %U is used.
            time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
            if '00' in time.strftime(directive, time_tuple):
                U_W = '%W'
            else:
                U_W = '%U'
            date_time[offset] = current_format.replace('11', U_W)
        self.LC_date_time = date_time[0]
        self.LC_date = date_time[1]
        self.LC_time = date_time[2]

    def __calc_timezone(self):
        # Set self.timezone by using time.tzname.
        # Do not worry about possibility of time.tzname[0] == time.tzname[1]
        # and time.daylight; handle that in strptime.
        try:
            time.tzset()
        except AttributeError:
            # time.tzset() does not exist on all platforms (e.g. Windows).
            pass
        self.tzname = time.tzname
        self.daylight = time.daylight
        no_saving = frozenset({"utc", "gmt", self.tzname[0].lower()})
        if self.daylight:
            has_saving = frozenset({self.tzname[1].lower()})
        else:
            has_saving = frozenset()
        self.timezone = (no_saving, has_saving)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class TimeRE(dict):
    """Handle conversion from format directives to regexes."""

    def __init__(self, locale_time=None):
        """Create keys/values.

        Order of execution is important for dependency reasons.

        *locale_time* may be a pre-built LocaleTime instance; otherwise a
        fresh one is constructed from the current locale.
        """
        if locale_time:
            self.locale_time = locale_time
        else:
            self.locale_time = LocaleTime()
        base = super()
        base.__init__({
            # The " [1-9]" part of the regex is to make %c from ANSI C work
            'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
            'f': r"(?P<f>[0-9]{1,6})",
            'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
            'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
            'G': r"(?P<G>\d\d\d\d)",
            'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
            'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
            'M': r"(?P<M>[0-5]\d|\d)",
            'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
            'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
            'w': r"(?P<w>[0-6])",
            'u': r"(?P<u>[1-7])",
            'V': r"(?P<V>5[0-3]|0[1-9]|[1-4]\d|\d)",
            # W is set below by using 'U'
            'y': r"(?P<y>\d\d)",
            #XXX: Does 'Y' need to worry about having less or more than
            #     4 digits?
            'Y': r"(?P<Y>\d\d\d\d)",
            'z': r"(?P<z>[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|(?-i:Z))",
            'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
            'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
            'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
            'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
            'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
            'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
                                        for tz in tz_names),
                                'Z'),
            '%': '%'})
        base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
        base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
        base.__setitem__('x', self.pattern(self.locale_time.LC_date))
        base.__setitem__('X', self.pattern(self.locale_time.LC_time))

    def __seqToRE(self, to_convert, directive):
        """Convert a list to a regex string for matching a directive.

        Want possible matching values to be from longest to shortest.  This
        prevents the possibility of a match occurring for a value that also
        a substring of a larger value that should have matched (e.g., 'abc'
        matching when 'abcdef' should have been the match).

        """
        to_convert = sorted(to_convert, key=len, reverse=True)
        for value in to_convert:
            if value != '':
                break
        else:
            # Every entry was the empty string (no locale info available);
            # return a pattern that matches nothing for this directive.
            return ''
        regex = '|'.join(re_escape(stuff) for stuff in to_convert)
        regex = '(?P<%s>%s' % (directive, regex)
        return '%s)' % regex

    def pattern(self, format):
        """Return regex pattern for the format string.

        Need to make sure that any characters that might be interpreted as
        regex syntax are escaped.

        """
        processed_format = ''
        # The sub() call escapes all characters that might be misconstrued
        # as regex syntax.  Cannot use re.escape since we have to deal with
        # format directives (%m, etc.).
        regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
        format = regex_chars.sub(r"\\\1", format)
        # Any run of whitespace in the format becomes a tolerant \s+ match.
        whitespace_replacement = re_compile(r'\s+')
        format = whitespace_replacement.sub(r'\\s+', format)
        # Replace each %X directive with its regex from this dict, copying
        # literal text through untouched.
        while '%' in format:
            directive_index = format.index('%')+1
            processed_format = "%s%s%s" % (processed_format,
                                           format[:directive_index-1],
                                           self[format[directive_index]])
            format = format[directive_index+1:]
        return "%s%s" % (processed_format, format)

    def compile(self, format):
        """Return a compiled re object for the format string."""
        return re_compile(self.pattern(format), IGNORECASE)
|
| 264 |
+
|
| 265 |
+
# Lock guarding the module-level parsing caches below; _strptime may be
# called concurrently from multiple threads.
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
# Locale-dependent directive-to-regex table; rebuilt when the locale or
# timezone settings change (see _strptime).
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
# Maps format strings to compiled regex objects.
_regex_cache = {}
|
| 271 |
+
|
| 272 |
+
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
    """Calculate the Julian day based on the year, week of the year, and day of
    the week, with week_start_day representing whether the week of the year
    assumes the week starts on Sunday or Monday (6 or 0)."""
    jan1_weekday = datetime_date(year, 1, 1).weekday()
    if not week_starts_Mon:
        # %U weeks start on Sunday: rotate both weekday values so Sunday
        # becomes day 0 and the arithmetic below stays uniform.
        jan1_weekday = (jan1_weekday + 1) % 7
        day_of_week = (day_of_week + 1) % 7
    # Days belonging to "week 0" -- present whenever Jan 1 is not the first
    # day of a %U/%W week.
    week_0_length = (7 - jan1_weekday) % 7
    if week_of_year == 0:
        return 1 + day_of_week - jan1_weekday
    days_before = week_0_length + 7 * (week_of_year - 1)
    return 1 + days_before + day_of_week
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def _calc_julian_from_V(iso_year, iso_week, iso_weekday):
    """Calculate the Julian day based on the ISO 8601 year, week, and weekday.
    ISO weeks start on Mondays, with week 01 being the week containing 4 Jan.
    ISO week days range from 1 (Monday) to 7 (Sunday).
    """
    # Position of Jan 4 anchors ISO week 1 within the calendar year.
    correction = datetime_date(iso_year, 1, 4).isoweekday() + 3
    day_of_year = iso_week * 7 + iso_weekday - correction
    if day_of_year < 1:
        # The date belongs to the tail of the previous calendar year;
        # re-express it as a day number within that year.
        day_of_year += datetime_date(iso_year, 1, 1).toordinal()
        iso_year -= 1
        day_of_year -= datetime_date(iso_year, 1, 1).toordinal()
    return iso_year, day_of_year
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a 3-tuple consisting of a time struct, an int containing the
    number of microseconds, and an int containing the microseconds of the
    UTC offset, based on the input string and the format string."""

    # Both arguments must be str; bytes are rejected explicitly.
    for index, arg in enumerate([data_string, format]):
        if not isinstance(arg, str):
            msg = "strptime() argument {} must be str, not {}"
            raise TypeError(msg.format(index, type(arg)))

    global _TimeRE_cache, _regex_cache
    with _cache_lock:
        locale_time = _TimeRE_cache.locale_time
        # The locale or timezone settings can change at runtime; rebuild
        # the cached TimeRE (and drop all compiled regexes) whenever they
        # differ from the values captured when the cache was built.
        if (_getlang() != locale_time.lang or
            time.tzname != locale_time.tzname or
            time.daylight != locale_time.daylight):
            _TimeRE_cache = TimeRE()
            _regex_cache.clear()
            locale_time = _TimeRE_cache.locale_time
        # Crude bound on the regex cache size: wipe it when it grows past
        # _CACHE_MAX_SIZE rather than evicting individual entries.
        if len(_regex_cache) > _CACHE_MAX_SIZE:
            _regex_cache.clear()
        format_regex = _regex_cache.get(format)
        if not format_regex:
            try:
                format_regex = _TimeRE_cache.compile(format)
            # KeyError raised when a bad format is found; can be specified as
            # \\, in which case it was a stray % but with a space after it
            except KeyError as err:
                bad_directive = err.args[0]
                if bad_directive == "\\":
                    bad_directive = "%"
                del err
                raise ValueError("'%s' is a bad directive in format '%s'" %
                                    (bad_directive, format)) from None
            # IndexError only occurs when the format string is "%"
            except IndexError:
                raise ValueError("stray %% in format '%s'" % format) from None
            _regex_cache[format] = format_regex
    found = format_regex.match(data_string)
    if not found:
        raise ValueError("time data %r does not match format %r" %
                         (data_string, format))
    # The match must consume the entire input string.
    if len(data_string) != found.end():
        raise ValueError("unconverted data remains: %s" %
                          data_string[found.end():])

    # Defaults for every field a directive may fill in.
    iso_year = year = None
    month = day = 1
    hour = minute = second = fraction = 0
    tz = -1
    gmtoff = None
    gmtoff_fraction = 0
    # Default to -1 to signify that values not known; not critical to have,
    # though
    iso_week = week_of_year = None
    week_of_year_start = None
    # weekday and julian defaulted to None so as to signal need to calculate
    # values
    weekday = julian = None
    found_dict = found.groupdict()
    for group_key in found_dict.keys():
        # Directives not explicitly handled below:
        #   c, x, X
        #      handled by making out of other directives
        #   U, W
        #      worthless without day of the week
        if group_key == 'y':
            year = int(found_dict['y'])
            # Open Group specification for strptime() states that a %y
            #value in the range of [00, 68] is in the century 2000, while
            #[69,99] is in the century 1900
            if year <= 68:
                year += 2000
            else:
                year += 1900
        elif group_key == 'Y':
            year = int(found_dict['Y'])
        elif group_key == 'G':
            iso_year = int(found_dict['G'])
        elif group_key == 'm':
            month = int(found_dict['m'])
        elif group_key == 'B':
            # Month names are matched case-insensitively against the
            # locale's lowercased name lists.
            month = locale_time.f_month.index(found_dict['B'].lower())
        elif group_key == 'b':
            month = locale_time.a_month.index(found_dict['b'].lower())
        elif group_key == 'd':
            day = int(found_dict['d'])
        elif group_key == 'H':
            hour = int(found_dict['H'])
        elif group_key == 'I':
            # 12-hour clock: interpretation depends on an optional %p.
            hour = int(found_dict['I'])
            ampm = found_dict.get('p', '').lower()
            # If there was no AM/PM indicator, we'll treat this like AM
            if ampm in ('', locale_time.am_pm[0]):
                # We're in AM so the hour is correct unless we're
                # looking at 12 midnight.
                # 12 midnight == 12 AM == hour 0
                if hour == 12:
                    hour = 0
            elif ampm == locale_time.am_pm[1]:
                # We're in PM so we need to add 12 to the hour unless
                # we're looking at 12 noon.
                # 12 noon == 12 PM == hour 12
                if hour != 12:
                    hour += 12
        elif group_key == 'M':
            minute = int(found_dict['M'])
        elif group_key == 'S':
            second = int(found_dict['S'])
        elif group_key == 'f':
            s = found_dict['f']
            # Pad to always return microseconds.
            s += "0" * (6 - len(s))
            fraction = int(s)
        elif group_key == 'A':
            weekday = locale_time.f_weekday.index(found_dict['A'].lower())
        elif group_key == 'a':
            weekday = locale_time.a_weekday.index(found_dict['a'].lower())
        elif group_key == 'w':
            # %w uses Sunday=0; convert to Monday=0 (date.weekday()).
            weekday = int(found_dict['w'])
            if weekday == 0:
                weekday = 6
            else:
                weekday -= 1
        elif group_key == 'u':
            # %u uses Monday=1..Sunday=7; convert to Monday=0.
            weekday = int(found_dict['u'])
            weekday -= 1
        elif group_key == 'j':
            julian = int(found_dict['j'])
        elif group_key in ('U', 'W'):
            week_of_year = int(found_dict[group_key])
            if group_key == 'U':
                # U starts week on Sunday.
                week_of_year_start = 6
            else:
                # W starts week on Monday.
                week_of_year_start = 0
        elif group_key == 'V':
            iso_week = int(found_dict['V'])
        elif group_key == 'z':
            z = found_dict['z']
            if z == 'Z':
                # Literal 'Z' means UTC.
                gmtoff = 0
            else:
                # Normalize '[+-]HH:MM[:SS[.ffffff]]' to '[+-]HHMMSS.ffffff'
                # by stripping the colons, rejecting mixed styles.
                if z[3] == ':':
                    z = z[:3] + z[4:]
                    if len(z) > 5:
                        if z[5] != ':':
                            msg = f"Inconsistent use of : in {found_dict['z']}"
                            raise ValueError(msg)
                        z = z[:5] + z[6:]
                hours = int(z[1:3])
                minutes = int(z[3:5])
                seconds = int(z[5:7] or 0)
                gmtoff = (hours * 60 * 60) + (minutes * 60) + seconds
                gmtoff_remainder = z[8:]
                # Pad to always return microseconds.
                gmtoff_remainder_padding = "0" * (6 - len(gmtoff_remainder))
                gmtoff_fraction = int(gmtoff_remainder + gmtoff_remainder_padding)
                if z.startswith("-"):
                    gmtoff = -gmtoff
                    gmtoff_fraction = -gmtoff_fraction
        elif group_key == 'Z':
            # Since -1 is default value only need to worry about setting tz if
            # it can be something other than -1.
            found_zone = found_dict['Z'].lower()
            for value, tz_values in enumerate(locale_time.timezone):
                if found_zone in tz_values:
                    # Deal with bad locale setup where timezone names are the
                    # same and yet time.daylight is true; too ambiguous to
                    # be able to tell what timezone has daylight savings
                    if (time.tzname[0] == time.tzname[1] and
                       time.daylight and found_zone not in ("utc", "gmt")):
                        break
                    else:
                        tz = value
                        break
    # Deal with the cases where ambiguities arise
    # don't assume default values for ISO week/year
    if year is None and iso_year is not None:
        if iso_week is None or weekday is None:
            raise ValueError("ISO year directive '%G' must be used with "
                             "the ISO week directive '%V' and a weekday "
                             "directive ('%A', '%a', '%w', or '%u').")
        if julian is not None:
            raise ValueError("Day of the year directive '%j' is not "
                             "compatible with ISO year directive '%G'. "
                             "Use '%Y' instead.")
    elif week_of_year is None and iso_week is not None:
        if weekday is None:
            raise ValueError("ISO week directive '%V' must be used with "
                             "the ISO year directive '%G' and a weekday "
                             "directive ('%A', '%a', '%w', or '%u').")
        else:
            raise ValueError("ISO week directive '%V' is incompatible with "
                             "the year directive '%Y'. Use the ISO year '%G' "
                             "instead.")

    leap_year_fix = False
    if year is None and month == 2 and day == 29:
        year = 1904  # 1904 is first leap year of 20th century
        leap_year_fix = True
    elif year is None:
        year = 1900


    # If we know the week of the year and what day of that week, we can figure
    # out the Julian day of the year.
    if julian is None and weekday is not None:
        if week_of_year is not None:
            week_starts_Mon = True if week_of_year_start == 0 else False
            julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                              week_starts_Mon)
        elif iso_year is not None and iso_week is not None:
            # _calc_julian_from_V takes ISO weekdays (Monday=1).
            year, julian = _calc_julian_from_V(iso_year, iso_week, weekday + 1)
        if julian is not None and julian <= 0:
            # Week 0 dates fall in the previous calendar year.
            year -= 1
            yday = 366 if calendar.isleap(year) else 365
            julian += yday

    if julian is None:
        # Cannot pre-calculate datetime_date() since can change in Julian
        # calculation and thus could have different value for the day of
        # the week calculation.
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - \
                  datetime_date(year, 1, 1).toordinal() + 1
    else:  # Assume that if they bothered to include Julian day (or if it was
           # calculated above with year/week/weekday) it will be accurate.
        datetime_result = datetime_date.fromordinal(
                            (julian - 1) +
                            datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
        day = datetime_result.day
    if weekday is None:
        weekday = datetime_date(year, month, day).weekday()
    # Add timezone info
    tzname = found_dict.get("Z")

    if leap_year_fix:
        # the caller didn't supply a year but asked for Feb 29th. We couldn't
        # use the default of 1900 for computations. We set it back to ensure
        # that February 29th is smaller than March 1st.
        year = 1900

    return (year, month, day,
            hour, minute, second,
            weekday, julian, tz, tzname, gmtoff), fraction, gmtoff_fraction
|
| 558 |
+
|
| 559 |
+
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a time struct based on the input string and the
    format string."""
    parsed = _strptime(data_string, format)[0]
    # struct_time takes only the first _STRUCT_TM_ITEMS fields; the trailing
    # gmtoff entry of the parsed tuple is dropped.
    return time.struct_time(parsed[:time._STRUCT_TM_ITEMS])
|
| 564 |
+
|
| 565 |
+
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a class cls instance based on the input string and the
    format string."""
    parsed, fraction, gmtoff_fraction = _strptime(data_string, format)
    tzname, gmtoff = parsed[-2:]
    ctor_args = parsed[:6] + (fraction,)
    if gmtoff is not None:
        # A UTC offset was parsed (%z): attach a fixed-offset timezone,
        # named when a zone name was parsed as well.
        delta = datetime_timedelta(seconds=gmtoff,
                                   microseconds=gmtoff_fraction)
        if tzname:
            ctor_args += (datetime_timezone(delta, tzname),)
        else:
            ctor_args += (datetime_timezone(delta),)

    return cls(*ctor_args)
|
evalkit_cambrian/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py.orig
ADDED
|
@@ -0,0 +1,986 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# system configuration generated and used by the sysconfig module
|
| 2 |
+
build_time_vars = {'ABIFLAGS': '',
|
| 3 |
+
'AC_APPLE_UNIVERSAL_BUILD': 0,
|
| 4 |
+
'AIX_BUILDDATE': 0,
|
| 5 |
+
'AIX_GENUINE_CPLUSPLUS': 0,
|
| 6 |
+
'ALIGNOF_LONG': 8,
|
| 7 |
+
'ALIGNOF_SIZE_T': 8,
|
| 8 |
+
'ALT_SOABI': 0,
|
| 9 |
+
'ANDROID_API_LEVEL': 0,
|
| 10 |
+
'AR': 'x86_64-conda-linux-gnu-ar',
|
| 11 |
+
'ARFLAGS': 'rcs',
|
| 12 |
+
'BASECFLAGS': '-Wno-unused-result -Wsign-compare',
|
| 13 |
+
'BASECPPFLAGS': '-IObjects -IInclude -IPython',
|
| 14 |
+
'BASEMODLIBS': '',
|
| 15 |
+
'BINDIR': '/root/envs/evalkit_cambrian/bin',
|
| 16 |
+
'BINLIBDEST': '/root/envs/evalkit_cambrian/lib/python3.10',
|
| 17 |
+
'BLDLIBRARY': 'libpython3.10.a',
|
| 18 |
+
'BLDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 19 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 20 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 21 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 22 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 23 |
+
'-L/root/envs/evalkit_cambrian/lib '
|
| 24 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 25 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 26 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 27 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 28 |
+
'-L/root/envs/evalkit_cambrian/lib',
|
| 29 |
+
'BUILDEXE': '',
|
| 30 |
+
'BUILDPYTHON': 'python',
|
| 31 |
+
'BUILD_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 32 |
+
'BYTESTR_DEPS': '\\',
|
| 33 |
+
'CC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 34 |
+
'CCSHARED': '-fPIC',
|
| 35 |
+
'CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 36 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 37 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 38 |
+
'-isystem '
|
| 39 |
+
'/root/envs/evalkit_cambrian/include '
|
| 40 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 41 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 42 |
+
' '
|
| 43 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 44 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 45 |
+
'-isystem '
|
| 46 |
+
'/root/envs/evalkit_cambrian/include '
|
| 47 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 48 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 49 |
+
' ',
|
| 50 |
+
'CFLAGSFORSHARED': '',
|
| 51 |
+
'CFLAGS_ALIASING': '',
|
| 52 |
+
'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in '
|
| 53 |
+
'Makefile.pre.in',
|
| 54 |
+
'CONFIGURE_CFLAGS': '-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 55 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 56 |
+
'-ffunction-sections -pipe -isystem '
|
| 57 |
+
'/root/envs/evalkit_cambrian/include '
|
| 58 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 59 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 60 |
+
' '
|
| 61 |
+
' ',
|
| 62 |
+
'CONFIGURE_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 63 |
+
' '
|
| 64 |
+
' -g -std=c99 -Wextra '
|
| 65 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 66 |
+
'-Wno-missing-field-initializers '
|
| 67 |
+
'-Werror=implicit-function-declaration '
|
| 68 |
+
'-fvisibility=hidden',
|
| 69 |
+
'CONFIGURE_CPPFLAGS': '-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 70 |
+
'/root/envs/evalkit_cambrian/include '
|
| 71 |
+
'-I/root/envs/evalkit_cambrian/include',
|
| 72 |
+
'CONFIGURE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 73 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 74 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 75 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 76 |
+
'-L/root/envs/evalkit_cambrian/lib',
|
| 77 |
+
'CONFIGURE_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 78 |
+
' '
|
| 79 |
+
' -g',
|
| 80 |
+
'CONFIG_ARGS': "'--prefix=/root/envs/evalkit_cambrian' "
|
| 81 |
+
"'--build=x86_64-conda-linux-gnu' "
|
| 82 |
+
"'--host=x86_64-conda-linux-gnu' '--enable-ipv6' "
|
| 83 |
+
"'--with-ensurepip=no' "
|
| 84 |
+
"'--with-tzpath=/root/envs/evalkit_cambrian/share/zoneinfo' "
|
| 85 |
+
"'--with-computed-gotos' '--with-system-ffi' "
|
| 86 |
+
"'--enable-loadable-sqlite-extensions' "
|
| 87 |
+
"'--with-tcltk-includes=-I/root/envs/evalkit_cambrian/include' "
|
| 88 |
+
"'--with-tcltk-libs=-L/root/envs/evalkit_cambrian/lib "
|
| 89 |
+
"-ltcl8.6 -ltk8.6' '--with-platlibdir=lib' '--with-lto' "
|
| 90 |
+
"'--enable-optimizations' "
|
| 91 |
+
"'-oldincludedir=/croot/python-split_1733933809325/_build_env/x86_64-conda-linux-gnu/sysroot/usr/include' "
|
| 92 |
+
"'--disable-shared' 'PROFILE_TASK=-m test --pgo' "
|
| 93 |
+
"'build_alias=x86_64-conda-linux-gnu' "
|
| 94 |
+
"'host_alias=x86_64-conda-linux-gnu' 'MACHDEP=linux' "
|
| 95 |
+
"'CC=x86_64-conda-linux-gnu-gcc' 'CFLAGS=-march=nocona "
|
| 96 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 97 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 98 |
+
'-pipe -isystem '
|
| 99 |
+
'/root/envs/evalkit_cambrian/include '
|
| 100 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 101 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 102 |
+
' '
|
| 103 |
+
"' 'LDFLAGS=-Wl,-O2 -Wl,--sort-common -Wl,--as-needed "
|
| 104 |
+
'-Wl,-z,relro -Wl,-z,now -Wl,--disable-new-dtags '
|
| 105 |
+
'-Wl,--gc-sections '
|
| 106 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 107 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 108 |
+
"-L/root/envs/evalkit_cambrian/lib' "
|
| 109 |
+
"'CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem "
|
| 110 |
+
'/root/envs/evalkit_cambrian/include '
|
| 111 |
+
"-I/root/envs/evalkit_cambrian/include' "
|
| 112 |
+
"'CPP=/croot/python-split_1733933809325/_build_env/bin/x86_64-conda-linux-gnu-cpp' "
|
| 113 |
+
"'PKG_CONFIG_PATH=/root/envs/evalkit_cambrian/lib/pkgconfig'",
|
| 114 |
+
'CONFINCLUDEDIR': '/root/envs/evalkit_cambrian/include',
|
| 115 |
+
'CONFINCLUDEPY': '/root/envs/evalkit_cambrian/include/python3.10',
|
| 116 |
+
'COREPYTHONPATH': '',
|
| 117 |
+
'COVERAGE_INFO': '/croot/python-split_1733933809325/work/build-static/coverage.info',
|
| 118 |
+
'COVERAGE_REPORT': '/croot/python-split_1733933809325/work/build-static/lcov-report',
|
| 119 |
+
'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov '
|
| 120 |
+
'report"',
|
| 121 |
+
'CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 122 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 123 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 124 |
+
'/root/envs/evalkit_cambrian/include '
|
| 125 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 126 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 127 |
+
'/root/envs/evalkit_cambrian/include '
|
| 128 |
+
'-I/root/envs/evalkit_cambrian/include',
|
| 129 |
+
'CXX': 'x86_64-conda-linux-gnu-c++ -pthread',
|
| 130 |
+
'DESTDIRS': '/root/envs/evalkit_cambrian '
|
| 131 |
+
'/root/envs/evalkit_cambrian/lib '
|
| 132 |
+
'/root/envs/evalkit_cambrian/lib/python3.10 '
|
| 133 |
+
'/root/envs/evalkit_cambrian/lib/python3.10/lib-dynload',
|
| 134 |
+
'DESTLIB': '/root/envs/evalkit_cambrian/lib/python3.10',
|
| 135 |
+
'DESTPATH': '',
|
| 136 |
+
'DESTSHARED': '/root/envs/evalkit_cambrian/lib/python3.10/lib-dynload',
|
| 137 |
+
'DFLAGS': '',
|
| 138 |
+
'DIRMODE': 755,
|
| 139 |
+
'DIST': 'README.rst ChangeLog configure configure.ac acconfig.h pyconfig.h.in '
|
| 140 |
+
'Makefile.pre.in Include Lib Misc Ext-dummy',
|
| 141 |
+
'DISTDIRS': 'Include Lib Misc Ext-dummy',
|
| 142 |
+
'DISTFILES': 'README.rst ChangeLog configure configure.ac acconfig.h '
|
| 143 |
+
'pyconfig.h.in Makefile.pre.in',
|
| 144 |
+
'DLINCLDIR': '.',
|
| 145 |
+
'DLLLIBRARY': '',
|
| 146 |
+
'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0,
|
| 147 |
+
'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0,
|
| 148 |
+
'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1,
|
| 149 |
+
'DTRACE': '',
|
| 150 |
+
'DTRACE_DEPS': '\\',
|
| 151 |
+
'DTRACE_HEADERS': '',
|
| 152 |
+
'DTRACE_OBJS': '',
|
| 153 |
+
'DYNLOADFILE': 'dynload_shlib.o',
|
| 154 |
+
'ENABLE_IPV6': 1,
|
| 155 |
+
'ENSUREPIP': 'no',
|
| 156 |
+
'EXE': '',
|
| 157 |
+
'EXEMODE': 755,
|
| 158 |
+
'EXPERIMENTAL_ISOLATED_SUBINTERPRETERS': 0,
|
| 159 |
+
'EXPORTSFROM': '',
|
| 160 |
+
'EXPORTSYMS': '',
|
| 161 |
+
'EXTRATESTOPTS': '',
|
| 162 |
+
'EXT_SUFFIX': '.cpython-310-x86_64-linux-gnu.so',
|
| 163 |
+
'FILEMODE': 644,
|
| 164 |
+
'FLOAT_WORDS_BIGENDIAN': 0,
|
| 165 |
+
'FLOCK_NEEDS_LIBBSD': 0,
|
| 166 |
+
'GETPGRP_HAVE_ARG': 0,
|
| 167 |
+
'GITBRANCH': '',
|
| 168 |
+
'GITTAG': '',
|
| 169 |
+
'GITVERSION': '',
|
| 170 |
+
'GNULD': 'yes',
|
| 171 |
+
'HAVE_ACCEPT4': 1,
|
| 172 |
+
'HAVE_ACOSH': 1,
|
| 173 |
+
'HAVE_ADDRINFO': 1,
|
| 174 |
+
'HAVE_ALARM': 1,
|
| 175 |
+
'HAVE_ALIGNED_REQUIRED': 0,
|
| 176 |
+
'HAVE_ALLOCA_H': 1,
|
| 177 |
+
'HAVE_ALTZONE': 0,
|
| 178 |
+
'HAVE_ASINH': 1,
|
| 179 |
+
'HAVE_ASM_TYPES_H': 1,
|
| 180 |
+
'HAVE_ATANH': 1,
|
| 181 |
+
'HAVE_BIND_TEXTDOMAIN_CODESET': 1,
|
| 182 |
+
'HAVE_BLUETOOTH_BLUETOOTH_H': 0,
|
| 183 |
+
'HAVE_BLUETOOTH_H': 0,
|
| 184 |
+
'HAVE_BROKEN_MBSTOWCS': 0,
|
| 185 |
+
'HAVE_BROKEN_NICE': 0,
|
| 186 |
+
'HAVE_BROKEN_PIPE_BUF': 0,
|
| 187 |
+
'HAVE_BROKEN_POLL': 0,
|
| 188 |
+
'HAVE_BROKEN_POSIX_SEMAPHORES': 0,
|
| 189 |
+
'HAVE_BROKEN_PTHREAD_SIGMASK': 0,
|
| 190 |
+
'HAVE_BROKEN_SEM_GETVALUE': 0,
|
| 191 |
+
'HAVE_BROKEN_UNSETENV': 0,
|
| 192 |
+
'HAVE_BUILTIN_ATOMIC': 1,
|
| 193 |
+
'HAVE_CHFLAGS': 0,
|
| 194 |
+
'HAVE_CHOWN': 1,
|
| 195 |
+
'HAVE_CHROOT': 1,
|
| 196 |
+
'HAVE_CLOCK': 1,
|
| 197 |
+
'HAVE_CLOCK_GETRES': 1,
|
| 198 |
+
'HAVE_CLOCK_GETTIME': 1,
|
| 199 |
+
'HAVE_CLOCK_SETTIME': 1,
|
| 200 |
+
'HAVE_CLOSE_RANGE': 0,
|
| 201 |
+
'HAVE_COMPUTED_GOTOS': 1,
|
| 202 |
+
'HAVE_CONFSTR': 1,
|
| 203 |
+
'HAVE_CONIO_H': 0,
|
| 204 |
+
'HAVE_COPYSIGN': 1,
|
| 205 |
+
'HAVE_COPY_FILE_RANGE': 0,
|
| 206 |
+
'HAVE_CRYPT_H': 1,
|
| 207 |
+
'HAVE_CRYPT_R': 1,
|
| 208 |
+
'HAVE_CTERMID': 1,
|
| 209 |
+
'HAVE_CTERMID_R': 0,
|
| 210 |
+
'HAVE_CURSES_FILTER': 1,
|
| 211 |
+
'HAVE_CURSES_H': 1,
|
| 212 |
+
'HAVE_CURSES_HAS_KEY': 1,
|
| 213 |
+
'HAVE_CURSES_IMMEDOK': 1,
|
| 214 |
+
'HAVE_CURSES_IS_PAD': 1,
|
| 215 |
+
'HAVE_CURSES_IS_TERM_RESIZED': 1,
|
| 216 |
+
'HAVE_CURSES_RESIZETERM': 1,
|
| 217 |
+
'HAVE_CURSES_RESIZE_TERM': 1,
|
| 218 |
+
'HAVE_CURSES_SYNCOK': 1,
|
| 219 |
+
'HAVE_CURSES_TYPEAHEAD': 1,
|
| 220 |
+
'HAVE_CURSES_USE_ENV': 1,
|
| 221 |
+
'HAVE_CURSES_WCHGAT': 1,
|
| 222 |
+
'HAVE_DECL_ISFINITE': 1,
|
| 223 |
+
'HAVE_DECL_ISINF': 1,
|
| 224 |
+
'HAVE_DECL_ISNAN': 1,
|
| 225 |
+
'HAVE_DECL_RTLD_DEEPBIND': 1,
|
| 226 |
+
'HAVE_DECL_RTLD_GLOBAL': 1,
|
| 227 |
+
'HAVE_DECL_RTLD_LAZY': 1,
|
| 228 |
+
'HAVE_DECL_RTLD_LOCAL': 1,
|
| 229 |
+
'HAVE_DECL_RTLD_MEMBER': 0,
|
| 230 |
+
'HAVE_DECL_RTLD_NODELETE': 1,
|
| 231 |
+
'HAVE_DECL_RTLD_NOLOAD': 1,
|
| 232 |
+
'HAVE_DECL_RTLD_NOW': 1,
|
| 233 |
+
'HAVE_DECL_TZNAME': 0,
|
| 234 |
+
'HAVE_DEVICE_MACROS': 1,
|
| 235 |
+
'HAVE_DEV_PTC': 0,
|
| 236 |
+
'HAVE_DEV_PTMX': 1,
|
| 237 |
+
'HAVE_DIRECT_H': 0,
|
| 238 |
+
'HAVE_DIRENT_D_TYPE': 1,
|
| 239 |
+
'HAVE_DIRENT_H': 1,
|
| 240 |
+
'HAVE_DIRFD': 1,
|
| 241 |
+
'HAVE_DLFCN_H': 1,
|
| 242 |
+
'HAVE_DLOPEN': 1,
|
| 243 |
+
'HAVE_DUP2': 1,
|
| 244 |
+
'HAVE_DUP3': 1,
|
| 245 |
+
'HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH': 0,
|
| 246 |
+
'HAVE_DYNAMIC_LOADING': 1,
|
| 247 |
+
'HAVE_ENDIAN_H': 1,
|
| 248 |
+
'HAVE_EPOLL': 1,
|
| 249 |
+
'HAVE_EPOLL_CREATE1': 1,
|
| 250 |
+
'HAVE_ERF': 1,
|
| 251 |
+
'HAVE_ERFC': 1,
|
| 252 |
+
'HAVE_ERRNO_H': 1,
|
| 253 |
+
'HAVE_EVENTFD': 1,
|
| 254 |
+
'HAVE_EXECV': 1,
|
| 255 |
+
'HAVE_EXPLICIT_BZERO': 0,
|
| 256 |
+
'HAVE_EXPLICIT_MEMSET': 0,
|
| 257 |
+
'HAVE_EXPM1': 1,
|
| 258 |
+
'HAVE_FACCESSAT': 1,
|
| 259 |
+
'HAVE_FCHDIR': 1,
|
| 260 |
+
'HAVE_FCHMOD': 1,
|
| 261 |
+
'HAVE_FCHMODAT': 1,
|
| 262 |
+
'HAVE_FCHOWN': 1,
|
| 263 |
+
'HAVE_FCHOWNAT': 1,
|
| 264 |
+
'HAVE_FCNTL_H': 1,
|
| 265 |
+
'HAVE_FDATASYNC': 1,
|
| 266 |
+
'HAVE_FDOPENDIR': 1,
|
| 267 |
+
'HAVE_FDWALK': 0,
|
| 268 |
+
'HAVE_FEXECVE': 1,
|
| 269 |
+
'HAVE_FINITE': 1,
|
| 270 |
+
'HAVE_FLOCK': 1,
|
| 271 |
+
'HAVE_FORK': 1,
|
| 272 |
+
'HAVE_FORKPTY': 1,
|
| 273 |
+
'HAVE_FPATHCONF': 1,
|
| 274 |
+
'HAVE_FSEEK64': 0,
|
| 275 |
+
'HAVE_FSEEKO': 1,
|
| 276 |
+
'HAVE_FSTATAT': 1,
|
| 277 |
+
'HAVE_FSTATVFS': 1,
|
| 278 |
+
'HAVE_FSYNC': 1,
|
| 279 |
+
'HAVE_FTELL64': 0,
|
| 280 |
+
'HAVE_FTELLO': 1,
|
| 281 |
+
'HAVE_FTIME': 1,
|
| 282 |
+
'HAVE_FTRUNCATE': 1,
|
| 283 |
+
'HAVE_FUTIMENS': 1,
|
| 284 |
+
'HAVE_FUTIMES': 1,
|
| 285 |
+
'HAVE_FUTIMESAT': 1,
|
| 286 |
+
'HAVE_GAI_STRERROR': 1,
|
| 287 |
+
'HAVE_GAMMA': 1,
|
| 288 |
+
'HAVE_GCC_ASM_FOR_MC68881': 0,
|
| 289 |
+
'HAVE_GCC_ASM_FOR_X64': 1,
|
| 290 |
+
'HAVE_GCC_ASM_FOR_X87': 1,
|
| 291 |
+
'HAVE_GCC_UINT128_T': 1,
|
| 292 |
+
'HAVE_GETADDRINFO': 1,
|
| 293 |
+
'HAVE_GETC_UNLOCKED': 1,
|
| 294 |
+
'HAVE_GETENTROPY': 0,
|
| 295 |
+
'HAVE_GETGRGID_R': 1,
|
| 296 |
+
'HAVE_GETGRNAM_R': 1,
|
| 297 |
+
'HAVE_GETGROUPLIST': 1,
|
| 298 |
+
'HAVE_GETGROUPS': 1,
|
| 299 |
+
'HAVE_GETHOSTBYNAME': 0,
|
| 300 |
+
'HAVE_GETHOSTBYNAME_R': 1,
|
| 301 |
+
'HAVE_GETHOSTBYNAME_R_3_ARG': 0,
|
| 302 |
+
'HAVE_GETHOSTBYNAME_R_5_ARG': 0,
|
| 303 |
+
'HAVE_GETHOSTBYNAME_R_6_ARG': 1,
|
| 304 |
+
'HAVE_GETITIMER': 1,
|
| 305 |
+
'HAVE_GETLOADAVG': 1,
|
| 306 |
+
'HAVE_GETLOGIN': 1,
|
| 307 |
+
'HAVE_GETNAMEINFO': 1,
|
| 308 |
+
'HAVE_GETPAGESIZE': 1,
|
| 309 |
+
'HAVE_GETPEERNAME': 1,
|
| 310 |
+
'HAVE_GETPGID': 1,
|
| 311 |
+
'HAVE_GETPGRP': 1,
|
| 312 |
+
'HAVE_GETPID': 1,
|
| 313 |
+
'HAVE_GETPRIORITY': 1,
|
| 314 |
+
'HAVE_GETPWENT': 1,
|
| 315 |
+
'HAVE_GETPWNAM_R': 1,
|
| 316 |
+
'HAVE_GETPWUID_R': 1,
|
| 317 |
+
'HAVE_GETRANDOM': 0,
|
| 318 |
+
'HAVE_GETRANDOM_SYSCALL': 1,
|
| 319 |
+
'HAVE_GETRESGID': 1,
|
| 320 |
+
'HAVE_GETRESUID': 1,
|
| 321 |
+
'HAVE_GETSID': 1,
|
| 322 |
+
'HAVE_GETSPENT': 1,
|
| 323 |
+
'HAVE_GETSPNAM': 1,
|
| 324 |
+
'HAVE_GETWD': 1,
|
| 325 |
+
'HAVE_GLIBC_MEMMOVE_BUG': 0,
|
| 326 |
+
'HAVE_GRP_H': 1,
|
| 327 |
+
'HAVE_HSTRERROR': 1,
|
| 328 |
+
'HAVE_HTOLE64': 1,
|
| 329 |
+
'HAVE_HYPOT': 1,
|
| 330 |
+
'HAVE_IEEEFP_H': 0,
|
| 331 |
+
'HAVE_IF_NAMEINDEX': 1,
|
| 332 |
+
'HAVE_INET_ATON': 1,
|
| 333 |
+
'HAVE_INET_PTON': 1,
|
| 334 |
+
'HAVE_INITGROUPS': 1,
|
| 335 |
+
'HAVE_INTTYPES_H': 1,
|
| 336 |
+
'HAVE_IO_H': 0,
|
| 337 |
+
'HAVE_IPA_PURE_CONST_BUG': 0,
|
| 338 |
+
'HAVE_KILL': 1,
|
| 339 |
+
'HAVE_KILLPG': 1,
|
| 340 |
+
'HAVE_KQUEUE': 0,
|
| 341 |
+
'HAVE_LANGINFO_H': 1,
|
| 342 |
+
'HAVE_LARGEFILE_SUPPORT': 0,
|
| 343 |
+
'HAVE_LCHFLAGS': 0,
|
| 344 |
+
'HAVE_LCHMOD': 0,
|
| 345 |
+
'HAVE_LCHOWN': 1,
|
| 346 |
+
'HAVE_LGAMMA': 1,
|
| 347 |
+
'HAVE_LIBDL': 1,
|
| 348 |
+
'HAVE_LIBDLD': 0,
|
| 349 |
+
'HAVE_LIBIEEE': 0,
|
| 350 |
+
'HAVE_LIBINTL_H': 1,
|
| 351 |
+
'HAVE_LIBREADLINE': 1,
|
| 352 |
+
'HAVE_LIBRESOLV': 0,
|
| 353 |
+
'HAVE_LIBSENDFILE': 0,
|
| 354 |
+
'HAVE_LIBUTIL_H': 0,
|
| 355 |
+
'HAVE_LIBUUID': 1,
|
| 356 |
+
'HAVE_LINK': 1,
|
| 357 |
+
'HAVE_LINKAT': 1,
|
| 358 |
+
'HAVE_LINUX_AUXVEC_H': 1,
|
| 359 |
+
'HAVE_LINUX_CAN_BCM_H': 1,
|
| 360 |
+
'HAVE_LINUX_CAN_H': 1,
|
| 361 |
+
'HAVE_LINUX_CAN_J1939_H': 0,
|
| 362 |
+
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 1,
|
| 363 |
+
'HAVE_LINUX_CAN_RAW_H': 1,
|
| 364 |
+
'HAVE_LINUX_CAN_RAW_JOIN_FILTERS': 1,
|
| 365 |
+
'HAVE_LINUX_MEMFD_H': 1,
|
| 366 |
+
'HAVE_LINUX_NETLINK_H': 1,
|
| 367 |
+
'HAVE_LINUX_QRTR_H': 0,
|
| 368 |
+
'HAVE_LINUX_RANDOM_H': 1,
|
| 369 |
+
'HAVE_LINUX_TIPC_H': 1,
|
| 370 |
+
'HAVE_LINUX_VM_SOCKETS_H': 1,
|
| 371 |
+
'HAVE_LINUX_WAIT_H': 1,
|
| 372 |
+
'HAVE_LOCKF': 1,
|
| 373 |
+
'HAVE_LOG1P': 1,
|
| 374 |
+
'HAVE_LOG2': 1,
|
| 375 |
+
'HAVE_LONG_DOUBLE': 1,
|
| 376 |
+
'HAVE_LSTAT': 1,
|
| 377 |
+
'HAVE_LUTIMES': 1,
|
| 378 |
+
'HAVE_MADVISE': 1,
|
| 379 |
+
'HAVE_MAKEDEV': 1,
|
| 380 |
+
'HAVE_MBRTOWC': 1,
|
| 381 |
+
'HAVE_MEMFD_CREATE': 0,
|
| 382 |
+
'HAVE_MEMORY_H': 1,
|
| 383 |
+
'HAVE_MEMRCHR': 1,
|
| 384 |
+
'HAVE_MKDIRAT': 1,
|
| 385 |
+
'HAVE_MKFIFO': 1,
|
| 386 |
+
'HAVE_MKFIFOAT': 1,
|
| 387 |
+
'HAVE_MKNOD': 1,
|
| 388 |
+
'HAVE_MKNODAT': 1,
|
| 389 |
+
'HAVE_MKTIME': 1,
|
| 390 |
+
'HAVE_MMAP': 1,
|
| 391 |
+
'HAVE_MREMAP': 1,
|
| 392 |
+
'HAVE_NCURSES_H': 1,
|
| 393 |
+
'HAVE_NDIR_H': 0,
|
| 394 |
+
'HAVE_NETPACKET_PACKET_H': 1,
|
| 395 |
+
'HAVE_NET_IF_H': 1,
|
| 396 |
+
'HAVE_NICE': 1,
|
| 397 |
+
'HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION': 0,
|
| 398 |
+
'HAVE_OPENAT': 1,
|
| 399 |
+
'HAVE_OPENPTY': 1,
|
| 400 |
+
'HAVE_PATHCONF': 1,
|
| 401 |
+
'HAVE_PAUSE': 1,
|
| 402 |
+
'HAVE_PIPE2': 1,
|
| 403 |
+
'HAVE_PLOCK': 0,
|
| 404 |
+
'HAVE_POLL': 1,
|
| 405 |
+
'HAVE_POLL_H': 1,
|
| 406 |
+
'HAVE_POSIX_FADVISE': 1,
|
| 407 |
+
'HAVE_POSIX_FALLOCATE': 1,
|
| 408 |
+
'HAVE_POSIX_SPAWN': 1,
|
| 409 |
+
'HAVE_POSIX_SPAWNP': 1,
|
| 410 |
+
'HAVE_PREAD': 1,
|
| 411 |
+
'HAVE_PREADV': 1,
|
| 412 |
+
'HAVE_PREADV2': 0,
|
| 413 |
+
'HAVE_PRLIMIT': 1,
|
| 414 |
+
'HAVE_PROCESS_H': 0,
|
| 415 |
+
'HAVE_PROTOTYPES': 1,
|
| 416 |
+
'HAVE_PTHREAD_CONDATTR_SETCLOCK': 1,
|
| 417 |
+
'HAVE_PTHREAD_DESTRUCTOR': 0,
|
| 418 |
+
'HAVE_PTHREAD_GETCPUCLOCKID': 1,
|
| 419 |
+
'HAVE_PTHREAD_H': 1,
|
| 420 |
+
'HAVE_PTHREAD_INIT': 0,
|
| 421 |
+
'HAVE_PTHREAD_KILL': 1,
|
| 422 |
+
'HAVE_PTHREAD_SIGMASK': 1,
|
| 423 |
+
'HAVE_PTY_H': 1,
|
| 424 |
+
'HAVE_PWRITE': 1,
|
| 425 |
+
'HAVE_PWRITEV': 1,
|
| 426 |
+
'HAVE_PWRITEV2': 0,
|
| 427 |
+
'HAVE_READLINK': 1,
|
| 428 |
+
'HAVE_READLINKAT': 1,
|
| 429 |
+
'HAVE_READV': 1,
|
| 430 |
+
'HAVE_REALPATH': 1,
|
| 431 |
+
'HAVE_RENAMEAT': 1,
|
| 432 |
+
'HAVE_RL_APPEND_HISTORY': 1,
|
| 433 |
+
'HAVE_RL_CATCH_SIGNAL': 1,
|
| 434 |
+
'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1,
|
| 435 |
+
'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1,
|
| 436 |
+
'HAVE_RL_COMPLETION_MATCHES': 1,
|
| 437 |
+
'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1,
|
| 438 |
+
'HAVE_RL_PRE_INPUT_HOOK': 1,
|
| 439 |
+
'HAVE_RL_RESIZE_TERMINAL': 1,
|
| 440 |
+
'HAVE_ROUND': 1,
|
| 441 |
+
'HAVE_RTPSPAWN': 0,
|
| 442 |
+
'HAVE_SCHED_GET_PRIORITY_MAX': 1,
|
| 443 |
+
'HAVE_SCHED_H': 1,
|
| 444 |
+
'HAVE_SCHED_RR_GET_INTERVAL': 1,
|
| 445 |
+
'HAVE_SCHED_SETAFFINITY': 1,
|
| 446 |
+
'HAVE_SCHED_SETPARAM': 1,
|
| 447 |
+
'HAVE_SCHED_SETSCHEDULER': 1,
|
| 448 |
+
'HAVE_SEM_CLOCKWAIT': 0,
|
| 449 |
+
'HAVE_SEM_GETVALUE': 1,
|
| 450 |
+
'HAVE_SEM_OPEN': 1,
|
| 451 |
+
'HAVE_SEM_TIMEDWAIT': 1,
|
| 452 |
+
'HAVE_SEM_UNLINK': 1,
|
| 453 |
+
'HAVE_SENDFILE': 1,
|
| 454 |
+
'HAVE_SETEGID': 1,
|
| 455 |
+
'HAVE_SETEUID': 1,
|
| 456 |
+
'HAVE_SETGID': 1,
|
| 457 |
+
'HAVE_SETGROUPS': 1,
|
| 458 |
+
'HAVE_SETHOSTNAME': 1,
|
| 459 |
+
'HAVE_SETITIMER': 1,
|
| 460 |
+
'HAVE_SETLOCALE': 1,
|
| 461 |
+
'HAVE_SETPGID': 1,
|
| 462 |
+
'HAVE_SETPGRP': 1,
|
| 463 |
+
'HAVE_SETPRIORITY': 1,
|
| 464 |
+
'HAVE_SETREGID': 1,
|
| 465 |
+
'HAVE_SETRESGID': 1,
|
| 466 |
+
'HAVE_SETRESUID': 1,
|
| 467 |
+
'HAVE_SETREUID': 1,
|
| 468 |
+
'HAVE_SETSID': 1,
|
| 469 |
+
'HAVE_SETUID': 1,
|
| 470 |
+
'HAVE_SETVBUF': 1,
|
| 471 |
+
'HAVE_SHADOW_H': 1,
|
| 472 |
+
'HAVE_SHM_OPEN': 1,
|
| 473 |
+
'HAVE_SHM_UNLINK': 1,
|
| 474 |
+
'HAVE_SIGACTION': 1,
|
| 475 |
+
'HAVE_SIGALTSTACK': 1,
|
| 476 |
+
'HAVE_SIGFILLSET': 1,
|
| 477 |
+
'HAVE_SIGINFO_T_SI_BAND': 1,
|
| 478 |
+
'HAVE_SIGINTERRUPT': 1,
|
| 479 |
+
'HAVE_SIGNAL_H': 1,
|
| 480 |
+
'HAVE_SIGPENDING': 1,
|
| 481 |
+
'HAVE_SIGRELSE': 1,
|
| 482 |
+
'HAVE_SIGTIMEDWAIT': 1,
|
| 483 |
+
'HAVE_SIGWAIT': 1,
|
| 484 |
+
'HAVE_SIGWAITINFO': 1,
|
| 485 |
+
'HAVE_SNPRINTF': 1,
|
| 486 |
+
'HAVE_SOCKADDR_ALG': 1,
|
| 487 |
+
'HAVE_SOCKADDR_SA_LEN': 0,
|
| 488 |
+
'HAVE_SOCKADDR_STORAGE': 1,
|
| 489 |
+
'HAVE_SOCKETPAIR': 1,
|
| 490 |
+
'HAVE_SPAWN_H': 1,
|
| 491 |
+
'HAVE_SPLICE': 1,
|
| 492 |
+
'HAVE_SSIZE_T': 1,
|
| 493 |
+
'HAVE_STATVFS': 1,
|
| 494 |
+
'HAVE_STAT_TV_NSEC': 1,
|
| 495 |
+
'HAVE_STAT_TV_NSEC2': 0,
|
| 496 |
+
'HAVE_STDARG_PROTOTYPES': 1,
|
| 497 |
+
'HAVE_STDINT_H': 1,
|
| 498 |
+
'HAVE_STDLIB_H': 1,
|
| 499 |
+
'HAVE_STD_ATOMIC': 1,
|
| 500 |
+
'HAVE_STRFTIME': 1,
|
| 501 |
+
'HAVE_STRINGS_H': 1,
|
| 502 |
+
'HAVE_STRING_H': 1,
|
| 503 |
+
'HAVE_STRLCPY': 0,
|
| 504 |
+
'HAVE_STROPTS_H': 0,
|
| 505 |
+
'HAVE_STRSIGNAL': 1,
|
| 506 |
+
'HAVE_STRUCT_PASSWD_PW_GECOS': 1,
|
| 507 |
+
'HAVE_STRUCT_PASSWD_PW_PASSWD': 1,
|
| 508 |
+
'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0,
|
| 509 |
+
'HAVE_STRUCT_STAT_ST_BLKSIZE': 1,
|
| 510 |
+
'HAVE_STRUCT_STAT_ST_BLOCKS': 1,
|
| 511 |
+
'HAVE_STRUCT_STAT_ST_FLAGS': 0,
|
| 512 |
+
'HAVE_STRUCT_STAT_ST_GEN': 0,
|
| 513 |
+
'HAVE_STRUCT_STAT_ST_RDEV': 1,
|
| 514 |
+
'HAVE_STRUCT_TM_TM_ZONE': 1,
|
| 515 |
+
'HAVE_SYMLINK': 1,
|
| 516 |
+
'HAVE_SYMLINKAT': 1,
|
| 517 |
+
'HAVE_SYNC': 1,
|
| 518 |
+
'HAVE_SYSCONF': 1,
|
| 519 |
+
'HAVE_SYSEXITS_H': 1,
|
| 520 |
+
'HAVE_SYS_AUDIOIO_H': 0,
|
| 521 |
+
'HAVE_SYS_AUXV_H': 1,
|
| 522 |
+
'HAVE_SYS_BSDTTY_H': 0,
|
| 523 |
+
'HAVE_SYS_DEVPOLL_H': 0,
|
| 524 |
+
'HAVE_SYS_DIR_H': 0,
|
| 525 |
+
'HAVE_SYS_ENDIAN_H': 0,
|
| 526 |
+
'HAVE_SYS_EPOLL_H': 1,
|
| 527 |
+
'HAVE_SYS_EVENTFD_H': 1,
|
| 528 |
+
'HAVE_SYS_EVENT_H': 0,
|
| 529 |
+
'HAVE_SYS_FILE_H': 1,
|
| 530 |
+
'HAVE_SYS_IOCTL_H': 1,
|
| 531 |
+
'HAVE_SYS_KERN_CONTROL_H': 0,
|
| 532 |
+
'HAVE_SYS_LOADAVG_H': 0,
|
| 533 |
+
'HAVE_SYS_LOCK_H': 0,
|
| 534 |
+
'HAVE_SYS_MEMFD_H': 0,
|
| 535 |
+
'HAVE_SYS_MKDEV_H': 0,
|
| 536 |
+
'HAVE_SYS_MMAN_H': 1,
|
| 537 |
+
'HAVE_SYS_MODEM_H': 0,
|
| 538 |
+
'HAVE_SYS_NDIR_H': 0,
|
| 539 |
+
'HAVE_SYS_PARAM_H': 1,
|
| 540 |
+
'HAVE_SYS_POLL_H': 1,
|
| 541 |
+
'HAVE_SYS_RANDOM_H': 0,
|
| 542 |
+
'HAVE_SYS_RESOURCE_H': 1,
|
| 543 |
+
'HAVE_SYS_SELECT_H': 1,
|
| 544 |
+
'HAVE_SYS_SENDFILE_H': 1,
|
| 545 |
+
'HAVE_SYS_SOCKET_H': 1,
|
| 546 |
+
'HAVE_SYS_STATVFS_H': 1,
|
| 547 |
+
'HAVE_SYS_STAT_H': 1,
|
| 548 |
+
'HAVE_SYS_SYSCALL_H': 1,
|
| 549 |
+
'HAVE_SYS_SYSMACROS_H': 1,
|
| 550 |
+
'HAVE_SYS_SYS_DOMAIN_H': 0,
|
| 551 |
+
'HAVE_SYS_TERMIO_H': 0,
|
| 552 |
+
'HAVE_SYS_TIMES_H': 1,
|
| 553 |
+
'HAVE_SYS_TIME_H': 1,
|
| 554 |
+
'HAVE_SYS_TYPES_H': 1,
|
| 555 |
+
'HAVE_SYS_UIO_H': 1,
|
| 556 |
+
'HAVE_SYS_UN_H': 1,
|
| 557 |
+
'HAVE_SYS_UTSNAME_H': 1,
|
| 558 |
+
'HAVE_SYS_WAIT_H': 1,
|
| 559 |
+
'HAVE_SYS_XATTR_H': 1,
|
| 560 |
+
'HAVE_TCGETPGRP': 1,
|
| 561 |
+
'HAVE_TCSETPGRP': 1,
|
| 562 |
+
'HAVE_TEMPNAM': 1,
|
| 563 |
+
'HAVE_TERMIOS_H': 1,
|
| 564 |
+
'HAVE_TERM_H': 1,
|
| 565 |
+
'HAVE_TGAMMA': 1,
|
| 566 |
+
'HAVE_TIMEGM': 1,
|
| 567 |
+
'HAVE_TIMES': 1,
|
| 568 |
+
'HAVE_TMPFILE': 1,
|
| 569 |
+
'HAVE_TMPNAM': 1,
|
| 570 |
+
'HAVE_TMPNAM_R': 1,
|
| 571 |
+
'HAVE_TM_ZONE': 1,
|
| 572 |
+
'HAVE_TRUNCATE': 1,
|
| 573 |
+
'HAVE_TZNAME': 0,
|
| 574 |
+
'HAVE_UCS4_TCL': 0,
|
| 575 |
+
'HAVE_UNAME': 1,
|
| 576 |
+
'HAVE_UNISTD_H': 1,
|
| 577 |
+
'HAVE_UNLINKAT': 1,
|
| 578 |
+
'HAVE_USABLE_WCHAR_T': 0,
|
| 579 |
+
'HAVE_UTIL_H': 0,
|
| 580 |
+
'HAVE_UTIMENSAT': 1,
|
| 581 |
+
'HAVE_UTIMES': 1,
|
| 582 |
+
'HAVE_UTIME_H': 1,
|
| 583 |
+
'HAVE_UUID_CREATE': 0,
|
| 584 |
+
'HAVE_UUID_ENC_BE': 0,
|
| 585 |
+
'HAVE_UUID_GENERATE_TIME_SAFE': 1,
|
| 586 |
+
'HAVE_UUID_H': 1,
|
| 587 |
+
'HAVE_UUID_UUID_H': 1,
|
| 588 |
+
'HAVE_VFORK': 1,
|
| 589 |
+
'HAVE_WAIT3': 1,
|
| 590 |
+
'HAVE_WAIT4': 1,
|
| 591 |
+
'HAVE_WAITID': 1,
|
| 592 |
+
'HAVE_WAITPID': 1,
|
| 593 |
+
'HAVE_WCHAR_H': 1,
|
| 594 |
+
'HAVE_WCSCOLL': 1,
|
| 595 |
+
'HAVE_WCSFTIME': 1,
|
| 596 |
+
'HAVE_WCSXFRM': 1,
|
| 597 |
+
'HAVE_WMEMCMP': 1,
|
| 598 |
+
'HAVE_WORKING_TZSET': 1,
|
| 599 |
+
'HAVE_WRITEV': 1,
|
| 600 |
+
'HAVE_ZLIB_COPY': 1,
|
| 601 |
+
'HAVE__GETPTY': 0,
|
| 602 |
+
'HOST_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 603 |
+
'INCLDIRSTOMAKE': '/root/envs/evalkit_cambrian/include '
|
| 604 |
+
'/root/envs/evalkit_cambrian/include '
|
| 605 |
+
'/root/envs/evalkit_cambrian/include/python3.10 '
|
| 606 |
+
'/root/envs/evalkit_cambrian/include/python3.10',
|
| 607 |
+
'INCLUDEDIR': '/root/envs/evalkit_cambrian/include',
|
| 608 |
+
'INCLUDEPY': '/root/envs/evalkit_cambrian/include/python3.10',
|
| 609 |
+
'INSTALL': '/usr/bin/install -c',
|
| 610 |
+
'INSTALL_DATA': '/usr/bin/install -c -m 644',
|
| 611 |
+
'INSTALL_PROGRAM': '/usr/bin/install -c',
|
| 612 |
+
'INSTALL_SCRIPT': '/usr/bin/install -c',
|
| 613 |
+
'INSTALL_SHARED': '/usr/bin/install -c -m 755',
|
| 614 |
+
'INSTSONAME': 'libpython3.10.a',
|
| 615 |
+
'IO_H': 'Modules/_io/_iomodule.h',
|
| 616 |
+
'IO_OBJS': '\\',
|
| 617 |
+
'LDCXXSHARED': 'x86_64-conda-linux-gnu-c++ -pthread -shared',
|
| 618 |
+
'LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 619 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 620 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 621 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 622 |
+
'-L/root/envs/evalkit_cambrian/lib '
|
| 623 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 624 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 625 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 626 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 627 |
+
'-L/root/envs/evalkit_cambrian/lib',
|
| 628 |
+
'LDLIBRARY': 'libpython3.10.a',
|
| 629 |
+
'LDLIBRARYDIR': '',
|
| 630 |
+
'LDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 631 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 632 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 633 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 634 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 635 |
+
'-L/root/envs/evalkit_cambrian/lib '
|
| 636 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 637 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 638 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 639 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 640 |
+
'-L/root/envs/evalkit_cambrian/lib',
|
| 641 |
+
'LDVERSION': '3.10',
|
| 642 |
+
'LIBC': '',
|
| 643 |
+
'LIBDEST': '/root/envs/evalkit_cambrian/lib/python3.10',
|
| 644 |
+
'LIBDIR': '/root/envs/evalkit_cambrian/lib',
|
| 645 |
+
'LIBFFI_INCLUDEDIR': '/root/envs/evalkit_cambrian/include',
|
| 646 |
+
'LIBM': '-lm',
|
| 647 |
+
'LIBOBJDIR': 'Python/',
|
| 648 |
+
'LIBOBJS': '',
|
| 649 |
+
'LIBPC': '/root/envs/evalkit_cambrian/lib/pkgconfig',
|
| 650 |
+
'LIBPL': '/root/envs/evalkit_cambrian/lib/python3.10/config-3.10-x86_64-linux-gnu',
|
| 651 |
+
'LIBPYTHON': '',
|
| 652 |
+
'LIBRARY': 'libpython3.10.a',
|
| 653 |
+
'LIBRARY_DEPS': 'libpython3.10.a',
|
| 654 |
+
'LIBRARY_OBJS': '\\',
|
| 655 |
+
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
|
| 656 |
+
'LIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 657 |
+
'LIBSUBDIRS': 'asyncio \\',
|
| 658 |
+
'LINKCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 659 |
+
'LINKFORSHARED': '-Xlinker -export-dynamic',
|
| 660 |
+
'LIPO_32BIT_FLAGS': '',
|
| 661 |
+
'LIPO_INTEL64_FLAGS': '',
|
| 662 |
+
'LLVM_PROF_ERR': 'no',
|
| 663 |
+
'LLVM_PROF_FILE': '',
|
| 664 |
+
'LLVM_PROF_MERGER': 'true',
|
| 665 |
+
'LN': 'ln',
|
| 666 |
+
'LOCALMODLIBS': '',
|
| 667 |
+
'MACHDEP': 'linux',
|
| 668 |
+
'MACHDEP_OBJS': '',
|
| 669 |
+
'MACHDESTLIB': '/root/envs/evalkit_cambrian/lib/python3.10',
|
| 670 |
+
'MACOSX_DEPLOYMENT_TARGET': '',
|
| 671 |
+
'MAINCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 672 |
+
'MAJOR_IN_MKDEV': 0,
|
| 673 |
+
'MAJOR_IN_SYSMACROS': 0,
|
| 674 |
+
'MAKESETUP': '/croot/python-split_1733933809325/work/Modules/makesetup',
|
| 675 |
+
'MANDIR': '/root/envs/evalkit_cambrian/share/man',
|
| 676 |
+
'MKDIR_P': '/usr/bin/mkdir -p',
|
| 677 |
+
'MODBUILT_NAMES': 'posix errno pwd _sre _codecs _weakref _functools '
|
| 678 |
+
'_operator _collections _abc itertools atexit _signal '
|
| 679 |
+
'_stat time _thread _locale _io faulthandler '
|
| 680 |
+
'_tracemalloc _symtable xxsubtype',
|
| 681 |
+
'MODDISABLED_NAMES': '',
|
| 682 |
+
'MODLIBS': '',
|
| 683 |
+
'MODOBJS': 'Modules/posixmodule.o Modules/errnomodule.o '
|
| 684 |
+
'Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o '
|
| 685 |
+
'Modules/_weakref.o Modules/_functoolsmodule.o '
|
| 686 |
+
'Modules/_operator.o Modules/_collectionsmodule.o '
|
| 687 |
+
'Modules/_abc.o Modules/itertoolsmodule.o '
|
| 688 |
+
'Modules/atexitmodule.o Modules/signalmodule.o Modules/_stat.o '
|
| 689 |
+
'Modules/timemodule.o Modules/_threadmodule.o '
|
| 690 |
+
'Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o '
|
| 691 |
+
'Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o '
|
| 692 |
+
'Modules/textio.o Modules/stringio.o Modules/faulthandler.o '
|
| 693 |
+
'Modules/_tracemalloc.o Modules/symtablemodule.o '
|
| 694 |
+
'Modules/xxsubtype.o',
|
| 695 |
+
'MODULE_OBJS': '\\',
|
| 696 |
+
'MULTIARCH': 'x86_64-linux-gnu',
|
| 697 |
+
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
|
| 698 |
+
'MVWDELCH_IS_EXPRESSION': 1,
|
| 699 |
+
'NO_AS_NEEDED': '-Wl,--no-as-needed',
|
| 700 |
+
'OBJECT_OBJS': '\\',
|
| 701 |
+
'OPENSSL_INCLUDES': '-I/root/envs/evalkit_cambrian/include',
|
| 702 |
+
'OPENSSL_LDFLAGS': '-L/root/envs/evalkit_cambrian/lib',
|
| 703 |
+
'OPENSSL_LIBS': '-lssl -lcrypto',
|
| 704 |
+
'OPENSSL_RPATH': '',
|
| 705 |
+
'OPT': '-DNDEBUG -fwrapv -O2 -Wall',
|
| 706 |
+
'OTHER_LIBTOOL_OPT': '',
|
| 707 |
+
'PACKAGE_BUGREPORT': 0,
|
| 708 |
+
'PACKAGE_NAME': 0,
|
| 709 |
+
'PACKAGE_STRING': 0,
|
| 710 |
+
'PACKAGE_TARNAME': 0,
|
| 711 |
+
'PACKAGE_URL': 0,
|
| 712 |
+
'PACKAGE_VERSION': 0,
|
| 713 |
+
'PARSER_HEADERS': '\\',
|
| 714 |
+
'PARSER_OBJS': '\\ \\ Parser/myreadline.o Parser/tokenizer.o',
|
| 715 |
+
'PEGEN_HEADERS': '\\',
|
| 716 |
+
'PEGEN_OBJS': '\\',
|
| 717 |
+
'PGO_PROF_GEN_FLAG': '-fprofile-generate',
|
| 718 |
+
'PGO_PROF_USE_FLAG': ' ',
|
| 719 |
+
'PLATLIBDIR': 'lib',
|
| 720 |
+
'POBJS': '\\',
|
| 721 |
+
'POSIX_SEMAPHORES_NOT_ENABLED': 0,
|
| 722 |
+
'PROFILE_TASK': '-m test --pgo',
|
| 723 |
+
'PTHREAD_KEY_T_IS_COMPATIBLE_WITH_INT': 1,
|
| 724 |
+
'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1,
|
| 725 |
+
'PURIFY': '',
|
| 726 |
+
'PY3LIBRARY': '',
|
| 727 |
+
'PYLONG_BITS_IN_DIGIT': 0,
|
| 728 |
+
'PYTHON': 'python',
|
| 729 |
+
'PYTHONFRAMEWORK': '',
|
| 730 |
+
'PYTHONFRAMEWORKDIR': 'no-framework',
|
| 731 |
+
'PYTHONFRAMEWORKINSTALLDIR': '',
|
| 732 |
+
'PYTHONFRAMEWORKPREFIX': '',
|
| 733 |
+
'PYTHONPATH': '',
|
| 734 |
+
'PYTHON_FOR_BUILD': './python -E',
|
| 735 |
+
'PYTHON_FOR_REGEN': '',
|
| 736 |
+
'PYTHON_HEADERS': '\\',
|
| 737 |
+
'PYTHON_OBJS': '\\',
|
| 738 |
+
'PY_BUILD_ENVIRON': '',
|
| 739 |
+
'PY_BUILTIN_HASHLIB_HASHES': '"md5,sha1,sha256,sha512,sha3,blake2"',
|
| 740 |
+
'PY_BUILTIN_MODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG '
|
| 741 |
+
'-fwrapv -O2 -Wall -march=nocona -mtune=haswell '
|
| 742 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 743 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 744 |
+
'/root/envs/evalkit_cambrian/include '
|
| 745 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 746 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 747 |
+
' '
|
| 748 |
+
' -march=nocona '
|
| 749 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 750 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 751 |
+
'-ffunction-sections -pipe -isystem '
|
| 752 |
+
'/root/envs/evalkit_cambrian/include '
|
| 753 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 754 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 755 |
+
' '
|
| 756 |
+
' '
|
| 757 |
+
'-fno-semantic-interposition '
|
| 758 |
+
' '
|
| 759 |
+
' -g -std=c99 -Wextra '
|
| 760 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 761 |
+
'-Wno-missing-field-initializers '
|
| 762 |
+
'-Werror=implicit-function-declaration '
|
| 763 |
+
'-fvisibility=hidden '
|
| 764 |
+
' '
|
| 765 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 766 |
+
'-IObjects -IInclude -IPython -I. '
|
| 767 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 768 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 769 |
+
'/root/envs/evalkit_cambrian/include '
|
| 770 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 771 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 772 |
+
'/root/envs/evalkit_cambrian/include '
|
| 773 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 774 |
+
'-DPy_BUILD_CORE_BUILTIN',
|
| 775 |
+
'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 776 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 777 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 778 |
+
'-isystem '
|
| 779 |
+
'/root/envs/evalkit_cambrian/include '
|
| 780 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 781 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 782 |
+
' '
|
| 783 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 784 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 785 |
+
'-isystem '
|
| 786 |
+
'/root/envs/evalkit_cambrian/include '
|
| 787 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 788 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 789 |
+
' '
|
| 790 |
+
'',
|
| 791 |
+
'PY_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 792 |
+
' -g -std=c99 '
|
| 793 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 794 |
+
'-Wno-missing-field-initializers '
|
| 795 |
+
'-Werror=implicit-function-declaration '
|
| 796 |
+
'-fvisibility=hidden '
|
| 797 |
+
'-I/croot/python-split_1733933809325/work/Include/internal',
|
| 798 |
+
'PY_COERCE_C_LOCALE': 1,
|
| 799 |
+
'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 '
|
| 800 |
+
'-Wall -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 801 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 802 |
+
'-pipe -isystem '
|
| 803 |
+
'/root/envs/evalkit_cambrian/include '
|
| 804 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 805 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 806 |
+
' '
|
| 807 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 808 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 809 |
+
'-pipe -isystem '
|
| 810 |
+
'/root/envs/evalkit_cambrian/include '
|
| 811 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 812 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 813 |
+
' '
|
| 814 |
+
' -fno-semantic-interposition '
|
| 815 |
+
' '
|
| 816 |
+
'-g -std=c99 -Wextra -Wno-unused-result '
|
| 817 |
+
'-Wno-unused-parameter -Wno-missing-field-initializers '
|
| 818 |
+
'-Werror=implicit-function-declaration -fvisibility=hidden '
|
| 819 |
+
' '
|
| 820 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 821 |
+
'-IObjects -IInclude -IPython -I. '
|
| 822 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 823 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 824 |
+
'/root/envs/evalkit_cambrian/include '
|
| 825 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 826 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 827 |
+
'/root/envs/evalkit_cambrian/include '
|
| 828 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 829 |
+
'-DPy_BUILD_CORE',
|
| 830 |
+
'PY_CORE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 831 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 832 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 833 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 834 |
+
'-L/root/envs/evalkit_cambrian/lib '
|
| 835 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 836 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 837 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 838 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 839 |
+
'-L/root/envs/evalkit_cambrian/lib '
|
| 840 |
+
'-fno-semantic-interposition '
|
| 841 |
+
' -g',
|
| 842 |
+
'PY_CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 843 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 844 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 845 |
+
'/root/envs/evalkit_cambrian/include '
|
| 846 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 847 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 848 |
+
'/root/envs/evalkit_cambrian/include '
|
| 849 |
+
'-I/root/envs/evalkit_cambrian/include',
|
| 850 |
+
'PY_ENABLE_SHARED': 0,
|
| 851 |
+
'PY_FORMAT_SIZE_T': '"z"',
|
| 852 |
+
'PY_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 853 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 854 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 855 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 856 |
+
'-L/root/envs/evalkit_cambrian/lib '
|
| 857 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 858 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 859 |
+
'-Wl,-rpath,/root/envs/evalkit_cambrian/lib '
|
| 860 |
+
'-Wl,-rpath-link,/root/envs/evalkit_cambrian/lib '
|
| 861 |
+
'-L/root/envs/evalkit_cambrian/lib',
|
| 862 |
+
'PY_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 863 |
+
' -g',
|
| 864 |
+
'PY_SSL_DEFAULT_CIPHERS': 1,
|
| 865 |
+
'PY_SSL_DEFAULT_CIPHER_STRING': 0,
|
| 866 |
+
'PY_STDMODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv '
|
| 867 |
+
'-O2 -Wall -march=nocona -mtune=haswell '
|
| 868 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 869 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 870 |
+
'/root/envs/evalkit_cambrian/include '
|
| 871 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 872 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 873 |
+
' '
|
| 874 |
+
' -march=nocona '
|
| 875 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 876 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 877 |
+
'-ffunction-sections -pipe -isystem '
|
| 878 |
+
'/root/envs/evalkit_cambrian/include '
|
| 879 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 880 |
+
'-fdebug-prefix-map=/root/envs/evalkit_cambrian=/usr/local/src/conda-prefix '
|
| 881 |
+
' '
|
| 882 |
+
' '
|
| 883 |
+
'-fno-semantic-interposition '
|
| 884 |
+
' -g -std=c99 '
|
| 885 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 886 |
+
'-Wno-missing-field-initializers '
|
| 887 |
+
'-Werror=implicit-function-declaration '
|
| 888 |
+
'-fvisibility=hidden '
|
| 889 |
+
' '
|
| 890 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 891 |
+
'-IObjects -IInclude -IPython -I. '
|
| 892 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 893 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 894 |
+
'/root/envs/evalkit_cambrian/include '
|
| 895 |
+
'-I/root/envs/evalkit_cambrian/include '
|
| 896 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 897 |
+
'/root/envs/evalkit_cambrian/include '
|
| 898 |
+
'-I/root/envs/evalkit_cambrian/include',
|
| 899 |
+
'Py_DEBUG': 0,
|
| 900 |
+
'Py_ENABLE_SHARED': 0,
|
| 901 |
+
'Py_HASH_ALGORITHM': 0,
|
| 902 |
+
'Py_TRACE_REFS': 0,
|
| 903 |
+
'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\',
|
| 904 |
+
'READELF': 'x86_64-conda-linux-gnu-readelf',
|
| 905 |
+
'RESSRCDIR': 'Mac/Resources/framework',
|
| 906 |
+
'RETSIGTYPE': 'void',
|
| 907 |
+
'RUNSHARED': '',
|
| 908 |
+
'SCRIPTDIR': '/root/envs/evalkit_cambrian/lib',
|
| 909 |
+
'SETPGRP_HAVE_ARG': 0,
|
| 910 |
+
'SHELL': '/bin/sh',
|
| 911 |
+
'SHLIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 912 |
+
'SHLIB_SUFFIX': '.so',
|
| 913 |
+
'SHM_NEEDS_LIBRT': 1,
|
| 914 |
+
'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0,
|
| 915 |
+
'SITEPATH': '',
|
| 916 |
+
'SIZEOF_DOUBLE': 8,
|
| 917 |
+
'SIZEOF_FLOAT': 4,
|
| 918 |
+
'SIZEOF_FPOS_T': 16,
|
| 919 |
+
'SIZEOF_INT': 4,
|
| 920 |
+
'SIZEOF_LONG': 8,
|
| 921 |
+
'SIZEOF_LONG_DOUBLE': 16,
|
| 922 |
+
'SIZEOF_LONG_LONG': 8,
|
| 923 |
+
'SIZEOF_OFF_T': 8,
|
| 924 |
+
'SIZEOF_PID_T': 4,
|
| 925 |
+
'SIZEOF_PTHREAD_KEY_T': 4,
|
| 926 |
+
'SIZEOF_PTHREAD_T': 8,
|
| 927 |
+
'SIZEOF_SHORT': 2,
|
| 928 |
+
'SIZEOF_SIZE_T': 8,
|
| 929 |
+
'SIZEOF_TIME_T': 8,
|
| 930 |
+
'SIZEOF_UINTPTR_T': 8,
|
| 931 |
+
'SIZEOF_VOID_P': 8,
|
| 932 |
+
'SIZEOF_WCHAR_T': 4,
|
| 933 |
+
'SIZEOF__BOOL': 1,
|
| 934 |
+
'SOABI': 'cpython-310-x86_64-linux-gnu',
|
| 935 |
+
'SRCDIRS': 'Parser Objects Python Modules Modules/_io Programs',
|
| 936 |
+
'SRC_GDB_HOOKS': '/croot/python-split_1733933809325/work/Tools/gdb/libpython.py',
|
| 937 |
+
'STATIC_LIBPYTHON': 1,
|
| 938 |
+
'STDC_HEADERS': 1,
|
| 939 |
+
'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */",
|
| 940 |
+
'STRIPFLAG': '-s',
|
| 941 |
+
'SUBDIRS': '',
|
| 942 |
+
'SUBDIRSTOO': 'Include Lib Misc',
|
| 943 |
+
'SYSLIBS': '-lm',
|
| 944 |
+
'SYS_SELECT_WITH_SYS_TIME': 1,
|
| 945 |
+
'TCLTK_INCLUDES': '-I/root/envs/evalkit_cambrian/include',
|
| 946 |
+
'TCLTK_LIBS': '-L/root/envs/evalkit_cambrian/lib '
|
| 947 |
+
'-ltcl8.6 -ltk8.6',
|
| 948 |
+
'TESTOPTS': '',
|
| 949 |
+
'TESTPATH': '',
|
| 950 |
+
'TESTPYTHON': './python',
|
| 951 |
+
'TESTPYTHONOPTS': '',
|
| 952 |
+
'TESTRUNNER': './python '
|
| 953 |
+
'/croot/python-split_1733933809325/work/Tools/scripts/run_tests.py',
|
| 954 |
+
'TESTSUBDIRS': 'ctypes/test \\',
|
| 955 |
+
'TESTTIMEOUT': 1200,
|
| 956 |
+
'TEST_MODULES': 'yes',
|
| 957 |
+
'THREAD_STACK_SIZE': 0,
|
| 958 |
+
'TIMEMODULE_LIB': 0,
|
| 959 |
+
'TIME_WITH_SYS_TIME': 1,
|
| 960 |
+
'TM_IN_SYS_TIME': 0,
|
| 961 |
+
'TZPATH': '/root/envs/evalkit_cambrian/share/zoneinfo',
|
| 962 |
+
'UNICODE_DEPS': '\\',
|
| 963 |
+
'UNIVERSALSDK': '',
|
| 964 |
+
'UPDATE_FILE': '/croot/python-split_1733933809325/work/Tools/scripts/update_file.py',
|
| 965 |
+
'USE_COMPUTED_GOTOS': 1,
|
| 966 |
+
'VERSION': '3.10',
|
| 967 |
+
'VPATH': '/croot/python-split_1733933809325/work',
|
| 968 |
+
'WHEEL_PKG_DIR': '',
|
| 969 |
+
'WINDOW_HAS_FLAGS': 1,
|
| 970 |
+
'WITH_DECIMAL_CONTEXTVAR': 1,
|
| 971 |
+
'WITH_DOC_STRINGS': 1,
|
| 972 |
+
'WITH_DTRACE': 0,
|
| 973 |
+
'WITH_DYLD': 0,
|
| 974 |
+
'WITH_EDITLINE': 0,
|
| 975 |
+
'WITH_LIBINTL': 0,
|
| 976 |
+
'WITH_NEXT_FRAMEWORK': 0,
|
| 977 |
+
'WITH_PYMALLOC': 1,
|
| 978 |
+
'WITH_VALGRIND': 0,
|
| 979 |
+
'X87_DOUBLE_ROUNDING': 0,
|
| 980 |
+
'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax',
|
| 981 |
+
'abs_builddir': '/croot/python-split_1733933809325/work/build-static',
|
| 982 |
+
'abs_srcdir': '/croot/python-split_1733933809325/work',
|
| 983 |
+
'datarootdir': '/root/envs/evalkit_cambrian/share',
|
| 984 |
+
'exec_prefix': '/root/envs/evalkit_cambrian',
|
| 985 |
+
'prefix': '/root/envs/evalkit_cambrian',
|
| 986 |
+
'srcdir': '/croot/python-split_1733933809325/work'}
|
evalkit_cambrian/lib/python3.10/_threading_local.py
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Thread-local objects.
|
| 2 |
+
|
| 3 |
+
(Note that this module provides a Python version of the threading.local
|
| 4 |
+
class. Depending on the version of Python you're using, there may be a
|
| 5 |
+
faster one available. You should always import the `local` class from
|
| 6 |
+
`threading`.)
|
| 7 |
+
|
| 8 |
+
Thread-local objects support the management of thread-local data.
|
| 9 |
+
If you have data that you want to be local to a thread, simply create
|
| 10 |
+
a thread-local object and use its attributes:
|
| 11 |
+
|
| 12 |
+
>>> mydata = local()
|
| 13 |
+
>>> mydata.number = 42
|
| 14 |
+
>>> mydata.number
|
| 15 |
+
42
|
| 16 |
+
|
| 17 |
+
You can also access the local-object's dictionary:
|
| 18 |
+
|
| 19 |
+
>>> mydata.__dict__
|
| 20 |
+
{'number': 42}
|
| 21 |
+
>>> mydata.__dict__.setdefault('widgets', [])
|
| 22 |
+
[]
|
| 23 |
+
>>> mydata.widgets
|
| 24 |
+
[]
|
| 25 |
+
|
| 26 |
+
What's important about thread-local objects is that their data are
|
| 27 |
+
local to a thread. If we access the data in a different thread:
|
| 28 |
+
|
| 29 |
+
>>> log = []
|
| 30 |
+
>>> def f():
|
| 31 |
+
... items = sorted(mydata.__dict__.items())
|
| 32 |
+
... log.append(items)
|
| 33 |
+
... mydata.number = 11
|
| 34 |
+
... log.append(mydata.number)
|
| 35 |
+
|
| 36 |
+
>>> import threading
|
| 37 |
+
>>> thread = threading.Thread(target=f)
|
| 38 |
+
>>> thread.start()
|
| 39 |
+
>>> thread.join()
|
| 40 |
+
>>> log
|
| 41 |
+
[[], 11]
|
| 42 |
+
|
| 43 |
+
we get different data. Furthermore, changes made in the other thread
|
| 44 |
+
don't affect data seen in this thread:
|
| 45 |
+
|
| 46 |
+
>>> mydata.number
|
| 47 |
+
42
|
| 48 |
+
|
| 49 |
+
Of course, values you get from a local object, including a __dict__
|
| 50 |
+
attribute, are for whatever thread was current at the time the
|
| 51 |
+
attribute was read. For that reason, you generally don't want to save
|
| 52 |
+
these values across threads, as they apply only to the thread they
|
| 53 |
+
came from.
|
| 54 |
+
|
| 55 |
+
You can create custom local objects by subclassing the local class:
|
| 56 |
+
|
| 57 |
+
>>> class MyLocal(local):
|
| 58 |
+
... number = 2
|
| 59 |
+
... def __init__(self, /, **kw):
|
| 60 |
+
... self.__dict__.update(kw)
|
| 61 |
+
... def squared(self):
|
| 62 |
+
... return self.number ** 2
|
| 63 |
+
|
| 64 |
+
This can be useful to support default values, methods and
|
| 65 |
+
initialization. Note that if you define an __init__ method, it will be
|
| 66 |
+
called each time the local object is used in a separate thread. This
|
| 67 |
+
is necessary to initialize each thread's dictionary.
|
| 68 |
+
|
| 69 |
+
Now if we create a local object:
|
| 70 |
+
|
| 71 |
+
>>> mydata = MyLocal(color='red')
|
| 72 |
+
|
| 73 |
+
Now we have a default number:
|
| 74 |
+
|
| 75 |
+
>>> mydata.number
|
| 76 |
+
2
|
| 77 |
+
|
| 78 |
+
an initial color:
|
| 79 |
+
|
| 80 |
+
>>> mydata.color
|
| 81 |
+
'red'
|
| 82 |
+
>>> del mydata.color
|
| 83 |
+
|
| 84 |
+
And a method that operates on the data:
|
| 85 |
+
|
| 86 |
+
>>> mydata.squared()
|
| 87 |
+
4
|
| 88 |
+
|
| 89 |
+
As before, we can access the data in a separate thread:
|
| 90 |
+
|
| 91 |
+
>>> log = []
|
| 92 |
+
>>> thread = threading.Thread(target=f)
|
| 93 |
+
>>> thread.start()
|
| 94 |
+
>>> thread.join()
|
| 95 |
+
>>> log
|
| 96 |
+
[[('color', 'red')], 11]
|
| 97 |
+
|
| 98 |
+
without affecting this thread's data:
|
| 99 |
+
|
| 100 |
+
>>> mydata.number
|
| 101 |
+
2
|
| 102 |
+
>>> mydata.color
|
| 103 |
+
Traceback (most recent call last):
|
| 104 |
+
...
|
| 105 |
+
AttributeError: 'MyLocal' object has no attribute 'color'
|
| 106 |
+
|
| 107 |
+
Note that subclasses can define slots, but they are not thread
|
| 108 |
+
local. They are shared across threads:
|
| 109 |
+
|
| 110 |
+
>>> class MyLocal(local):
|
| 111 |
+
... __slots__ = 'number'
|
| 112 |
+
|
| 113 |
+
>>> mydata = MyLocal()
|
| 114 |
+
>>> mydata.number = 42
|
| 115 |
+
>>> mydata.color = 'red'
|
| 116 |
+
|
| 117 |
+
So, the separate thread:
|
| 118 |
+
|
| 119 |
+
>>> thread = threading.Thread(target=f)
|
| 120 |
+
>>> thread.start()
|
| 121 |
+
>>> thread.join()
|
| 122 |
+
|
| 123 |
+
affects what we see:
|
| 124 |
+
|
| 125 |
+
>>> mydata.number
|
| 126 |
+
11
|
| 127 |
+
|
| 128 |
+
>>> del mydata
|
| 129 |
+
"""
|
| 130 |
+
|
| 131 |
+
from weakref import ref
|
| 132 |
+
from contextlib import contextmanager
|
| 133 |
+
|
| 134 |
+
__all__ = ["local"]
|
| 135 |
+
|
| 136 |
+
# We need to use objects from the threading module, but the threading
|
| 137 |
+
# module may also want to use our `local` class, if support for locals
|
| 138 |
+
# isn't compiled in to the `thread` module. This creates potential problems
|
| 139 |
+
# with circular imports. For that reason, we don't import `threading`
|
| 140 |
+
# until the bottom of this file (a hack sufficient to worm around the
|
| 141 |
+
# potential problems). Note that all platforms on CPython do have support
|
| 142 |
+
# for locals in the `thread` module, and there is no circular import problem
|
| 143 |
+
# then, so problems introduced by fiddling the order of imports here won't
|
| 144 |
+
# manifest.
|
| 145 |
+
|
| 146 |
+
class _localimpl:
    """A class managing thread-local dicts"""
    # __weakref__ is required so `local` can hold a weak reference to us.
    __slots__ = 'key', 'dicts', 'localargs', 'locallock', '__weakref__'

    def __init__(self):
        # The key used in the Thread objects' attribute dicts.
        # We keep it a string for speed but make it unlikely to clash with
        # a "real" attribute.
        self.key = '_threading_local._localimpl.' + str(id(self))
        # { id(Thread) -> (ref(Thread), thread-local dict) }
        self.dicts = {}

    def get_dict(self):
        """Return the dict for the current thread. Raises KeyError if none
        defined."""
        thread = current_thread()
        return self.dicts[id(thread)][1]

    def create_dict(self):
        """Create a new dict for the current thread, and return it."""
        localdict = {}
        key = self.key
        thread = current_thread()
        idt = id(thread)
        def local_deleted(_, key=key):
            # When the localimpl is deleted, remove the thread attribute.
            # wrthread is a cell from the enclosing scope (bound below).
            thread = wrthread()
            if thread is not None:
                del thread.__dict__[key]
        def thread_deleted(_, idt=idt):
            # When the thread is deleted, remove the local dict.
            # Note that this is suboptimal if the thread object gets
            # caught in a reference loop. We would like to be called
            # as soon as the OS-level thread ends instead.
            local = wrlocal()
            if local is not None:
                dct = local.dicts.pop(idt)
        # Each weakref's callback cleans up the *other* side of the link,
        # so neither the thread nor the local keeps the other alive.
        wrlocal = ref(self, local_deleted)
        wrthread = ref(thread, thread_deleted)
        thread.__dict__[key] = wrlocal
        self.dicts[idt] = wrthread, localdict
        return localdict
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
@contextmanager
def _patch(self):
    # Swap the calling thread's private dict into self.__dict__ for the
    # duration of one attribute access on a `local` instance.
    # Use object.__getattribute__ to bypass local.__getattribute__ itself.
    impl = object.__getattribute__(self, '_local__impl')
    try:
        dct = impl.get_dict()
    except KeyError:
        # First access from this thread: create a fresh per-thread dict
        # and re-run __init__ with the construction-time arguments.
        dct = impl.create_dict()
        args, kw = impl.localargs
        self.__init__(*args, **kw)
    # The lock serializes the __dict__ swap across threads.
    with impl.locallock:
        object.__setattr__(self, '__dict__', dct)
        yield
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
class local:
    # Pure-Python thread-local object; each thread sees its own __dict__.
    __slots__ = '_local__impl', '__dict__'

    def __new__(cls, /, *args, **kw):
        # Constructor arguments only make sense if a subclass defined
        # __init__ to consume them (it is re-invoked per thread).
        if (args or kw) and (cls.__init__ is object.__init__):
            raise TypeError("Initialization arguments are not supported")
        self = object.__new__(cls)
        impl = _localimpl()
        impl.localargs = (args, kw)
        impl.locallock = RLock()
        # Bypass our own __setattr__, which would reject/patch the write.
        object.__setattr__(self, '_local__impl', impl)
        # We need to create the thread dict in anticipation of
        # __init__ being called, to make sure we don't call it
        # again ourselves.
        impl.create_dict()
        return self

    def __getattribute__(self, name):
        # Every attribute read goes through _patch so it sees the
        # calling thread's private __dict__.
        with _patch(self):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        # __dict__ itself is managed by _patch and must not be rebound.
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__setattr__(self, name, value)

    def __delattr__(self, name):
        # Same guard as __setattr__: the per-thread dict is not deletable.
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__delattr__(self, name)
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
from threading import current_thread, RLock
|
evalkit_cambrian/lib/python3.10/abc.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Abstract Base Classes (ABCs) according to PEP 3119."""
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def abstractmethod(funcobj):
    """Mark *funcobj* as abstract and return it unchanged.

    Requires that the owning class's metaclass is ABCMeta or derived
    from it: such a class cannot be instantiated until every method
    flagged this way has been overridden.  The abstract methods remain
    callable through the usual 'super' mechanisms, and the decorator
    also works on the functions underlying properties and descriptors.

    Usage:

        class C(metaclass=ABCMeta):
            @abstractmethod
            def my_abstract_method(self, ...):
                ...
    """
    # ABCMeta collects attributes carrying this flag into
    # __abstractmethods__ when the class is created.
    setattr(funcobj, '__isabstractmethod__', True)
    return funcobj
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class abstractclassmethod(classmethod):
    """A classmethod descriptor whose underlying function is abstract.

    Deprecated: stack 'classmethod' on top of 'abstractmethod' instead:

        class C(ABC):
            @classmethod
            @abstractmethod
            def my_abstract_classmethod(cls, ...):
                ...

    """

    # The descriptor itself advertises as abstract so ABCMeta counts it.
    __isabstractmethod__ = True

    def __init__(self, callable):
        # Flag the wrapped function too, mirroring abstractmethod().
        setattr(callable, '__isabstractmethod__', True)
        super().__init__(callable)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class abstractstaticmethod(staticmethod):
    """A staticmethod descriptor whose underlying function is abstract.

    Deprecated: stack 'staticmethod' on top of 'abstractmethod' instead:

        class C(ABC):
            @staticmethod
            @abstractmethod
            def my_abstract_staticmethod(...):
                ...

    """

    # The descriptor itself advertises as abstract so ABCMeta counts it.
    __isabstractmethod__ = True

    def __init__(self, callable):
        # Flag the wrapped function too, mirroring abstractmethod().
        setattr(callable, '__isabstractmethod__', True)
        super().__init__(callable)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class abstractproperty(property):
    """A property subclass recognized as abstract by ABCMeta.

    Deprecated: stack 'property' on top of 'abstractmethod' instead:

        class C(ABC):
            @property
            @abstractmethod
            def my_abstract_property(self):
                ...

    """

    # ABCMeta collects attributes carrying this flag into
    # __abstractmethods__; no __init__ override is needed.
    __isabstractmethod__ = True
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
# Prefer the C-accelerated ABC machinery from _abc; fall back to the
# pure-Python implementation in _py_abc when it is unavailable.
try:
    from _abc import (get_cache_token, _abc_init, _abc_register,
                      _abc_instancecheck, _abc_subclasscheck, _get_dump,
                      _reset_registry, _reset_caches)
except ImportError:
    # Pure-Python fallback supplies a complete ABCMeta of its own.
    from _py_abc import ABCMeta, get_cache_token
    ABCMeta.__module__ = 'abc'
else:
    class ABCMeta(type):
        """Metaclass for defining Abstract Base Classes (ABCs).

        Use this metaclass to create an ABC.  An ABC can be subclassed
        directly, and then acts as a mix-in class.  You can also register
        unrelated concrete classes (even built-in classes) and unrelated
        ABCs as 'virtual subclasses' -- these and their descendants will
        be considered subclasses of the registering ABC by the built-in
        issubclass() function, but the registering ABC won't show up in
        their MRO (Method Resolution Order) nor will method
        implementations defined by the registering ABC be callable (not
        even via super()).
        """
        def __new__(mcls, name, bases, namespace, **kwargs):
            cls = super().__new__(mcls, name, bases, namespace, **kwargs)
            # Let the C machinery set up the abstract-method bookkeeping.
            _abc_init(cls)
            return cls

        def register(cls, subclass):
            """Register a virtual subclass of an ABC.

            Returns the subclass, to allow usage as a class decorator.
            """
            return _abc_register(cls, subclass)

        def __instancecheck__(cls, instance):
            """Override for isinstance(instance, cls)."""
            return _abc_instancecheck(cls, instance)

        def __subclasscheck__(cls, subclass):
            """Override for issubclass(subclass, cls)."""
            return _abc_subclasscheck(cls, subclass)

        def _dump_registry(cls, file=None):
            """Debug helper to print the ABC registry."""
            print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
            print(f"Inv. counter: {get_cache_token()}", file=file)
            (_abc_registry, _abc_cache, _abc_negative_cache,
             _abc_negative_cache_version) = _get_dump(cls)
            print(f"_abc_registry: {_abc_registry!r}", file=file)
            print(f"_abc_cache: {_abc_cache!r}", file=file)
            print(f"_abc_negative_cache: {_abc_negative_cache!r}", file=file)
            print(f"_abc_negative_cache_version: {_abc_negative_cache_version!r}",
                  file=file)

        def _abc_registry_clear(cls):
            """Clear the registry (for debugging or testing)."""
            _reset_registry(cls)

        def _abc_caches_clear(cls):
            """Clear the caches (for debugging or testing)."""
            _reset_caches(cls)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def update_abstractmethods(cls):
|
| 147 |
+
"""Recalculate the set of abstract methods of an abstract class.
|
| 148 |
+
|
| 149 |
+
If a class has had one of its abstract methods implemented after the
|
| 150 |
+
class was created, the method will not be considered implemented until
|
| 151 |
+
this function is called. Alternatively, if a new abstract method has been
|
| 152 |
+
added to the class, it will only be considered an abstract method of the
|
| 153 |
+
class after this function is called.
|
| 154 |
+
|
| 155 |
+
This function should be called before any use is made of the class,
|
| 156 |
+
usually in class decorators that add methods to the subject class.
|
| 157 |
+
|
| 158 |
+
Returns cls, to allow usage as a class decorator.
|
| 159 |
+
|
| 160 |
+
If cls is not an instance of ABCMeta, does nothing.
|
| 161 |
+
"""
|
| 162 |
+
if not hasattr(cls, '__abstractmethods__'):
|
| 163 |
+
# We check for __abstractmethods__ here because cls might by a C
|
| 164 |
+
# implementation or a python implementation (especially during
|
| 165 |
+
# testing), and we want to handle both cases.
|
| 166 |
+
return cls
|
| 167 |
+
|
| 168 |
+
abstracts = set()
|
| 169 |
+
# Check the existing abstract methods of the parents, keep only the ones
|
| 170 |
+
# that are not implemented.
|
| 171 |
+
for scls in cls.__bases__:
|
| 172 |
+
for name in getattr(scls, '__abstractmethods__', ()):
|
| 173 |
+
value = getattr(cls, name, None)
|
| 174 |
+
if getattr(value, "__isabstractmethod__", False):
|
| 175 |
+
abstracts.add(name)
|
| 176 |
+
# Also add any other newly added abstract methods.
|
| 177 |
+
for name, value in cls.__dict__.items():
|
| 178 |
+
if getattr(value, "__isabstractmethod__", False):
|
| 179 |
+
abstracts.add(name)
|
| 180 |
+
cls.__abstractmethods__ = frozenset(abstracts)
|
| 181 |
+
return cls
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class ABC(metaclass=ABCMeta):
    """Helper class that provides a standard way to create an ABC using
    inheritance.
    """
    # Contributes no per-instance __dict__, so subclasses remain free
    # to declare their own __slots__.
    __slots__ = ()
|
evalkit_cambrian/lib/python3.10/ast.py
ADDED
|
@@ -0,0 +1,1709 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ast
|
| 3 |
+
~~~
|
| 4 |
+
|
| 5 |
+
The `ast` module helps Python applications to process trees of the Python
|
| 6 |
+
abstract syntax grammar. The abstract syntax itself might change with
|
| 7 |
+
each Python release; this module helps to find out programmatically what
|
| 8 |
+
the current grammar looks like and allows modifications of it.
|
| 9 |
+
|
| 10 |
+
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
|
| 11 |
+
a flag to the `compile()` builtin function or by using the `parse()`
|
| 12 |
+
function from this module. The result will be a tree of objects whose
|
| 13 |
+
classes all inherit from `ast.AST`.
|
| 14 |
+
|
| 15 |
+
A modified abstract syntax tree can be compiled into a Python code object
|
| 16 |
+
using the built-in `compile()` function.
|
| 17 |
+
|
| 18 |
+
Additionally various helper functions are provided that make working with
|
| 19 |
+
the trees simpler. The main intention of the helper functions and this
|
| 20 |
+
module in general is to provide an easy to use interface for libraries
|
| 21 |
+
that work tightly with the python syntax (template engines for example).
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
:copyright: Copyright 2008 by Armin Ronacher.
|
| 25 |
+
:license: Python License.
|
| 26 |
+
"""
|
| 27 |
+
import sys
|
| 28 |
+
from _ast import *
|
| 29 |
+
from contextlib import contextmanager, nullcontext
|
| 30 |
+
from enum import IntEnum, auto
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def parse(source, filename='<unknown>', mode='exec', *,
          type_comments=False, feature_version=None):
    """
    Parse the source into an AST node.
    Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
    Pass type_comments=True to get back type comments where the syntax allows.
    """
    flags = PyCF_ONLY_AST | (PyCF_TYPE_COMMENTS if type_comments else 0)
    if feature_version is None:
        feature_version = -1
    elif isinstance(feature_version, tuple):
        # A (major, minor) pair; only 3.x grammars are supported.
        major, minor = feature_version
        assert major == 3
        feature_version = minor
    # Otherwise feature_version is already an int minor version for 3.x.
    return compile(source, filename, mode, flags,
                   _feature_version=feature_version)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def literal_eval(node_or_string):
    """
    Evaluate an expression node or a string containing only a Python
    expression.  The string or node provided may only consist of the following
    Python literal structures: strings, bytes, numbers, tuples, lists, dicts,
    sets, booleans, and None.

    Caution: A complex expression can overflow the C stack and cause a crash.
    """
    if isinstance(node_or_string, str):
        # Leading whitespace would be a syntax error in 'eval' mode.
        node_or_string = parse(node_or_string.lstrip(" \t"), mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.body
    def _raise_malformed_node(node):
        # Report the offending node, including its line number when known.
        msg = "malformed node or string"
        if lno := getattr(node, 'lineno', None):
            msg += f' on line {lno}'
        raise ValueError(msg + f': {node!r}')
    def _convert_num(node):
        # Accept only plain numeric constants.  type() is compared exactly
        # (not isinstance), so bool — an int subclass — is rejected here.
        if not isinstance(node, Constant) or type(node.value) not in (int, float, complex):
            _raise_malformed_node(node)
        return node.value
    def _convert_signed_num(node):
        # Allow an optional unary +/- in front of a numeric constant.
        if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
            operand = _convert_num(node.operand)
            if isinstance(node.op, UAdd):
                return + operand
            else:
                return - operand
        return _convert_num(node)
    def _convert(node):
        # General converter for any allowed literal node.
        if isinstance(node, Constant):
            return node.value
        elif isinstance(node, Tuple):
            return tuple(map(_convert, node.elts))
        elif isinstance(node, List):
            return list(map(_convert, node.elts))
        elif isinstance(node, Set):
            return set(map(_convert, node.elts))
        elif (isinstance(node, Call) and isinstance(node.func, Name) and
              node.func.id == 'set' and node.args == node.keywords == []):
            # There is no literal syntax for an empty set, so allow set().
            return set()
        elif isinstance(node, Dict):
            if len(node.keys) != len(node.values):
                # A dict display using ** unpacking has fewer keys than values.
                _raise_malformed_node(node)
            return dict(zip(map(_convert, node.keys),
                            map(_convert, node.values)))
        elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)):
            # Support complex literals written as e.g. 1 + 2j or 1 - 2j.
            left = _convert_signed_num(node.left)
            right = _convert_num(node.right)
            if isinstance(left, (int, float)) and isinstance(right, complex):
                if isinstance(node.op, Add):
                    return left + right
                else:
                    return left - right
        return _convert_signed_num(node)
    return _convert(node_or_string)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def dump(node, annotate_fields=True, include_attributes=False, *, indent=None):
    """
    Return a formatted dump of the tree in node.  This is mainly useful for
    debugging purposes.  If annotate_fields is true (by default),
    the returned string will show the names and the values for fields.
    If annotate_fields is false, the result string will be more compact by
    omitting unambiguous field names.  Attributes such as line
    numbers and column offsets are not dumped by default.  If this is wanted,
    include_attributes can be set to true.  If indent is a non-negative
    integer or string, then the tree will be pretty-printed with that indent
    level. None (the default) selects the single line representation.
    """
    def _format(node, level=0):
        # Returns (text, simple): *simple* is True when the rendering is
        # trivially small and may be kept on one line by the caller.
        if indent is not None:
            level += 1
            prefix = '\n' + indent * level
            sep = ',\n' + indent * level
        else:
            prefix = ''
            sep = ', '
        if isinstance(node, AST):
            cls = type(node)
            args = []
            allsimple = True
            keywords = annotate_fields
            for name in node._fields:
                try:
                    value = getattr(node, name)
                except AttributeError:
                    # Skipped field: all following fields must be rendered
                    # as keywords so the output stays unambiguous.
                    keywords = True
                    continue
                if value is None and getattr(cls, name, ...) is None:
                    # Field equals its class-level None default; omit it.
                    keywords = True
                    continue
                value, simple = _format(value, level)
                allsimple = allsimple and simple
                if keywords:
                    args.append('%s=%s' % (name, value))
                else:
                    args.append(value)
            if include_attributes and node._attributes:
                for name in node._attributes:
                    try:
                        value = getattr(node, name)
                    except AttributeError:
                        continue
                    if value is None and getattr(cls, name, ...) is None:
                        continue
                    value, simple = _format(value, level)
                    allsimple = allsimple and simple
                    args.append('%s=%s' % (name, value))
            if allsimple and len(args) <= 3:
                # Keep trivially small nodes on a single line even when
                # an indent was requested.
                return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args
            return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False
        elif isinstance(node, list):
            if not node:
                return '[]', True
            return '[%s%s]' % (prefix, sep.join(_format(x, level)[0] for x in node)), False
        return repr(node), True

    if not isinstance(node, AST):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    if indent is not None and not isinstance(indent, str):
        # An integer indent means that many spaces per level.
        indent = ' ' * indent
    return _format(node)[0]
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def copy_location(new_node, old_node):
    """
    Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset`
    attributes) from *old_node* to *new_node* if possible, and return *new_node*.
    """
    for attr in ('lineno', 'col_offset', 'end_lineno', 'end_col_offset'):
        if attr not in old_node._attributes or attr not in new_node._attributes:
            continue
        value = getattr(old_node, attr, None)
        # end_lineno and end_col_offset are optional attributes, and they
        # should be copied whether the value is None or not.
        if value is None and not (attr.startswith("end_") and hasattr(old_node, attr)):
            continue
        setattr(new_node, attr, value)
    return new_node
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def fix_missing_locations(node):
    """
    When you compile a node tree with compile(), the compiler expects lineno and
    col_offset attributes for every node that supports them.  This is rather
    tedious to fill in for generated nodes, so this helper adds these attributes
    recursively where not already set, by setting them to the values of the
    parent node.  It works recursively starting at *node*.
    """
    def _fix(node, lineno, col_offset, end_lineno, end_col_offset):
        # For each location attribute: inherit the parent's value when this
        # node lacks one, otherwise propagate this node's value downwards.
        if 'lineno' in node._attributes:
            if not hasattr(node, 'lineno'):
                node.lineno = lineno
            else:
                lineno = node.lineno
        if 'end_lineno' in node._attributes:
            # End positions may be present but None; treat that as missing.
            if getattr(node, 'end_lineno', None) is None:
                node.end_lineno = end_lineno
            else:
                end_lineno = node.end_lineno
        if 'col_offset' in node._attributes:
            if not hasattr(node, 'col_offset'):
                node.col_offset = col_offset
            else:
                col_offset = node.col_offset
        if 'end_col_offset' in node._attributes:
            if getattr(node, 'end_col_offset', None) is None:
                node.end_col_offset = end_col_offset
            else:
                end_col_offset = node.end_col_offset
        for child in iter_child_nodes(node):
            _fix(child, lineno, col_offset, end_lineno, end_col_offset)
    # The root defaults to line 1, column 0.
    _fix(node, 1, 0, 1, 0)
    return node
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def increment_lineno(node, n=1):
    """
    Increment the line number and end line number of each node in the tree
    starting at *node* by *n*.  This is useful to "move code" to a different
    location in a file.
    """
    for child in walk(node):
        # TypeIgnore is a special case where lineno is not an attribute
        # but rather a field of the node itself.
        if isinstance(child, TypeIgnore):
            child.lineno = getattr(child, 'lineno', 0) + n
            continue

        if 'lineno' in child._attributes:
            child.lineno = getattr(child, 'lineno', 0) + n
        if (
            "end_lineno" in child._attributes
            # end_lineno may be present but None; leave those untouched.
            and (end_lineno := getattr(child, "end_lineno", 0)) is not None
        ):
            child.end_lineno = end_lineno + n
    return node
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def iter_fields(node):
|
| 256 |
+
"""
|
| 257 |
+
Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``
|
| 258 |
+
that is present on *node*.
|
| 259 |
+
"""
|
| 260 |
+
for field in node._fields:
|
| 261 |
+
try:
|
| 262 |
+
yield field, getattr(node, field)
|
| 263 |
+
except AttributeError:
|
| 264 |
+
pass
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def iter_child_nodes(node):
    """
    Yield all direct child nodes of *node*, that is, all fields that are nodes
    and all items of fields that are lists of nodes.
    """
    for _name, value in iter_fields(node):
        if isinstance(value, AST):
            yield value
        elif isinstance(value, list):
            # Only AST items count; other list contents are skipped.
            yield from (item for item in value if isinstance(item, AST))
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def get_docstring(node, clean=True):
    """
    Return the docstring for the given node or None if no docstring can
    be found.  If the node provided does not have docstrings a TypeError
    will be raised.

    If *clean* is `True`, all tabs are expanded to spaces and any whitespace
    that can be uniformly removed from the second line onwards is removed.
    """
    if not isinstance(node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)):
        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    if not(node.body and isinstance(node.body[0], Expr)):
        return None
    node = node.body[0].value
    if isinstance(node, Str):
        # Deprecated ast.Str nodes expose the text through .s.
        text = node.s
    elif isinstance(node, Constant) and isinstance(node.value, str):
        text = node.value
    else:
        return None
    if clean:
        # Imported lazily; cleandoc normalizes indentation and tabs.
        import inspect
        text = inspect.cleandoc(text)
    return text
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def _splitlines_no_ff(source):
|
| 308 |
+
"""Split a string into lines ignoring form feed and other chars.
|
| 309 |
+
|
| 310 |
+
This mimics how the Python parser splits source code.
|
| 311 |
+
"""
|
| 312 |
+
idx = 0
|
| 313 |
+
lines = []
|
| 314 |
+
next_line = ''
|
| 315 |
+
while idx < len(source):
|
| 316 |
+
c = source[idx]
|
| 317 |
+
next_line += c
|
| 318 |
+
idx += 1
|
| 319 |
+
# Keep \r\n together
|
| 320 |
+
if c == '\r' and idx < len(source) and source[idx] == '\n':
|
| 321 |
+
next_line += '\n'
|
| 322 |
+
idx += 1
|
| 323 |
+
if c in '\r\n':
|
| 324 |
+
lines.append(next_line)
|
| 325 |
+
next_line = ''
|
| 326 |
+
|
| 327 |
+
if next_line:
|
| 328 |
+
lines.append(next_line)
|
| 329 |
+
return lines
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def _pad_whitespace(source):
|
| 333 |
+
r"""Replace all chars except '\f\t' in a line with spaces."""
|
| 334 |
+
result = ''
|
| 335 |
+
for c in source:
|
| 336 |
+
if c in '\f\t':
|
| 337 |
+
result += c
|
| 338 |
+
else:
|
| 339 |
+
result += ' '
|
| 340 |
+
return result
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def get_source_segment(source, node, *, padded=False):
    """Get source code segment of the *source* that generated *node*.

    If some location information (`lineno`, `end_lineno`, `col_offset`,
    or `end_col_offset`) is missing, return None.

    If *padded* is `True`, the first line of a multi-line statement will
    be padded with spaces to match its original position.
    """
    try:
        if node.end_lineno is None or node.end_col_offset is None:
            return None
        lineno = node.lineno - 1            # locations are 1-based, lists 0-based
        end_lineno = node.end_lineno - 1
        col_offset = node.col_offset
        end_col_offset = node.end_col_offset
    except AttributeError:
        return None

    lines = _splitlines_no_ff(source)
    # Column offsets are applied to the encoded bytes of each line, hence
    # the encode()/decode() round-trips below.
    if end_lineno == lineno:
        return lines[lineno].encode()[col_offset:end_col_offset].decode()

    if padded:
        padding = _pad_whitespace(lines[lineno].encode()[:col_offset].decode())
    else:
        padding = ''

    first = padding + lines[lineno].encode()[col_offset:].decode()
    last = lines[end_lineno].encode()[:end_col_offset].decode()
    lines = lines[lineno+1:end_lineno]

    lines.insert(0, first)
    lines.append(last)
    return ''.join(lines)
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
def walk(node):
    """
    Recursively yield all descendant nodes in the tree starting at *node*
    (including *node* itself), in no specified order.  This is useful if you
    only want to modify nodes in place and don't care about the context.
    """
    from collections import deque
    # Breadth-first worklist seeded with the root.
    pending = deque([node])
    while pending:
        current = pending.popleft()
        pending.extend(iter_child_nodes(current))
        yield current
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
class NodeVisitor(object):
    """
    A node visitor base class that walks the abstract syntax tree and calls a
    visitor function for every node found.  This function may return a value
    which is forwarded by the `visit` method.

    This class is meant to be subclassed, with the subclass adding visitor
    methods.

    Per default the visitor functions for the nodes are ``'visit_'`` +
    class name of the node.  So a `TryFinally` node visit function would
    be `visit_TryFinally`.  This behavior can be changed by overriding
    the `visit` method.  If no visitor function exists for a node
    (return value `None`) the `generic_visit` visitor is used instead.

    Don't use the `NodeVisitor` if you want to apply changes to nodes during
    traversing.  For this a special visitor exists (`NodeTransformer`) that
    allows modifications.
    """

    def visit(self, node):
        """Visit a node."""
        # Dispatch on the node's class name, falling back to generic_visit.
        method = 'visit_' + node.__class__.__name__
        visitor = getattr(self, method, self.generic_visit)
        return visitor(node)

    def generic_visit(self, node):
        """Called if no explicit visitor function exists for a node."""
        for field, value in iter_fields(node):
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, AST):
                        self.visit(item)
            elif isinstance(value, AST):
                self.visit(value)

    def visit_Constant(self, node):
        # Backward compatibility: if a subclass still defines a deprecated
        # visit_Num/visit_Str/... method, dispatch to it with a warning.
        value = node.value
        type_name = _const_node_type_names.get(type(value))
        if type_name is None:
            # Exact type lookup failed (e.g. a subclass of int); fall back
            # to an isinstance() scan over the table.
            for cls, name in _const_node_type_names.items():
                if isinstance(value, cls):
                    type_name = name
                    break
        if type_name is not None:
            method = 'visit_' + type_name
            try:
                visitor = getattr(self, method)
            except AttributeError:
                pass
            else:
                import warnings
                warnings.warn(f"{method} is deprecated; add visit_Constant",
                              DeprecationWarning, 2)
                return visitor(node)
        return self.generic_visit(node)
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
class NodeTransformer(NodeVisitor):
    """
    A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
    allows modification of nodes.

    The `NodeTransformer` will walk the AST and use the return value of the
    visitor methods to replace or remove the old node.  If the return value of
    the visitor method is ``None``, the node will be removed from its location,
    otherwise it is replaced with the return value.  The return value may be the
    original node in which case no replacement takes place.

    Here is an example transformer that rewrites all occurrences of name lookups
    (``foo``) to ``data['foo']``::

       class RewriteName(NodeTransformer):

           def visit_Name(self, node):
               return Subscript(
                   value=Name(id='data', ctx=Load()),
                   slice=Constant(value=node.id),
                   ctx=node.ctx
               )

    Keep in mind that if the node you're operating on has child nodes you must
    either transform the child nodes yourself or call the :meth:`generic_visit`
    method for the node first.

    For nodes that were part of a collection of statements (that applies to all
    statement nodes), the visitor may also return a list of nodes rather than
    just a single node.

    Usually you use the transformer like this::

       node = YourTransformer().visit(node)
    """

    def generic_visit(self, node):
        for field, old_value in iter_fields(node):
            if isinstance(old_value, list):
                new_values = []
                for value in old_value:
                    if isinstance(value, AST):
                        value = self.visit(value)
                        if value is None:
                            # The visitor asked for removal.
                            continue
                        elif not isinstance(value, AST):
                            # The visitor returned a list of replacements.
                            new_values.extend(value)
                            continue
                    new_values.append(value)
                # Mutate the field list in place so the node keeps its list.
                old_value[:] = new_values
            elif isinstance(old_value, AST):
                new_node = self.visit(old_value)
                if new_node is None:
                    delattr(node, field)
                else:
                    setattr(node, field, new_node)
        return node
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
# If the ast module is loaded more than once, only add deprecated methods once
if not hasattr(Constant, 'n'):
    # The following code is for backward compatibility.
    # It will be removed in future.

    def _getter(self):
        """Deprecated. Use value instead."""
        return self.value

    def _setter(self, value):
        self.value = value

    # Legacy Num.n / Str.s access: both alias Constant.value.
    Constant.n = property(_getter, _setter)
    Constant.s = property(_getter, _setter)
|
| 525 |
+
|
| 526 |
+
# Metaclass for the deprecated Constant aliases (Num, Str, ...): it forces
# a shared deprecation docstring and makes isinstance() inspect the type of
# Constant.value rather than the node class.
class _ABC(type):

    def __init__(cls, *args):
        cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead"""

    def __instancecheck__(cls, inst):
        if not isinstance(inst, Constant):
            return False
        if cls in _const_types:
            # Match on the constant's value type, honoring exclusions in
            # _const_types_not (e.g. bool is not a Num).
            try:
                value = inst.value
            except AttributeError:
                return False
            else:
                return (
                    isinstance(value, _const_types[cls]) and
                    not isinstance(value, _const_types_not.get(cls, ()))
                )
        return type.__instancecheck__(cls, inst)
|
| 545 |
+
|
| 546 |
+
def _new(cls, *args, **kwargs):
    # Shared __new__ for the deprecated Constant aliases: validates that a
    # keyword does not duplicate a positional argument, then produces a
    # plain Constant for the known alias classes.
    for key in kwargs:
        if key not in cls._fields:
            # arbitrary keyword arguments are accepted
            continue
        pos = cls._fields.index(key)
        if pos < len(args):
            raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}")
    if cls in _const_types:
        # Instantiating e.g. Num(...) actually returns a Constant.
        return Constant(*args, **kwargs)
    return Constant.__new__(cls, *args, **kwargs)
|
| 557 |
+
|
| 558 |
+
# Deprecated alias of Constant for numeric literals; _ABC rewrites its
# docstring and redirects isinstance() checks to the value type.
class Num(Constant, metaclass=_ABC):
    _fields = ('n',)   # legacy field name, aliased to .value via Constant.n
    __new__ = _new     # constructing Num(...) yields a Constant
|
| 561 |
+
|
| 562 |
+
# Deprecated alias of Constant for str literals.
class Str(Constant, metaclass=_ABC):
    _fields = ('s',)   # legacy field name, aliased to .value via Constant.s
    __new__ = _new     # constructing Str(...) yields a Constant
|
| 565 |
+
|
| 566 |
+
# Deprecated alias of Constant for bytes literals.
class Bytes(Constant, metaclass=_ABC):
    _fields = ('s',)   # legacy field name, aliased to .value via Constant.s
    __new__ = _new     # constructing Bytes(...) yields a Constant
|
| 569 |
+
|
| 570 |
+
# Deprecated alias of Constant for None/True/False literals.
class NameConstant(Constant, metaclass=_ABC):
    __new__ = _new     # constructing NameConstant(...) yields a Constant
|
| 572 |
+
|
| 573 |
+
# Deprecated alias for the Constant holding the ``...`` singleton.
# NOTE: intentionally shadows the builtin name ``Ellipsis`` in this module.
class Ellipsis(Constant, metaclass=_ABC):
    _fields = ()

    def __new__(cls, *args, **kwargs):
        if cls is Ellipsis:
            # Direct instantiation produces Constant(...).
            return Constant(..., *args, **kwargs)
        # Subclasses keep regular construction semantics.
        return Constant.__new__(cls, *args, **kwargs)
|
| 580 |
+
|
| 581 |
+
# Maps each deprecated alias class to the Constant.value types it matches
# (used by _ABC.__instancecheck__ and _new).
_const_types = {
    Num: (int, float, complex),
    Str: (str,),
    Bytes: (bytes,),
    NameConstant: (type(None), bool),
    Ellipsis: (type(...),),
}
# Value types explicitly excluded per alias: bool is an int subclass, but a
# bool Constant must not register as a Num.
_const_types_not = {
    Num: (bool,),
}
|
| 591 |
+
|
| 592 |
+
# Maps a Constant.value type to the deprecated node-class name used by
# NodeVisitor.visit_Constant when dispatching to legacy visit_* methods.
_const_node_type_names = {
    bool: 'NameConstant',  # should be before int
    type(None): 'NameConstant',
    int: 'Num',
    float: 'Num',
    complex: 'Num',
    str: 'Str',
    bytes: 'Bytes',
    type(...): 'Ellipsis',
}
|
| 602 |
+
|
| 603 |
+
# NOTE: intentionally shadows the builtin ``slice`` inside this module;
# kept only for backward compatibility.
class slice(AST):
    """Deprecated AST node class."""
|
| 605 |
+
|
| 606 |
+
class Index(slice):
    """Deprecated AST node class. Use the index value directly instead."""
    def __new__(cls, value, **kwargs):
        # Constructing Index(v) simply hands back v unchanged.
        return value
|
| 610 |
+
|
| 611 |
+
class ExtSlice(slice):
    """Deprecated AST node class. Use ast.Tuple instead."""
    def __new__(cls, dims=(), **kwargs):
        # Constructing ExtSlice(dims) builds the equivalent Tuple in Load
        # context instead.
        return Tuple(list(dims), Load(), **kwargs)
|
| 615 |
+
|
| 616 |
+
# If the ast module is loaded more than once, only add deprecated methods once
if not hasattr(Tuple, 'dims'):
    # The following code is for backward compatibility.
    # It will be removed in future.

    def _dims_getter(self):
        """Deprecated. Use elts instead."""
        return self.elts

    def _dims_setter(self, value):
        self.elts = value

    # Legacy ExtSlice-style access: Tuple.dims aliases Tuple.elts.
    Tuple.dims = property(_dims_getter, _dims_setter)
|
| 629 |
+
|
| 630 |
+
# Kept only so legacy code referencing ast.Suite still imports.
class Suite(mod):
    """Deprecated AST node class. Unused in Python 3."""
|
| 632 |
+
|
| 633 |
+
# Kept only so legacy code referencing ast.AugLoad still imports.
class AugLoad(expr_context):
    """Deprecated AST node class. Unused in Python 3."""
|
| 635 |
+
|
| 636 |
+
# Kept only so legacy code referencing ast.AugStore still imports.
class AugStore(expr_context):
    """Deprecated AST node class. Unused in Python 3."""
|
| 638 |
+
|
| 639 |
+
# Kept only so legacy code referencing ast.Param still imports.
class Param(expr_context):
    """Deprecated AST node class. Unused in Python 3."""
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
# Large float and imaginary literals get turned into infinities in the AST.
# We unparse those infinities to INFSTR.
# max_10_exp + 1 guarantees the written literal overflows back to inf.
_INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
|
| 646 |
+
|
| 647 |
+
class _Precedence(IntEnum):
    """Precedence table that originated from python grammar."""

    TUPLE = auto()
    YIELD = auto()           # 'yield', 'yield from'
    TEST = auto()            # 'if'-'else', 'lambda'
    OR = auto()              # 'or'
    AND = auto()             # 'and'
    NOT = auto()             # 'not'
    CMP = auto()             # '<', '>', '==', '>=', '<=', '!=',
                             # 'in', 'not in', 'is', 'is not'
    EXPR = auto()
    BOR = EXPR               # '|'
    BXOR = auto()            # '^'
    BAND = auto()            # '&'
    SHIFT = auto()           # '<<', '>>'
    ARITH = auto()           # '+', '-'
    TERM = auto()            # '*', '@', '/', '%', '//'
    FACTOR = auto()          # unary '+', '-', '~'
    POWER = auto()           # '**'
    AWAIT = auto()           # 'await'
    ATOM = auto()

    def next(self):
        """Return the next-tighter precedence level, saturating at the top."""
        try:
            return self.__class__(self + 1)
        except ValueError:
            # Already the highest level.
            return self
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
# String-quote delimiters, single-character and triple-quoted forms.
_SINGLE_QUOTES = ("'", '"')
_MULTI_QUOTES = ('"""', "'''")
_ALL_QUOTES = (*_SINGLE_QUOTES, *_MULTI_QUOTES)
|
| 680 |
+
|
| 681 |
+
class _Unparser(NodeVisitor):
|
| 682 |
+
"""Methods in this class recursively traverse an AST and
|
| 683 |
+
output source code for the abstract syntax; original formatting
|
| 684 |
+
is disregarded."""
|
| 685 |
+
|
| 686 |
+
def __init__(self, *, _avoid_backslashes=False):
    # Chunks of generated source; joined into a string by visit().
    self._source = []
    # Side buffer drained via the ``buffer`` property.
    self._buffer = []
    # Maps an AST node to the _Precedence context it is rendered in.
    self._precedences = {}
    # Maps a line number to its "ignore<tag>" type comment (see visit_Module).
    self._type_ignores = {}
    # Current indentation depth.
    self._indent = 0
    self._avoid_backslashes = _avoid_backslashes
|
| 693 |
+
|
| 694 |
+
def interleave(self, inter, f, seq):
    """Call f on each item in seq, calling inter() in between."""
    first = True
    for item in seq:
        if first:
            first = False
        else:
            inter()
        f(item)
|
| 705 |
+
|
| 706 |
+
def items_view(self, traverser, items):
    """Traverse and separate the given *items* with a comma and append it to
    the buffer. If *items* is a single item sequence, a trailing comma
    will be added."""
    if len(items) == 1:
        traverser(items[0])
        self.write(",")  # a 1-tuple needs the trailing comma
    else:
        self.interleave(lambda: self.write(", "), traverser, items)
|
| 715 |
+
|
| 716 |
+
def maybe_newline(self):
    """Adds a newline if it isn't the start of generated source"""
    if not self._source:
        return
    self.write("\n")
|
| 720 |
+
|
| 721 |
+
def fill(self, text=""):
    """Indent a piece of text and append it, according to the current
    indentation level"""
    # Starts a new line first (unless at the very beginning of output).
    self.maybe_newline()
    self.write(" " * self._indent + text)
|
| 726 |
+
|
| 727 |
+
def write(self, text):
    """Append a piece of text to the generated source."""
    self._source.append(text)
|
| 730 |
+
|
| 731 |
+
def buffer_writer(self, text):
    # Like write(), but targets the side buffer drained by ``buffer``.
    self._buffer.append(text)
|
| 733 |
+
|
| 734 |
+
@property
def buffer(self):
    """Return the accumulated side-buffer text, clearing the buffer."""
    value = "".join(self._buffer)
    self._buffer.clear()
    return value
|
| 739 |
+
|
| 740 |
+
@contextmanager
def block(self, *, extra = None):
    """A context manager for preparing the source for blocks. It adds
    the character':', increases the indentation on enter and decreases
    the indentation on exit. If *extra* is given, it will be directly
    appended after the colon character.
    """
    self.write(":")
    if extra:
        self.write(extra)
    self._indent += 1
    yield  # the with-block body emits the statements at the deeper indent
    self._indent -= 1
|
| 753 |
+
|
| 754 |
+
@contextmanager
def delimit(self, start, end):
    """A context manager for preparing the source for expressions. It adds
    *start* to the buffer and enters, after exit it adds *end*."""

    self.write(start)
    yield  # the with-block body emits the delimited expression
    self.write(end)
|
| 762 |
+
|
| 763 |
+
def delimit_if(self, start, end, condition):
    # Wrap in *start*/*end* only when *condition* holds; otherwise hand
    # back a no-op context manager.
    return self.delimit(start, end) if condition else nullcontext()
|
| 768 |
+
|
| 769 |
+
def require_parens(self, precedence, node):
    """Shortcut to adding precedence related parens"""
    # Parenthesize when the recorded context binds tighter than *precedence*.
    return self.delimit_if("(", ")", self.get_precedence(node) > precedence)
|
| 772 |
+
|
| 773 |
+
def get_precedence(self, node):
    # Nodes with no recorded context default to TEST precedence.
    return self._precedences.get(node, _Precedence.TEST)
|
| 775 |
+
|
| 776 |
+
def set_precedence(self, precedence, *nodes):
    """Record *precedence* as the rendering context of every given node."""
    self._precedences.update((node, precedence) for node in nodes)
|
| 779 |
+
|
| 780 |
+
def get_raw_docstring(self, node):
    """If a docstring node is found in the body of the *node* parameter,
    return that docstring node, None otherwise.

    Logic mirrored from ``_PyAST_GetDocString``."""
    if not isinstance(
        node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)
    ) or len(node.body) < 1:
        return None
    node = node.body[0]
    if not isinstance(node, Expr):
        return None
    node = node.value
    if isinstance(node, Constant) and isinstance(node.value, str):
        return node
    # Falls through to an implicit None for any other first statement.
|
| 795 |
+
|
| 796 |
+
def get_type_comment(self, node):
    # Prefer a "type: ignore" recorded for this line; fall back to the
    # node's own type_comment.  Returns the formatted comment, or an
    # implicit None when neither exists.
    comment = self._type_ignores.get(node.lineno) or node.type_comment
    if comment is not None:
        return f" # type: {comment}"
|
| 800 |
+
|
| 801 |
+
def traverse(self, node):
    # Lists of nodes are flattened element by element; a single node is
    # dispatched through NodeVisitor.visit (not the overridden visit below).
    if not isinstance(node, list):
        super().visit(node)
        return
    for item in node:
        self.traverse(item)
|
| 807 |
+
|
| 808 |
+
# Note: as visit() resets the output text, do NOT rely on
# NodeVisitor.generic_visit to handle any nodes (as it calls back in to
# the subclass visit() method, which resets self._source to an empty list)
def visit(self, node):
    """Outputs a source code string that, if converted back to an ast
    (using ast.parse) will generate an AST equivalent to *node*"""
    self._source = []
    self.traverse(node)
    return "".join(self._source)
|
| 817 |
+
|
| 818 |
+
def _write_docstring_and_traverse_body(self, node):
    # Emit a leading docstring (when present) specially, then the rest of
    # the body; otherwise just emit the whole body.
    if (docstring := self.get_raw_docstring(node)):
        self._write_docstring(docstring)
        self.traverse(node.body[1:])
    else:
        self.traverse(node.body)
|
| 824 |
+
|
| 825 |
+
def visit_Module(self, node):
    # Index the module's "type: ignore" comments by line so statement
    # visitors can re-attach them, then emit the module body.
    self._type_ignores = {
        ignore.lineno: f"ignore{ignore.tag}"
        for ignore in node.type_ignores
    }
    self._write_docstring_and_traverse_body(node)
    self._type_ignores.clear()
|
| 832 |
+
|
| 833 |
+
def visit_FunctionType(self, node):
    # Emits ``(argtypes) -> returns``.
    with self.delimit("(", ")"):
        self.interleave(
            lambda: self.write(", "), self.traverse, node.argtypes
        )

    self.write(" -> ")
    self.traverse(node.returns)
|
| 841 |
+
|
| 842 |
+
def visit_Expr(self, node):
    self.fill()
    # Statement context: a YIELD-level precedence lets yield expressions
    # render without parentheses here.
    self.set_precedence(_Precedence.YIELD, node.value)
    self.traverse(node.value)
|
| 846 |
+
|
| 847 |
+
def visit_NamedExpr(self, node):
    # Emits ``target := value``, parenthesized whenever the surrounding
    # context binds tighter than TUPLE; both sides render as atoms.
    with self.require_parens(_Precedence.TUPLE, node):
        self.set_precedence(_Precedence.ATOM, node.target, node.value)
        self.traverse(node.target)
        self.write(" := ")
        self.traverse(node.value)
|
| 853 |
+
|
| 854 |
+
def visit_Import(self, node):
    """Emit an ``import`` statement for every alias in *node*."""
    self.fill("import ")
    comma = lambda: self.write(", ")
    self.interleave(comma, self.traverse, node.names)
|
| 857 |
+
|
| 858 |
+
def visit_ImportFrom(self, node):
    # Emits ``from <dots><module> import names``; node.level counts the
    # leading relative-import dots (module may be absent for "from . import").
    self.fill("from ")
    self.write("." * (node.level or 0))
    if node.module:
        self.write(node.module)
    self.write(" import ")
    self.interleave(lambda: self.write(", "), self.traverse, node.names)
|
| 865 |
+
|
| 866 |
+
    def visit_Assign(self, node):
        """Write ``t1 = t2 = ... = value`` plus a trailing type comment."""
        self.fill()
        for target in node.targets:
            self.traverse(target)
            self.write(" = ")
        self.traverse(node.value)
        if type_comment := self.get_type_comment(node):
            self.write(type_comment)

    def visit_AugAssign(self, node):
        """Write an augmented assignment such as ``x += 1``."""
        self.fill()
        self.traverse(node.target)
        self.write(" " + self.binop[node.op.__class__.__name__] + "= ")
        self.traverse(node.value)

    def visit_AnnAssign(self, node):
        """Write an annotated assignment ``target: annotation [= value]``."""
        self.fill()
        # A non-simple Name target (e.g. "(x): int") must keep its parentheses.
        with self.delimit_if("(", ")", not node.simple and isinstance(node.target, Name)):
            self.traverse(node.target)
        self.write(": ")
        self.traverse(node.annotation)
        if node.value:
            self.write(" = ")
            self.traverse(node.value)
|
| 890 |
+
|
| 891 |
+
    def visit_Return(self, node):
        """Write ``return`` with an optional value."""
        self.fill("return")
        if node.value:
            self.write(" ")
            self.traverse(node.value)

    def visit_Pass(self, node):
        self.fill("pass")

    def visit_Break(self, node):
        self.fill("break")

    def visit_Continue(self, node):
        self.fill("continue")

    def visit_Delete(self, node):
        """Write ``del a, b, ...``."""
        self.fill("del ")
        self.interleave(lambda: self.write(", "), self.traverse, node.targets)
|
| 909 |
+
|
| 910 |
+
    def visit_Assert(self, node):
        """Write ``assert test[, msg]``."""
        self.fill("assert ")
        self.traverse(node.test)
        if node.msg:
            self.write(", ")
            self.traverse(node.msg)

    def visit_Global(self, node):
        """Write ``global name, ...`` (names are plain strings)."""
        self.fill("global ")
        self.interleave(lambda: self.write(", "), self.write, node.names)

    def visit_Nonlocal(self, node):
        """Write ``nonlocal name, ...`` (names are plain strings)."""
        self.fill("nonlocal ")
        self.interleave(lambda: self.write(", "), self.write, node.names)
|
| 924 |
+
|
| 925 |
+
    def visit_Await(self, node):
        """Write an ``await`` expression, parenthesized when required."""
        with self.require_parens(_Precedence.AWAIT, node):
            self.write("await")
            if node.value:
                self.write(" ")
                self.set_precedence(_Precedence.ATOM, node.value)
                self.traverse(node.value)

    def visit_Yield(self, node):
        """Write ``yield [value]``, parenthesized when required."""
        with self.require_parens(_Precedence.YIELD, node):
            self.write("yield")
            if node.value:
                self.write(" ")
                self.set_precedence(_Precedence.ATOM, node.value)
                self.traverse(node.value)

    def visit_YieldFrom(self, node):
        """Write ``yield from value``; a value is mandatory here."""
        with self.require_parens(_Precedence.YIELD, node):
            self.write("yield from ")
            if not node.value:
                raise ValueError("Node can't be used without a value attribute.")
            self.set_precedence(_Precedence.ATOM, node.value)
            self.traverse(node.value)
|
| 948 |
+
|
| 949 |
+
def visit_Raise(self, node):
|
| 950 |
+
self.fill("raise")
|
| 951 |
+
if not node.exc:
|
| 952 |
+
if node.cause:
|
| 953 |
+
raise ValueError(f"Node can't use cause without an exception.")
|
| 954 |
+
return
|
| 955 |
+
self.write(" ")
|
| 956 |
+
self.traverse(node.exc)
|
| 957 |
+
if node.cause:
|
| 958 |
+
self.write(" from ")
|
| 959 |
+
self.traverse(node.cause)
|
| 960 |
+
|
| 961 |
+
    def visit_Try(self, node):
        """Write a full try/except/else/finally statement."""
        self.fill("try")
        with self.block():
            self.traverse(node.body)
        for ex in node.handlers:
            self.traverse(ex)
        if node.orelse:
            self.fill("else")
            with self.block():
                self.traverse(node.orelse)
        if node.finalbody:
            self.fill("finally")
            with self.block():
                self.traverse(node.finalbody)

    def visit_ExceptHandler(self, node):
        """Write one ``except [type [as name]]`` clause and its body."""
        self.fill("except")
        if node.type:
            self.write(" ")
            self.traverse(node.type)
        if node.name:
            self.write(" as ")
            self.write(node.name)
        with self.block():
            self.traverse(node.body)
|
| 986 |
+
|
| 987 |
+
    def visit_ClassDef(self, node):
        """Write a class definition: decorators, header with bases and
        keyword arguments (e.g. ``metaclass=``), then the body."""
        self.maybe_newline()
        for deco in node.decorator_list:
            self.fill("@")
            self.traverse(deco)
        self.fill("class " + node.name)
        # Parentheses only when there is something to put inside them.
        with self.delimit_if("(", ")", condition = node.bases or node.keywords):
            comma = False
            for e in node.bases:
                if comma:
                    self.write(", ")
                else:
                    comma = True
                self.traverse(e)
            for e in node.keywords:
                if comma:
                    self.write(", ")
                else:
                    comma = True
                self.traverse(e)

        with self.block():
            self._write_docstring_and_traverse_body(node)
|
| 1010 |
+
|
| 1011 |
+
    def visit_FunctionDef(self, node):
        self._function_helper(node, "def")

    def visit_AsyncFunctionDef(self, node):
        self._function_helper(node, "async def")

    def _function_helper(self, node, fill_suffix):
        """Shared writer for sync/async function definitions.

        *fill_suffix* is the leading keyword ("def" or "async def").
        """
        self.maybe_newline()
        for deco in node.decorator_list:
            self.fill("@")
            self.traverse(deco)
        def_str = fill_suffix + " " + node.name
        self.fill(def_str)
        with self.delimit("(", ")"):
            self.traverse(node.args)
        if node.returns:
            self.write(" -> ")
            self.traverse(node.returns)
        # A function type comment (if any) is emitted right after the colon.
        with self.block(extra=self.get_type_comment(node)):
            self._write_docstring_and_traverse_body(node)
|
| 1031 |
+
|
| 1032 |
+
    def visit_For(self, node):
        self._for_helper("for ", node)

    def visit_AsyncFor(self, node):
        self._for_helper("async for ", node)

    def _for_helper(self, fill, node):
        """Shared writer for ``for``/``async for`` loops, including the
        optional ``else`` clause; *fill* is the loop keyword prefix."""
        self.fill(fill)
        self.traverse(node.target)
        self.write(" in ")
        self.traverse(node.iter)
        with self.block(extra=self.get_type_comment(node)):
            self.traverse(node.body)
        if node.orelse:
            self.fill("else")
            with self.block():
                self.traverse(node.orelse)
|
| 1049 |
+
|
| 1050 |
+
    def visit_If(self, node):
        """Write an if/elif/else chain.

        A nested ``if`` that is the sole statement of an ``else`` branch is
        rendered as ``elif`` to reproduce the idiomatic source form.
        """
        self.fill("if ")
        self.traverse(node.test)
        with self.block():
            self.traverse(node.body)
        # collapse nested ifs into equivalent elifs.
        while node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], If):
            node = node.orelse[0]
            self.fill("elif ")
            self.traverse(node.test)
            with self.block():
                self.traverse(node.body)
        # final else
        if node.orelse:
            self.fill("else")
            with self.block():
                self.traverse(node.orelse)
|
| 1067 |
+
|
| 1068 |
+
    def visit_While(self, node):
        """Write a while loop with its optional ``else`` clause."""
        self.fill("while ")
        self.traverse(node.test)
        with self.block():
            self.traverse(node.body)
        if node.orelse:
            self.fill("else")
            with self.block():
                self.traverse(node.orelse)

    def visit_With(self, node):
        """Write ``with item, item, ...:`` and the body."""
        self.fill("with ")
        self.interleave(lambda: self.write(", "), self.traverse, node.items)
        with self.block(extra=self.get_type_comment(node)):
            self.traverse(node.body)

    def visit_AsyncWith(self, node):
        """Write ``async with item, ...:`` and the body."""
        self.fill("async with ")
        self.interleave(lambda: self.write(", "), self.traverse, node.items)
        with self.block(extra=self.get_type_comment(node)):
            self.traverse(node.body)
|
| 1089 |
+
|
| 1090 |
+
    def _str_literal_helper(
        self, string, *, quote_types=_ALL_QUOTES, escape_special_whitespace=False
    ):
        """Helper for writing string literals, minimizing escapes.
        Returns the tuple (string literal to write, possible quote types).
        """
        def escape_char(c):
            # \n and \t are non-printable, but we only escape them if
            # escape_special_whitespace is True
            if not escape_special_whitespace and c in "\n\t":
                return c
            # Always escape backslashes and other non-printable characters
            if c == "\\" or not c.isprintable():
                return c.encode("unicode_escape").decode("ascii")
            return c

        escaped_string = "".join(map(escape_char, string))
        possible_quotes = quote_types
        if "\n" in escaped_string:
            # Literal newlines force a triple-quoted form.
            possible_quotes = [q for q in possible_quotes if q in _MULTI_QUOTES]
        # Drop quote styles that appear verbatim inside the string.
        possible_quotes = [q for q in possible_quotes if q not in escaped_string]
        if not possible_quotes:
            # If there aren't any possible_quotes, fallback to using repr
            # on the original string. Try to use a quote from quote_types,
            # e.g., so that we use triple quotes for docstrings.
            string = repr(string)
            quote = next((q for q in quote_types if string[0] in q), string[0])
            return string[1:-1], [quote]
        if escaped_string:
            # Sort so that we prefer '''"''' over """\""""
            possible_quotes.sort(key=lambda q: q[0] == escaped_string[-1])
            # If we're using triple quotes and we'd need to escape a final
            # quote, escape it
            if possible_quotes[0][0] == escaped_string[-1]:
                assert len(possible_quotes[0]) == 3
                escaped_string = escaped_string[:-1] + "\\" + escaped_string[-1]
        return escaped_string, possible_quotes
|
| 1127 |
+
|
| 1128 |
+
    def _write_str_avoiding_backslashes(self, string, *, quote_types=_ALL_QUOTES):
        """Write string literal value with a best effort attempt to avoid backslashes."""
        string, quote_types = self._str_literal_helper(string, quote_types=quote_types)
        # The helper orders quote_types by preference; take the best one.
        quote_type = quote_types[0]
        self.write(f"{quote_type}{string}{quote_type}")
|
| 1133 |
+
|
| 1134 |
+
    def visit_JoinedStr(self, node):
        """Write an f-string literal."""
        self.write("f")
        if self._avoid_backslashes:
            # Render the whole f-string into the side buffer, then emit it
            # with a quote choice that needs no backslashes.
            self._fstring_JoinedStr(node, self.buffer_writer)
            self._write_str_avoiding_backslashes(self.buffer)
            return

        # If we don't need to avoid backslashes globally (i.e., we only need
        # to avoid them inside FormattedValues), it's cosmetically preferred
        # to use escaped whitespace. That is, it's preferred to use backslashes
        # for cases like: f"{x}\n". To accomplish this, we keep track of what
        # in our buffer corresponds to FormattedValues and what corresponds to
        # Constant parts of the f-string, and allow escapes accordingly.
        buffer = []
        for value in node.values:
            meth = getattr(self, "_fstring_" + type(value).__name__)
            meth(value, self.buffer_writer)
            buffer.append((self.buffer, isinstance(value, Constant)))
        new_buffer = []
        quote_types = _ALL_QUOTES
        for value, is_constant in buffer:
            # Repeatedly narrow down the list of possible quote_types
            value, quote_types = self._str_literal_helper(
                value, quote_types=quote_types,
                escape_special_whitespace=is_constant
            )
            new_buffer.append(value)
        value = "".join(new_buffer)
        quote_type = quote_types[0]
        self.write(f"{quote_type}{value}{quote_type}")
|
| 1164 |
+
|
| 1165 |
+
    def visit_FormattedValue(self, node):
        """Write a lone formatted value as a single-element f-string."""
        self.write("f")
        self._fstring_FormattedValue(node, self.buffer_writer)
        self._write_str_avoiding_backslashes(self.buffer)

    def _fstring_JoinedStr(self, node, write):
        # Dispatch each f-string part to the matching _fstring_* writer.
        for value in node.values:
            meth = getattr(self, "_fstring_" + type(value).__name__)
            meth(value, write)

    def _fstring_Constant(self, node, write):
        """Write a constant part of an f-string, doubling literal braces."""
        if not isinstance(node.value, str):
            raise ValueError("Constants inside JoinedStr should be a string.")
        value = node.value.replace("{", "{{").replace("}", "}}")
        write(value)

    def _fstring_FormattedValue(self, node, write):
        """Write a ``{expr[!conv][:spec]}`` replacement field."""
        write("{")
        # The inner expression may not contain backslashes, so unparse it
        # with a sub-unparser in backslash-avoiding mode.
        unparser = type(self)(_avoid_backslashes=True)
        unparser.set_precedence(_Precedence.TEST.next(), node.value)
        expr = unparser.visit(node.value)
        if expr.startswith("{"):
            write(" ")  # Separate pair of opening brackets as "{ {"
        if "\\" in expr:
            raise ValueError("Unable to avoid backslash in f-string expression part")
        write(expr)
        # conversion is -1 when absent; otherwise the ord of 's', 'r' or 'a'.
        if node.conversion != -1:
            conversion = chr(node.conversion)
            if conversion not in "sra":
                raise ValueError("Unknown f-string conversion.")
            write(f"!{conversion}")
        if node.format_spec:
            write(":")
            meth = getattr(self, "_fstring_" + type(node.format_spec).__name__)
            meth(node.format_spec, write)
        write("}")
|
| 1201 |
+
|
| 1202 |
+
    def visit_Name(self, node):
        self.write(node.id)

    def _write_docstring(self, node):
        """Write a docstring Constant with triple quotes, preserving a
        ``u`` prefix when the source had one."""
        self.fill()
        if node.kind == "u":
            self.write("u")
        self._write_str_avoiding_backslashes(node.value, quote_types=_MULTI_QUOTES)
|
| 1210 |
+
|
| 1211 |
+
    def _write_constant(self, value):
        """Write a single constant value in re-parseable form."""
        if isinstance(value, (float, complex)):
            # Substitute overflowing decimal literal for AST infinities,
            # and inf - inf for NaNs.
            self.write(
                repr(value)
                .replace("inf", _INFSTR)
                .replace("nan", f"({_INFSTR}-{_INFSTR})")
            )
        elif self._avoid_backslashes and isinstance(value, str):
            self._write_str_avoiding_backslashes(value)
        else:
            self.write(repr(value))

    def visit_Constant(self, node):
        """Write a Constant node: tuples element-wise, ``...`` specially,
        everything else through _write_constant."""
        value = node.value
        if isinstance(value, tuple):
            with self.delimit("(", ")"):
                self.items_view(self._write_constant, value)
        elif value is ...:
            self.write("...")
        else:
            # Preserve an explicit u-prefix on string constants.
            if node.kind == "u":
                self.write("u")
            self._write_constant(node.value)
|
| 1236 |
+
|
| 1237 |
+
    def visit_List(self, node):
        """Write a list display ``[a, b, ...]``."""
        with self.delimit("[", "]"):
            self.interleave(lambda: self.write(", "), self.traverse, node.elts)

    def visit_ListComp(self, node):
        """Write ``[elt for ... if ...]``; generators render themselves."""
        with self.delimit("[", "]"):
            self.traverse(node.elt)
            for gen in node.generators:
                self.traverse(gen)

    def visit_GeneratorExp(self, node):
        """Write a generator expression, always parenthesized."""
        with self.delimit("(", ")"):
            self.traverse(node.elt)
            for gen in node.generators:
                self.traverse(gen)

    def visit_SetComp(self, node):
        """Write ``{elt for ...}``."""
        with self.delimit("{", "}"):
            self.traverse(node.elt)
            for gen in node.generators:
                self.traverse(gen)

    def visit_DictComp(self, node):
        """Write ``{key: value for ...}``."""
        with self.delimit("{", "}"):
            self.traverse(node.key)
            self.write(": ")
            self.traverse(node.value)
            for gen in node.generators:
                self.traverse(gen)
|
| 1266 |
+
|
| 1267 |
+
    def visit_comprehension(self, node):
        """Write one ``[async] for target in iter [if cond]*`` clause."""
        if node.is_async:
            self.write(" async for ")
        else:
            self.write(" for ")
        self.set_precedence(_Precedence.TUPLE, node.target)
        self.traverse(node.target)
        self.write(" in ")
        # The iterable and conditions bind tighter than a bare conditional.
        self.set_precedence(_Precedence.TEST.next(), node.iter, *node.ifs)
        self.traverse(node.iter)
        for if_clause in node.ifs:
            self.write(" if ")
            self.traverse(if_clause)
|
| 1280 |
+
|
| 1281 |
+
    def visit_IfExp(self, node):
        """Write a conditional expression ``body if test else orelse``."""
        with self.require_parens(_Precedence.TEST, node):
            self.set_precedence(_Precedence.TEST.next(), node.body, node.test)
            self.traverse(node.body)
            self.write(" if ")
            self.traverse(node.test)
            self.write(" else ")
            self.set_precedence(_Precedence.TEST, node.orelse)
            self.traverse(node.orelse)

    def visit_Set(self, node):
        """Write a set display; the empty set has no literal form."""
        if node.elts:
            with self.delimit("{", "}"):
                self.interleave(lambda: self.write(", "), self.traverse, node.elts)
        else:
            # `{}` would be interpreted as a dictionary literal, and
            # `set` might be shadowed. Thus:
            self.write('{*()}')
|
| 1299 |
+
|
| 1300 |
+
    def visit_Dict(self, node):
        """Write a dict display, handling ``**mapping`` unpacking entries."""
        def write_key_value_pair(k, v):
            self.traverse(k)
            self.write(": ")
            self.traverse(v)

        def write_item(item):
            k, v = item
            if k is None:
                # for dictionary unpacking operator in dicts {**{'y': 2}}
                # see PEP 448 for details
                self.write("**")
                self.set_precedence(_Precedence.EXPR, v)
                self.traverse(v)
            else:
                write_key_value_pair(k, v)

        with self.delimit("{", "}"):
            self.interleave(
                lambda: self.write(", "), write_item, zip(node.keys, node.values)
            )

    def visit_Tuple(self, node):
        """Write a tuple; items_view adds the trailing comma for singletons."""
        with self.delimit("(", ")"):
            self.items_view(self.traverse, node.elts)
|
| 1325 |
+
|
| 1326 |
+
    # AST unary-operator class name -> surface syntax.
    unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
    # Surface syntax -> precedence level used for parenthesization.
    unop_precedence = {
        "not": _Precedence.NOT,
        "~": _Precedence.FACTOR,
        "+": _Precedence.FACTOR,
        "-": _Precedence.FACTOR,
    }

    def visit_UnaryOp(self, node):
        """Write a unary operation, parenthesizing by operator precedence."""
        operator = self.unop[node.op.__class__.__name__]
        operator_precedence = self.unop_precedence[operator]
        with self.require_parens(operator_precedence, node):
            self.write(operator)
            # factor prefixes (+, -, ~) shouldn't be separated
            # from the value they belong, (e.g: +1 instead of + 1)
            if operator_precedence is not _Precedence.FACTOR:
                self.write(" ")
            self.set_precedence(operator_precedence, node.operand)
            self.traverse(node.operand)
|
| 1345 |
+
|
| 1346 |
+
    # AST binary-operator class name -> surface syntax.
    binop = {
        "Add": "+",
        "Sub": "-",
        "Mult": "*",
        "MatMult": "@",
        "Div": "/",
        "Mod": "%",
        "LShift": "<<",
        "RShift": ">>",
        "BitOr": "|",
        "BitXor": "^",
        "BitAnd": "&",
        "FloorDiv": "//",
        "Pow": "**",
    }

    # Surface syntax -> precedence level used for parenthesization.
    binop_precedence = {
        "+": _Precedence.ARITH,
        "-": _Precedence.ARITH,
        "*": _Precedence.TERM,
        "@": _Precedence.TERM,
        "/": _Precedence.TERM,
        "%": _Precedence.TERM,
        "<<": _Precedence.SHIFT,
        ">>": _Precedence.SHIFT,
        "|": _Precedence.BOR,
        "^": _Precedence.BXOR,
        "&": _Precedence.BAND,
        "//": _Precedence.TERM,
        "**": _Precedence.POWER,
    }

    # Right-associative operators: the *left* operand needs the tighter level.
    binop_rassoc = frozenset(("**",))
    def visit_BinOp(self, node):
        """Write a binary operation, choosing operand precedences so that
        associativity is preserved without redundant parentheses."""
        operator = self.binop[node.op.__class__.__name__]
        operator_precedence = self.binop_precedence[operator]
        with self.require_parens(operator_precedence, node):
            if operator in self.binop_rassoc:
                left_precedence = operator_precedence.next()
                right_precedence = operator_precedence
            else:
                left_precedence = operator_precedence
                right_precedence = operator_precedence.next()

            self.set_precedence(left_precedence, node.left)
            self.traverse(node.left)
            self.write(f" {operator} ")
            self.set_precedence(right_precedence, node.right)
            self.traverse(node.right)
|
| 1395 |
+
|
| 1396 |
+
    # AST comparison-operator class name -> surface syntax.
    cmpops = {
        "Eq": "==",
        "NotEq": "!=",
        "Lt": "<",
        "LtE": "<=",
        "Gt": ">",
        "GtE": ">=",
        "Is": "is",
        "IsNot": "is not",
        "In": "in",
        "NotIn": "not in",
    }

    def visit_Compare(self, node):
        """Write a (possibly chained) comparison such as ``a < b <= c``."""
        with self.require_parens(_Precedence.CMP, node):
            self.set_precedence(_Precedence.CMP.next(), node.left, *node.comparators)
            self.traverse(node.left)
            for o, e in zip(node.ops, node.comparators):
                self.write(" " + self.cmpops[o.__class__.__name__] + " ")
                self.traverse(e)

    # Boolean-operator tables, analogous to binop/binop_precedence above.
    boolops = {"And": "and", "Or": "or"}
    boolop_precedence = {"and": _Precedence.AND, "or": _Precedence.OR}
|
| 1419 |
+
|
| 1420 |
+
    def visit_BoolOp(self, node):
        """Write an ``and``/``or`` chain, bumping the precedence for each
        successive operand so nesting parenthesizes correctly."""
        operator = self.boolops[node.op.__class__.__name__]
        operator_precedence = self.boolop_precedence[operator]

        def increasing_level_traverse(node):
            nonlocal operator_precedence
            operator_precedence = operator_precedence.next()
            self.set_precedence(operator_precedence, node)
            self.traverse(node)

        with self.require_parens(operator_precedence, node):
            s = f" {operator} "
            self.interleave(lambda: self.write(s), increasing_level_traverse, node.values)
|
| 1433 |
+
|
| 1434 |
+
    def visit_Attribute(self, node):
        """Write ``value.attr``."""
        self.set_precedence(_Precedence.ATOM, node.value)
        self.traverse(node.value)
        # Special case: 3.__abs__() is a syntax error, so if node.value
        # is an integer literal then we need to either parenthesize
        # it or add an extra space to get 3 .__abs__().
        if isinstance(node.value, Constant) and isinstance(node.value.value, int):
            self.write(" ")
        self.write(".")
        self.write(node.attr)
|
| 1444 |
+
|
| 1445 |
+
    def visit_Call(self, node):
        """Write a call: positional arguments first, then keywords."""
        self.set_precedence(_Precedence.ATOM, node.func)
        self.traverse(node.func)
        with self.delimit("(", ")"):
            comma = False
            for e in node.args:
                if comma:
                    self.write(", ")
                else:
                    comma = True
                self.traverse(e)
            for e in node.keywords:
                if comma:
                    self.write(", ")
                else:
                    comma = True
                self.traverse(e)
|
| 1462 |
+
|
| 1463 |
+
    def visit_Subscript(self, node):
        """Write ``value[slice]``; a simple tuple slice drops its parens."""
        def is_simple_tuple(slice_value):
            # when unparsing a non-empty tuple, the parentheses can be safely
            # omitted if there aren't any elements that explicitly requires
            # parentheses (such as starred expressions).
            return (
                isinstance(slice_value, Tuple)
                and slice_value.elts
                and not any(isinstance(elt, Starred) for elt in slice_value.elts)
            )

        self.set_precedence(_Precedence.ATOM, node.value)
        self.traverse(node.value)
        with self.delimit("[", "]"):
            if is_simple_tuple(node.slice):
                self.items_view(self.traverse, node.slice.elts)
            else:
                self.traverse(node.slice)

    def visit_Starred(self, node):
        """Write a starred expression ``*value``."""
        self.write("*")
        self.set_precedence(_Precedence.EXPR, node.value)
        self.traverse(node.value)

    def visit_Ellipsis(self, node):
        # Legacy node type; modern ASTs represent ... as Constant.
        self.write("...")
|
| 1489 |
+
|
| 1490 |
+
    def visit_Slice(self, node):
        """Write ``lower:upper[:step]`` with every part optional."""
        if node.lower:
            self.traverse(node.lower)
        self.write(":")
        if node.upper:
            self.traverse(node.upper)
        # A trailing colon is only written when a step is present.
        if node.step:
            self.write(":")
            self.traverse(node.step)
|
| 1499 |
+
|
| 1500 |
+
    def visit_Match(self, node):
        """Write a ``match`` statement and its ``case`` blocks."""
        self.fill("match ")
        self.traverse(node.subject)
        with self.block():
            for case in node.cases:
                self.traverse(case)

    def visit_arg(self, node):
        """Write a single parameter, with its annotation if present."""
        self.write(node.arg)
        if node.annotation:
            self.write(": ")
            self.traverse(node.annotation)
|
| 1512 |
+
|
| 1513 |
+
    def visit_arguments(self, node):
        """Write a full parameter list: positional-only args followed by
        ``/``, normal args with defaults, ``*args`` (or a bare ``*``),
        keyword-only args, and ``**kwargs``."""
        first = True
        # normal arguments
        all_args = node.posonlyargs + node.args
        # Defaults align with the *tail* of the combined positional list.
        defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults
        for index, elements in enumerate(zip(all_args, defaults), 1):
            a, d = elements
            if first:
                first = False
            else:
                self.write(", ")
            self.traverse(a)
            if d:
                self.write("=")
                self.traverse(d)
            if index == len(node.posonlyargs):
                # End of the positional-only section.
                self.write(", /")

        # varargs, or bare '*' if no varargs but keyword-only arguments present
        if node.vararg or node.kwonlyargs:
            if first:
                first = False
            else:
                self.write(", ")
            self.write("*")
            if node.vararg:
                self.write(node.vararg.arg)
                if node.vararg.annotation:
                    self.write(": ")
                    self.traverse(node.vararg.annotation)

        # keyword-only arguments
        if node.kwonlyargs:
            for a, d in zip(node.kwonlyargs, node.kw_defaults):
                self.write(", ")
                self.traverse(a)
                if d:
                    self.write("=")
                    self.traverse(d)

        # kwargs
        if node.kwarg:
            if first:
                first = False
            else:
                self.write(", ")
            self.write("**" + node.kwarg.arg)
            if node.kwarg.annotation:
                self.write(": ")
                self.traverse(node.kwarg.annotation)
|
| 1563 |
+
|
| 1564 |
+
    def visit_keyword(self, node):
        """Write a keyword argument; ``arg is None`` means ``**value``."""
        if node.arg is None:
            self.write("**")
        else:
            self.write(node.arg)
            self.write("=")
        self.traverse(node.value)

    def visit_Lambda(self, node):
        """Write ``lambda args: body``, parenthesized when required."""
        with self.require_parens(_Precedence.TEST, node):
            self.write("lambda ")
            self.traverse(node.args)
            self.write(": ")
            self.set_precedence(_Precedence.TEST, node.body)
            self.traverse(node.body)

    def visit_alias(self, node):
        """Write an import alias ``name [as asname]``."""
        self.write(node.name)
        if node.asname:
            self.write(" as " + node.asname)

    def visit_withitem(self, node):
        """Write ``context_expr [as optional_vars]``."""
        self.traverse(node.context_expr)
        if node.optional_vars:
            self.write(" as ")
            self.traverse(node.optional_vars)
|
| 1590 |
+
|
| 1591 |
+
    def visit_match_case(self, node):
        """Write ``case pattern [if guard]:`` and the case body."""
        self.fill("case ")
        self.traverse(node.pattern)
        if node.guard:
            self.write(" if ")
            self.traverse(node.guard)
        with self.block():
            self.traverse(node.body)

    def visit_MatchValue(self, node):
        self.traverse(node.value)

    def visit_MatchSingleton(self, node):
        # True/False/None matched by identity.
        self._write_constant(node.value)

    def visit_MatchSequence(self, node):
        """Write a sequence pattern using bracket syntax."""
        with self.delimit("[", "]"):
            self.interleave(
                lambda: self.write(", "), self.traverse, node.patterns
            )

    def visit_MatchStar(self, node):
        """Write ``*name`` (or the wildcard ``*_``) inside a sequence pattern."""
        name = node.name
        if name is None:
            name = "_"
        self.write(f"*{name}")
|
| 1617 |
+
|
| 1618 |
+
    def visit_MatchMapping(self, node):
        """Write a mapping pattern ``{key: pat, ..., **rest}``."""
        def write_key_pattern_pair(pair):
            k, p = pair
            self.traverse(k)
            self.write(": ")
            self.traverse(p)

        with self.delimit("{", "}"):
            keys = node.keys
            self.interleave(
                lambda: self.write(", "),
                write_key_pattern_pair,
                zip(keys, node.patterns, strict=True),
            )
            rest = node.rest
            if rest is not None:
                if keys:
                    self.write(", ")
                self.write(f"**{rest}")

    def visit_MatchClass(self, node):
        """Write a class pattern ``Cls(pos_pats, attr=pat, ...)``."""
        self.set_precedence(_Precedence.ATOM, node.cls)
        self.traverse(node.cls)
        with self.delimit("(", ")"):
            patterns = node.patterns
            self.interleave(
                lambda: self.write(", "), self.traverse, patterns
            )
            attrs = node.kwd_attrs
            if attrs:
                def write_attr_pattern(pair):
                    attr, pattern = pair
                    self.write(f"{attr}=")
                    self.traverse(pattern)

                if patterns:
                    self.write(", ")
                self.interleave(
                    lambda: self.write(", "),
                    write_attr_pattern,
                    zip(attrs, node.kwd_patterns, strict=True),
                )
|
| 1660 |
+
|
| 1661 |
+
    def visit_MatchAs(self, node):
        """Write a capture/AS pattern: ``_`` (wildcard), a bare name, or
        ``pattern as name``."""
        name = node.name
        pattern = node.pattern
        if name is None:
            self.write("_")
        elif pattern is None:
            self.write(node.name)
        else:
            with self.require_parens(_Precedence.TEST, node):
                self.set_precedence(_Precedence.BOR, node.pattern)
                self.traverse(node.pattern)
                self.write(f" as {node.name}")

    def visit_MatchOr(self, node):
        """Write an or-pattern ``p1 | p2 | ...``."""
        with self.require_parens(_Precedence.BOR, node):
            self.set_precedence(_Precedence.BOR.next(), *node.patterns)
            self.interleave(lambda: self.write(" | "), self.traverse, node.patterns)
|
| 1678 |
+
|
| 1679 |
+
def unparse(ast_obj):
    """Unparse an AST object and return a source-code string that, when
    parsed again, produces an equivalent AST."""
    unparser = _Unparser()
    return unparser.visit(ast_obj)
|
| 1682 |
+
|
| 1683 |
+
|
| 1684 |
+
def main():
    """Command-line entry point: parse a file (or stdin) and dump its AST."""
    import argparse

    parser = argparse.ArgumentParser(prog='python -m ast')
    parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
                        default='-',
                        help='the file to parse; defaults to stdin')
    parser.add_argument('-m', '--mode', default='exec',
                        choices=('exec', 'single', 'eval', 'func_type'),
                        help='specify what kind of code must be parsed')
    # store_false with default=True: the flag *disables* type comments.
    parser.add_argument('--no-type-comments', default=True, action='store_false',
                        help="don't add information about type comments")
    parser.add_argument('-a', '--include-attributes', action='store_true',
                        help='include attributes such as line numbers and '
                             'column offsets')
    parser.add_argument('-i', '--indent', type=int, default=3,
                        help='indentation of nodes (number of spaces)')
    args = parser.parse_args()

    with args.infile as infile:
        source = infile.read()
    tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
    print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
|
| 1707 |
+
|
| 1708 |
+
if __name__ == '__main__':
|
| 1709 |
+
main()
|
evalkit_cambrian/lib/python3.10/binhex.py
ADDED
|
@@ -0,0 +1,502 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Macintosh binhex compression/decompression.
|
| 2 |
+
|
| 3 |
+
easy interface:
|
| 4 |
+
binhex(inputfilename, outputfilename)
|
| 5 |
+
hexbin(inputfilename, outputfilename)
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
#
|
| 9 |
+
# Jack Jansen, CWI, August 1995.
|
| 10 |
+
#
|
| 11 |
+
# The module is supposed to be as compatible as possible. Especially the
|
| 12 |
+
# easy interface should work "as expected" on any platform.
|
| 13 |
+
# XXXX Note: currently, textfiles appear in mac-form on all platforms.
|
| 14 |
+
# We seem to lack a simple character-translate in python.
|
| 15 |
+
# (we should probably use ISO-Latin-1 on all but the mac platform).
|
| 16 |
+
# XXXX The simple routines are too simple: they expect to hold the complete
|
| 17 |
+
# files in-core. Should be fixed.
|
| 18 |
+
# XXXX It would be nice to handle AppleDouble format on unix
|
| 19 |
+
# (for servers serving macs).
|
| 20 |
+
# XXXX I don't understand what happens when you get 0x90 times the same byte on
|
| 21 |
+
# input. The resulting code (xx 90 90) would appear to be interpreted as an
|
| 22 |
+
# escaped *value* of 0x90. All coders I've seen appear to ignore this nicety...
|
| 23 |
+
#
|
| 24 |
+
import binascii
|
| 25 |
+
import contextlib
|
| 26 |
+
import io
|
| 27 |
+
import os
|
| 28 |
+
import struct
|
| 29 |
+
import warnings
|
| 30 |
+
|
| 31 |
+
warnings.warn('the binhex module is deprecated', DeprecationWarning,
|
| 32 |
+
stacklevel=2)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
__all__ = ["binhex","hexbin","Error"]
|
| 36 |
+
|
| 37 |
+
class Error(Exception):
    """Raised for any binhex encoding or decoding failure."""
|
| 39 |
+
|
| 40 |
+
# States (what have we written)
|
| 41 |
+
_DID_HEADER = 0
|
| 42 |
+
_DID_DATA = 1
|
| 43 |
+
|
| 44 |
+
# Various constants
|
| 45 |
+
REASONABLY_LARGE = 32768 # Minimal amount we pass the rle-coder
|
| 46 |
+
LINELEN = 64
|
| 47 |
+
RUNCHAR = b"\x90"
|
| 48 |
+
|
| 49 |
+
#
|
| 50 |
+
# This code is no longer byte-order dependent
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class FInfo:
    """Minimal stand-in for a Macintosh Finder information record."""

    def __init__(self):
        # Unknown type/creator codes and no Finder flags by default.
        self.Type = '????'
        self.Creator = '????'
        self.Flags = 0
|
| 58 |
+
|
| 59 |
+
def getfileinfo(name):
    """Return (filename, FInfo, data-fork size, resource-fork size).

    The first 512 bytes are sniffed for NUL bytes to guess whether the
    file is text; the resource fork is always empty on non-Mac systems.
    """
    finfo = FInfo()
    with io.open(name, 'rb') as fp:
        head = fp.read(512)
        if 0 not in head:
            # No NUL bytes in the first block: assume a text file.
            finfo.Type = 'TEXT'
        fp.seek(0, 2)
        dsize = fp.tell()
    basename = os.path.split(name)[1]
    basename = basename.replace(':', '-', 1)
    return basename, finfo, dsize, 0
|
| 71 |
+
|
| 72 |
+
class openrsrc:
    """Dummy resource-fork file: reads are empty, writes are ignored.

    Used on platforms (everything but classic MacOS) that have no
    resource forks.
    """

    def __init__(self, *args):
        # Accept and ignore any open()-style arguments.
        pass

    def read(self, *args):
        return b''

    def write(self, *args):
        pass

    def close(self):
        pass
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# DeprecationWarning is already emitted on "import binhex". There is no need
|
| 87 |
+
# to repeat the warning at each call to deprecated binascii functions.
|
| 88 |
+
@contextlib.contextmanager
|
| 89 |
+
def _ignore_deprecation_warning():
|
| 90 |
+
with warnings.catch_warnings():
|
| 91 |
+
warnings.filterwarnings('ignore', '', DeprecationWarning)
|
| 92 |
+
yield
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class _Hqxcoderengine:
    """Feed data to binascii.b2a_hqx in 3-byte groups, wrapping output lines."""

    def __init__(self, ofp):
        self.ofp = ofp
        self.data = b''       # raw bytes not yet encoded (always < 3 left over)
        self.hqxdata = b''    # encoded bytes not yet written out
        # The first output line is one column short: it follows the ':'.
        self.linelen = LINELEN - 1

    def write(self, data):
        self.data += data
        usable = len(self.data) - len(self.data) % 3
        chunk, self.data = self.data[:usable], self.data[usable:]
        if not chunk:
            return
        with _ignore_deprecation_warning():
            self.hqxdata += binascii.b2a_hqx(chunk)
        self._flush(0)

    def _flush(self, force):
        """Emit full lines; with force, also emit the trailing partial line."""
        pos = 0
        while pos <= len(self.hqxdata) - self.linelen:
            end = pos + self.linelen
            self.ofp.write(self.hqxdata[pos:end] + b'\r')
            self.linelen = LINELEN
            pos = end
        self.hqxdata = self.hqxdata[pos:]
        if force:
            self.ofp.write(self.hqxdata + b':\r')

    def close(self):
        if self.data:
            with _ignore_deprecation_warning():
                self.hqxdata += binascii.b2a_hqx(self.data)
        self._flush(1)
        self.ofp.close()
        del self.ofp
|
| 134 |
+
|
| 135 |
+
class _Rlecoderengine:
    """Buffer writes and run-length encode them in reasonably large chunks."""

    def __init__(self, ofp):
        self.ofp = ofp
        self.data = b''

    def write(self, data):
        self.data += data
        if len(self.data) < REASONABLY_LARGE:
            # Accumulate more input before paying for an encode pass.
            return
        with _ignore_deprecation_warning():
            encoded = binascii.rlecode_hqx(self.data)
        self.ofp.write(encoded)
        self.data = b''

    def close(self):
        if self.data:
            with _ignore_deprecation_warning():
                encoded = binascii.rlecode_hqx(self.data)
            self.ofp.write(encoded)
        self.ofp.close()
        del self.ofp
|
| 158 |
+
|
| 159 |
+
class BinHex:
    """Incrementally write a BinHex 4.0 encoded file.

    Construct with a ``(name, finfo, dlen, rlen)`` tuple and an output
    file (name or binary file object).  Then ``write()`` exactly *dlen*
    data-fork bytes, call ``close_data()``, ``write_rsrc()`` exactly
    *rlen* resource-fork bytes, and finally ``close()``.
    """

    def __init__(self, name_finfo_dlen_rlen, ofp):
        name, finfo, dlen, rlen = name_finfo_dlen_rlen
        close_on_error = False
        if isinstance(ofp, str):
            ofname = ofp
            ofp = io.open(ofname, 'wb')
            close_on_error = True
        try:
            ofp.write(b'(This file must be converted with BinHex 4.0)\r\r:')
            hqxer = _Hqxcoderengine(ofp)
            self.ofp = _Rlecoderengine(hqxer)
            self.crc = 0
            if finfo is None:
                finfo = FInfo()
            self.dlen = dlen
            self.rlen = rlen
            self._writeinfo(name, finfo)
            self.state = _DID_HEADER
        except:
            # Only close the file if we opened it ourselves.
            if close_on_error:
                ofp.close()
            raise

    def _writeinfo(self, name, finfo):
        """Emit the header record: filename, type/creator, flags, sizes."""
        nl = len(name)
        if nl > 63:
            raise Error('Filename too long')
        d = bytes([nl]) + name.encode("latin-1") + b'\0'
        tp, cr = finfo.Type, finfo.Creator
        if isinstance(tp, str):
            tp = tp.encode("latin-1")
        if isinstance(cr, str):
            cr = cr.encode("latin-1")
        d2 = tp + cr

        # Force all structs to be packed with big-endian
        d3 = struct.pack('>h', finfo.Flags)
        d4 = struct.pack('>ii', self.dlen, self.rlen)
        info = d + d2 + d3 + d4
        self._write(info)
        self._writecrc()

    def _write(self, data):
        """Write raw bytes, folding them into the running CRC."""
        self.crc = binascii.crc_hqx(data, self.crc)
        self.ofp.write(data)

    def _writecrc(self):
        """Emit the 16-bit CRC accumulated so far and reset it."""
        # XXXX Should this be here??
        # self.crc = binascii.crc_hqx('\0\0', self.crc)
        if self.crc < 0:
            fmt = '>h'
        else:
            fmt = '>H'
        self.ofp.write(struct.pack(fmt, self.crc))
        self.crc = 0

    def write(self, data):
        """Write data-fork bytes; the total must equal the declared dlen."""
        if self.state != _DID_HEADER:
            raise Error('Writing data at the wrong time')
        self.dlen = self.dlen - len(data)
        self._write(data)

    def close_data(self):
        """Finish the data fork and write its CRC."""
        if self.dlen != 0:
            # Bug fix: report the *data* fork discrepancy (self.dlen);
            # the original mistakenly reported self.rlen here.
            raise Error('Incorrect data size, diff=%r' % (self.dlen,))
        self._writecrc()
        self.state = _DID_DATA

    def write_rsrc(self, data):
        """Write resource-fork bytes; the total must equal the declared rlen."""
        if self.state < _DID_DATA:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Writing resource data at the wrong time')
        self.rlen = self.rlen - len(data)
        self._write(data)

    def close(self):
        """Finish the resource fork, write its CRC, and close the output."""
        if self.state is None:
            return
        try:
            if self.state < _DID_DATA:
                self.close_data()
            if self.state != _DID_DATA:
                raise Error('Close at the wrong time')
            if self.rlen != 0:
                raise Error("Incorrect resource-datasize, diff=%r" % (self.rlen,))
            self._writecrc()
        finally:
            self.state = None
            ofp = self.ofp
            del self.ofp
            ofp.close()
|
| 252 |
+
|
| 253 |
+
def binhex(inp, out):
    """binhex(infilename, outfilename): create binhex-encoded copy of a file"""
    ofp = BinHex(getfileinfo(inp), out)

    with io.open(inp, 'rb') as ifp:
        # XXXX Do textfile translation on non-mac systems
        while True:
            chunk = ifp.read(128000)
            if not chunk:
                break
            ofp.write(chunk)
        ofp.close_data()

    rsrc = openrsrc(inp, 'rb')
    while True:
        chunk = rsrc.read(128000)
        if not chunk:
            break
        ofp.write_rsrc(chunk)
    ofp.close()
    rsrc.close()
|
| 273 |
+
|
| 274 |
+
class _Hqxdecoderengine:
    """Decode hqx-armored input, reading through newlines as needed."""

    def __init__(self, ifp):
        self.ifp = ifp
        self.eof = 0

    def read(self, totalwtd):
        """Read at least wtd bytes (or until EOF)"""
        decoded = b''
        wanted = totalwtd
        # We cannot know in advance how much encoded input yields the
        # wanted number of decoded bytes: the stream may contain
        # newlines that the decoder skips.
        while wanted > 0:
            if self.eof:
                return decoded
            wanted = ((wanted + 2) // 3) * 4
            raw = self.ifp.read(wanted)
            # The chunk may end mid-group; extend it one byte at a time
            # until the decoder accepts it.
            while True:
                try:
                    with _ignore_deprecation_warning():
                        piece, self.eof = binascii.a2b_hqx(raw)
                    break
                except binascii.Incomplete:
                    pass
                extra = self.ifp.read(1)
                if not extra:
                    raise Error('Premature EOF on binhex file')
                raw = raw + extra
            decoded = decoded + piece
            wanted = totalwtd - len(decoded)
            if not decoded and not self.eof:
                raise Error('Premature EOF on binhex file')
        return decoded

    def close(self):
        self.ifp.close()
|
| 316 |
+
|
| 317 |
+
class _Rledecoderengine:
    """Expand run-length encoded data read from the hqx decoder."""

    def __init__(self, ifp):
        self.ifp = ifp
        self.pre_buffer = b''    # encoded bytes not yet expanded
        self.post_buffer = b''   # expanded bytes not yet handed out
        self.eof = 0

    def read(self, wtd):
        if wtd > len(self.post_buffer):
            self._fill(wtd - len(self.post_buffer))
        rv, self.post_buffer = self.post_buffer[:wtd], self.post_buffer[wtd:]
        return rv

    def _fill(self, wtd):
        self.pre_buffer = self.pre_buffer + self.ifp.read(wtd + 4)
        if self.ifp.eof:
            # Nothing more is coming: decode everything we have.
            with _ignore_deprecation_warning():
                self.post_buffer = self.post_buffer + \
                    binascii.rledecode_hqx(self.pre_buffer)
            self.pre_buffer = b''
            return

        # Take care not to split a RUNCHAR escape across decode calls.
        # Depending on how the buffer ends, hold back up to three bytes:
        #   '\220\0\220' - Keep 3 bytes: repeated \220 (escaped as \220\0)
        #   '?\220'      - Keep 2 bytes: repeated something-else
        #   '\220\0'     - Escaped \220: Keep 2 bytes.
        #   '?\220?'     - Complete repeat sequence: decode all
        #   otherwise:     keep 1 byte.
        mark = len(self.pre_buffer)
        if self.pre_buffer[-3:] == RUNCHAR + b'\0' + RUNCHAR:
            mark = mark - 3
        elif self.pre_buffer[-1:] == RUNCHAR:
            mark = mark - 2
        elif self.pre_buffer[-2:] == RUNCHAR + b'\0':
            mark = mark - 2
        elif self.pre_buffer[-2:-1] == RUNCHAR:
            pass  # Complete repeat sequence: decode all
        else:
            mark = mark - 1

        with _ignore_deprecation_warning():
            self.post_buffer = self.post_buffer + \
                binascii.rledecode_hqx(self.pre_buffer[:mark])
        self.pre_buffer = self.pre_buffer[mark:]

    def close(self):
        self.ifp.close()
|
| 372 |
+
|
| 373 |
+
class HexBin:
    """Incrementally decode a BinHex 4.0 encoded file.

    After construction, ``FName`` and ``FInfo`` describe the encoded
    file; ``read()`` returns data-fork bytes, ``read_rsrc()`` returns
    resource-fork bytes, and ``close()`` verifies the final CRC.
    """

    def __init__(self, ifp):
        if isinstance(ifp, str):
            ifp = io.open(ifp, 'rb')
        #
        # Find initial colon.
        #
        while True:
            ch = ifp.read(1)
            if not ch:
                raise Error("No binhex data found")
            # Cater for \r\n terminated lines (which show up as \n\r, hence
            # all lines start with \r)
            if ch == b'\r':
                continue
            if ch == b':':
                break

        hqxifp = _Hqxdecoderengine(ifp)
        self.ifp = _Rledecoderengine(hqxifp)
        self.crc = 0
        self._readheader()

    def _read(self, nbytes):
        """Read *nbytes* decoded bytes, folding them into the running CRC."""
        # Renamed from 'len': the original shadowed the builtin.
        data = self.ifp.read(nbytes)
        self.crc = binascii.crc_hqx(data, self.crc)
        return data

    def _checkcrc(self):
        """Compare the stream's stored CRC with our accumulator, then reset."""
        filecrc = struct.unpack('>h', self.ifp.read(2))[0] & 0xffff
        #self.crc = binascii.crc_hqx('\0\0', self.crc)
        # XXXX Is this needed??
        self.crc = self.crc & 0xffff
        if filecrc != self.crc:
            raise Error('CRC error, computed %x, read %x'
                        % (self.crc, filecrc))
        self.crc = 0

    def _readheader(self):
        """Parse the header record: filename, type/creator, flags, sizes."""
        namelen = self._read(1)
        fname = self._read(ord(namelen))
        rest = self._read(1 + 4 + 4 + 2 + 4 + 4)
        self._checkcrc()

        # 'ftype' renamed from 'type': the original shadowed the builtin.
        ftype = rest[1:5]
        creator = rest[5:9]
        flags = struct.unpack('>h', rest[9:11])[0]
        self.dlen = struct.unpack('>l', rest[11:15])[0]
        self.rlen = struct.unpack('>l', rest[15:19])[0]

        self.FName = fname
        self.FInfo = FInfo()
        self.FInfo.Creator = creator
        self.FInfo.Type = ftype
        self.FInfo.Flags = flags

        self.state = _DID_HEADER

    def read(self, *n):
        """Read up to n (default: all remaining) data-fork bytes."""
        if self.state != _DID_HEADER:
            raise Error('Read data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.dlen)
        else:
            n = self.dlen
        rv = b''
        while len(rv) < n:
            rv = rv + self._read(n - len(rv))
        self.dlen = self.dlen - n
        return rv

    def close_data(self):
        """Skip any unread data-fork bytes and verify the data CRC."""
        if self.state != _DID_HEADER:
            raise Error('close_data at wrong time')
        if self.dlen:
            dummy = self._read(self.dlen)
        self._checkcrc()
        self.state = _DID_DATA

    def read_rsrc(self, *n):
        """Read up to n (default: all remaining) resource-fork bytes."""
        if self.state == _DID_HEADER:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Read resource data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.rlen)
        else:
            n = self.rlen
        self.rlen = self.rlen - n
        return self._read(n)

    def close(self):
        """Drain the resource fork, verify its CRC, and close the input."""
        if self.state is None:
            return
        try:
            if self.rlen:
                dummy = self.read_rsrc(self.rlen)
            self._checkcrc()
        finally:
            self.state = None
            self.ifp.close()
|
| 476 |
+
|
| 477 |
+
def hexbin(inp, out):
    """hexbin(infilename, outfilename) - Decode binhexed file"""
    ifp = HexBin(inp)
    finfo = ifp.FInfo  # read, as the original did (currently unused)
    if not out:
        out = ifp.FName

    with io.open(out, 'wb') as ofp:
        # XXXX Do translation on non-mac systems
        while True:
            chunk = ifp.read(128000)
            if not chunk:
                break
            ofp.write(chunk)
        ifp.close_data()

    chunk = ifp.read_rsrc(128000)
    if chunk:
        rfp = openrsrc(out, 'wb')
        rfp.write(chunk)
        while True:
            chunk = ifp.read_rsrc(128000)
            if not chunk:
                break
            rfp.write(chunk)
        rfp.close()

    ifp.close()
|
evalkit_cambrian/lib/python3.10/bz2.py
ADDED
|
@@ -0,0 +1,344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Interface to the libbzip2 compression library.
|
| 2 |
+
|
| 3 |
+
This module provides a file interface, classes for incremental
|
| 4 |
+
(de)compression, and functions for one-shot (de)compression.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor",
|
| 8 |
+
"open", "compress", "decompress"]
|
| 9 |
+
|
| 10 |
+
__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
|
| 11 |
+
|
| 12 |
+
from builtins import open as _builtin_open
|
| 13 |
+
import io
|
| 14 |
+
import os
|
| 15 |
+
import _compression
|
| 16 |
+
|
| 17 |
+
from _bz2 import BZ2Compressor, BZ2Decompressor
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
_MODE_CLOSED = 0
|
| 21 |
+
_MODE_READ = 1
|
| 22 |
+
# Value 2 no longer used
|
| 23 |
+
_MODE_WRITE = 3
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class BZ2File(_compression.BaseStream):
|
| 27 |
+
|
| 28 |
+
"""A file object providing transparent bzip2 (de)compression.
|
| 29 |
+
|
| 30 |
+
A BZ2File can act as a wrapper for an existing file object, or refer
|
| 31 |
+
directly to a named file on disk.
|
| 32 |
+
|
| 33 |
+
Note that BZ2File provides a *binary* file interface - data read is
|
| 34 |
+
returned as bytes, and data to be written should be given as bytes.
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
def __init__(self, filename, mode="r", *, compresslevel=9):
    """Open a bzip2-compressed file.

    If filename is a str, bytes, or PathLike object, it gives the
    name of the file to be opened. Otherwise, it should be a file
    object, which will be used to read or write the compressed data.

    mode can be 'r' for reading (default), 'w' for (over)writing,
    'x' for creating exclusively, or 'a' for appending. These can
    equivalently be given as 'rb', 'wb', 'xb', and 'ab'.

    If mode is 'w', 'x' or 'a', compresslevel can be a number between 1
    and 9 specifying the level of compression: 1 produces the least
    compression, and 9 (default) produces the most compression.

    If mode is 'r', the input file may be the concatenation of
    multiple compressed streams.
    """
    self._fp = None
    self._closefp = False
    self._mode = _MODE_CLOSED

    if not (1 <= compresslevel <= 9):
        raise ValueError("compresslevel must be between 1 and 9")

    if mode in ("", "r", "rb"):
        mode = "rb"
        mode_code = _MODE_READ
    elif mode in ("w", "wb", "x", "xb", "a", "ab"):
        # All write-style modes share the same compressor setup;
        # the original duplicated this branch three times.
        mode = mode[:1] + "b"
        mode_code = _MODE_WRITE
        self._compressor = BZ2Compressor(compresslevel)
    else:
        raise ValueError("Invalid mode: %r" % (mode,))

    if isinstance(filename, (str, bytes, os.PathLike)):
        self._fp = _builtin_open(filename, mode)
        self._closefp = True
        self._mode = mode_code
    elif hasattr(filename, "read") or hasattr(filename, "write"):
        self._fp = filename
        self._mode = mode_code
    else:
        raise TypeError("filename must be a str, bytes, file or PathLike object")

    if self._mode == _MODE_READ:
        raw = _compression.DecompressReader(self._fp,
            BZ2Decompressor, trailing_error=OSError)
        self._buffer = io.BufferedReader(raw)
    else:
        self._pos = 0
|
| 96 |
+
|
| 97 |
+
def close(self):
    """Flush and close the file.

    May be called more than once without error. Once the file is
    closed, any other operation on it will raise a ValueError.
    """
    if self._mode == _MODE_CLOSED:
        return
    try:
        # Flush pending compressed data (write mode) or release the
        # buffered reader (read mode) before touching the raw file.
        if self._mode == _MODE_WRITE:
            self._fp.write(self._compressor.flush())
            self._compressor = None
        elif self._mode == _MODE_READ:
            self._buffer.close()
    finally:
        try:
            if self._closefp:
                self._fp.close()
        finally:
            # Always reset state, even if closing the raw file failed.
            self._fp = None
            self._closefp = False
            self._mode = _MODE_CLOSED
            self._buffer = None
|
| 120 |
+
|
| 121 |
+
@property
def closed(self):
    """True if this file is closed."""
    # The mode flag is the single source of truth for open/closed state.
    return self._mode == _MODE_CLOSED
|
| 125 |
+
|
| 126 |
+
def fileno(self):
    """Return the file descriptor for the underlying file."""
    self._check_not_closed()
    # Delegate to the raw (possibly caller-supplied) file object.
    return self._fp.fileno()
|
| 130 |
+
|
| 131 |
+
def seekable(self):
    """Return whether the file supports seeking."""
    # Seeking only makes sense in read mode, and only when the
    # buffered reader itself can seek.
    return self.readable() and self._buffer.seekable()
|
| 134 |
+
|
| 135 |
+
def readable(self):
    """Return whether the file was opened for reading."""
    self._check_not_closed()
    # Only _MODE_READ supports reads; write mode has no read buffer.
    return self._mode == _MODE_READ
|
| 139 |
+
|
| 140 |
+
def writable(self):
    """Return whether the file was opened for writing."""
    self._check_not_closed()
    # _MODE_WRITE covers 'w', 'x' and 'a' modes alike.
    return self._mode == _MODE_WRITE
|
| 144 |
+
|
| 145 |
+
def peek(self, n=0):
    """Return buffered data without advancing the file position.

    Always returns at least one byte of data, unless at EOF.
    The exact number of bytes returned is unspecified.
    """
    self._check_can_read()
    # Relies on the undocumented fact that BufferedReader.peek()
    # always returns at least one byte (except at EOF), independent
    # of the value of n
    return self._buffer.peek(n)
|
| 156 |
+
|
| 157 |
+
def read(self, size=-1):
    """Read up to size uncompressed bytes from the file.

    If size is negative or omitted, read until EOF is reached.
    Returns b'' if the file is already at EOF.
    """
    self._check_can_read()
    # All buffering and decompression happens inside self._buffer.
    return self._buffer.read(size)
|
| 165 |
+
|
| 166 |
+
def read1(self, size=-1):
    """Read up to size uncompressed bytes, while trying to avoid
    making multiple reads from the underlying stream. Reads up to a
    buffer's worth of data if size is negative.

    Returns b'' if the file is at EOF.
    """
    self._check_can_read()
    # A negative size means "one buffer's worth".
    effective = io.DEFAULT_BUFFER_SIZE if size < 0 else size
    return self._buffer.read1(effective)
|
| 177 |
+
|
| 178 |
+
def readinto(self, b):
    """Read bytes into b.

    Returns the number of bytes read (0 for EOF).
    """
    self._check_can_read()
    # Delegate directly; the buffer fills as much of b as it can.
    return self._buffer.readinto(b)
|
| 185 |
+
|
| 186 |
+
def readline(self, size=-1):
    """Read a line of uncompressed bytes from the file.

    The terminating newline (if present) is retained. If size is
    non-negative, no more than size bytes will be read (in which
    case the line may be incomplete). Returns b'' if already at EOF.
    """
    # Accept any integer-like size, but reject everything else with
    # the historical error message.
    if not isinstance(size, int):
        if not hasattr(size, "__index__"):
            raise TypeError("Integer argument expected")
        size = size.__index__()
    self._check_can_read()
    return self._buffer.readline(size)
|
| 199 |
+
|
| 200 |
+
def readlines(self, size=-1):
    """Read a list of lines of uncompressed bytes from the file.

    size can be specified to control the number of lines read: no
    further lines will be read once the total size of the lines read
    so far equals or exceeds size.
    """
    # Accept any integer-like size, but reject everything else with
    # the historical error message.
    if not isinstance(size, int):
        if not hasattr(size, "__index__"):
            raise TypeError("Integer argument expected")
        size = size.__index__()
    self._check_can_read()
    return self._buffer.readlines(size)
|
| 213 |
+
|
| 214 |
+
def write(self, data):
    """Write a byte string to the file.

    Returns the number of uncompressed bytes written, which is always
    the length of *data* in bytes. Because of compressor buffering,
    the on-disk file may lag behind until close() is called.
    """
    self._check_can_write()
    if isinstance(data, (bytes, bytearray)):
        length = len(data)
    else:
        # Accept any object supporting the buffer protocol.
        data = memoryview(data)
        length = data.nbytes
    # Compress first, then push the compressed chunk to the raw stream.
    self._fp.write(self._compressor.compress(data))
    self._pos += length
    return length
|
| 234 |
+
|
| 235 |
+
def writelines(self, seq):
    """Write a sequence of byte strings to the file.

    Returns the number of uncompressed bytes written.
    seq can be any iterable yielding byte strings.

    Line separators are not added between the written byte strings.
    """
    # NOTE(review): delegates to _compression.BaseStream.writelines;
    # presumably that iterates seq and routes each item through
    # self.write() — confirm against the _compression module.
    return _compression.BaseStream.writelines(self, seq)
|
| 244 |
+
|
| 245 |
+
def seek(self, offset, whence=io.SEEK_SET):
    """Change the file position and return the new absolute position.

    *offset* is interpreted relative to *whence*:

      0: start of stream (default); offset must not be negative
      1: current stream position
      2: end of stream; offset must not be positive

    Seeking is emulated on top of the decompressor, so depending on
    the parameters this operation may be extremely slow.
    """
    self._check_can_seek()
    return self._buffer.seek(offset, whence)
|
| 262 |
+
|
| 263 |
+
def tell(self):
    """Return the current file position (uncompressed offset)."""
    self._check_not_closed()
    # In read mode the buffered reader tracks the position; in write
    # mode we track it ourselves in self._pos.
    return self._buffer.tell() if self._mode == _MODE_READ else self._pos
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def open(filename, mode="rb", compresslevel=9,
         encoding=None, errors=None, newline=None):
    """Open a bzip2-compressed file in binary or text mode.

    *filename* may be an actual filename (str, bytes or PathLike) or
    an existing file object to read from / write to.

    Binary modes: "r", "rb", "w", "wb", "x", "xb", "a", "ab".
    Text modes: "rt", "wt", "xt", "at". The default is "rb" with
    compresslevel 9.

    In binary mode this is equivalent to the BZ2File constructor
    (encoding, errors and newline must then be None). In text mode
    the BZ2File is wrapped in an io.TextIOWrapper with the given
    encoding, error handling and line ending(s).
    """
    text_mode = "t" in mode
    if text_mode:
        if "b" in mode:
            raise ValueError("Invalid mode: %r" % (mode,))
    else:
        # Text-only arguments are rejected in binary mode.
        for argname, argval in (("encoding", encoding),
                                ("errors", errors),
                                ("newline", newline)):
            if argval is not None:
                raise ValueError(
                    "Argument %r not supported in binary mode" % argname)

    binary_file = BZ2File(filename, mode.replace("t", ""),
                          compresslevel=compresslevel)
    if text_mode:
        encoding = io.text_encoding(encoding)
        return io.TextIOWrapper(binary_file, encoding, errors, newline)
    return binary_file
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def compress(data, compresslevel=9):
    """Compress *data* in one shot and return the bzip2 stream.

    *compresslevel*, if given, must be an integer between 1 and 9.
    For incremental compression, use a BZ2Compressor object instead.
    """
    compressor = BZ2Compressor(compresslevel)
    # A single compress() call followed by flush() emits the complete stream.
    return compressor.compress(data) + compressor.flush()
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def decompress(data):
    """Decompress *data*, handling multiple concatenated bzip2 streams.

    For incremental decompression, use a BZ2Decompressor object instead.
    """
    chunks = []
    while data:
        dec = BZ2Decompressor()
        try:
            chunk = dec.decompress(data)
        except OSError:
            if not chunks:
                raise          # Error on the very first stream; bail out.
            break              # Trailing non-bzip2 bytes: silently ignored.
        chunks.append(chunk)
        if not dec.eof:
            raise ValueError("Compressed data ended before the "
                             "end-of-stream marker was reached")
        # Any bytes after this stream may be a further bzip2 stream.
        data = dec.unused_data
    return b"".join(chunks)
|
evalkit_cambrian/lib/python3.10/cProfile.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/bin/env python3
|
| 2 |
+
|
| 3 |
+
"""Python interface for the 'lsprof' profiler.
|
| 4 |
+
Compatible with the 'profile' module.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["run", "runctx", "Profile"]
|
| 8 |
+
|
| 9 |
+
import _lsprof
|
| 10 |
+
import io
|
| 11 |
+
import profile as _pyprofile
|
| 12 |
+
|
| 13 |
+
# ____________________________________________________________
|
| 14 |
+
# Simple interface
|
| 15 |
+
|
| 16 |
+
def run(statement, filename=None, sort=-1):
    # Thin convenience wrapper: delegate to profile's shared _Utils
    # helper, parameterized with this module's Profile class.
    helper = _pyprofile._Utils(Profile)
    return helper.run(statement, filename, sort)
|
| 18 |
+
|
| 19 |
+
def runctx(statement, globals, locals, filename=None, sort=-1):
    # Same delegation as run(), but with explicit namespaces.
    helper = _pyprofile._Utils(Profile)
    return helper.runctx(statement, globals, locals, filename, sort)
|
| 22 |
+
|
| 23 |
+
run.__doc__ = _pyprofile.run.__doc__
|
| 24 |
+
runctx.__doc__ = _pyprofile.runctx.__doc__
|
| 25 |
+
|
| 26 |
+
# ____________________________________________________________
|
| 27 |
+
|
| 28 |
+
class Profile(_lsprof.Profiler):
    """Profile(timer=None, timeunit=None, subcalls=True, builtins=True)

    Builds a profiler object using the specified timer function.
    The default timer is a fast built-in one based on real time.
    For custom timer functions returning integers, timeunit can
    be a float specifying a scale (i.e. how long each integer unit
    is, in seconds).
    """

    # Most of the functionality is in the base class.
    # This subclass only adds convenient and backward-compatible methods.

    def print_stats(self, sort=-1):
        # Render the collected statistics to stdout via pstats.
        import pstats
        pstats.Stats(self).strip_dirs().sort_stats(sort).print_stats()

    def dump_stats(self, file):
        # Serialize self.stats to *file* with marshal (readable by pstats).
        import marshal
        with open(file, 'wb') as f:
            self.create_stats()
            marshal.dump(self.stats, f)

    def create_stats(self):
        # Stop profiling, then convert the raw entries into self.stats.
        self.disable()
        self.snapshot_stats()

    def snapshot_stats(self):
        # Build self.stats in the pstats format:
        #   {func_label: (cc, nc, tt, ct, callers)}
        entries = self.getstats()
        self.stats = {}
        callersdicts = {}
        # call information
        for entry in entries:
            func = label(entry.code)
            nc = entry.callcount         # ncalls column of pstats (before '/')
            cc = nc - entry.reccallcount # ncalls column of pstats (after '/')
            tt = entry.inlinetime        # tottime column of pstats
            ct = entry.totaltime         # cumtime column of pstats
            callers = {}
            callersdicts[id(entry.code)] = callers
            self.stats[func] = cc, nc, tt, ct, callers
        # subcall information
        for entry in entries:
            if entry.calls:
                func = label(entry.code)
                for subentry in entry.calls:
                    try:
                        callers = callersdicts[id(subentry.code)]
                    except KeyError:
                        continue
                    nc = subentry.callcount
                    cc = nc - subentry.reccallcount
                    tt = subentry.inlinetime
                    ct = subentry.totaltime
                    if func in callers:
                        # Merge repeated caller/callee pairs.
                        prev = callers[func]
                        nc += prev[0]
                        cc += prev[1]
                        tt += prev[2]
                        ct += prev[3]
                    callers[func] = nc, cc, tt, ct

    # The following two methods can be called by clients to use
    # a profiler to profile a statement, given as a string.

    def run(self, cmd):
        # Profile *cmd* in the __main__ namespace.
        import __main__
        dict = __main__.__dict__
        return self.runctx(cmd, dict, dict)

    def runctx(self, cmd, globals, locals):
        # Profile exec of *cmd* under the given namespaces; returns self.
        self.enable()
        try:
            exec(cmd, globals, locals)
        finally:
            self.disable()
        return self

    # This method is more useful to profile a single function call.
    def runcall(self, func, /, *args, **kw):
        self.enable()
        try:
            return func(*args, **kw)
        finally:
            self.disable()

    def __enter__(self):
        # Context-manager support: profiling is active inside the block.
        self.enable()
        return self

    def __exit__(self, *exc_info):
        self.disable()
|
| 120 |
+
|
| 121 |
+
# ____________________________________________________________
|
| 122 |
+
|
| 123 |
+
def label(code):
    """Return a pstats-style (filename, lineno, funcname) key for *code*."""
    if isinstance(code, str):
        # Built-in functions arrive as plain strings; '~' sorts at the end.
        return ('~', 0, code)
    return (code.co_filename, code.co_firstlineno, code.co_name)
|
| 128 |
+
|
| 129 |
+
# ____________________________________________________________
|
| 130 |
+
|
| 131 |
+
def main():
    """Command-line entry point.

    Usage: cProfile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ...

    Profiles either a script file or (with -m) a library module,
    writing stats to -o's path or printing them sorted per -s.
    Returns the OptionParser instance.
    """
    import os
    import sys
    import runpy
    import pstats
    from optparse import OptionParser
    usage = "cProfile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
    parser = OptionParser(usage=usage)
    # Stop option parsing at the first positional arg so the profiled
    # program's own flags are left untouched.
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile",
        help="Save stats to <outfile>", default=None)
    parser.add_option('-s', '--sort', dest="sort",
        help="Sort order when printing to stdout, based on pstats.Stats class",
        default=-1,
        choices=sorted(pstats.Stats.sort_arg_dict_default))
    parser.add_option('-m', dest="module", action="store_true",
        help="Profile a library module", default=False)

    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)

    (options, args) = parser.parse_args()
    # Make sys.argv look like the profiled program was invoked directly.
    sys.argv[:] = args

    # The script that we're profiling may chdir, so capture the absolute path
    # to the output file at startup.
    if options.outfile is not None:
        options.outfile = os.path.abspath(options.outfile)

    if len(args) > 0:
        if options.module:
            # Emulate "python -m modname" under the profiler.
            code = "run_module(modname, run_name='__main__')"
            globs = {
                'run_module': runpy.run_module,
                'modname': args[0]
            }
        else:
            # Profile a script file, emulating direct execution:
            # its directory goes on sys.path and __name__ is '__main__'.
            progname = args[0]
            sys.path.insert(0, os.path.dirname(progname))
            with io.open_code(progname) as fp:
                code = compile(fp.read(), progname, 'exec')
            globs = {
                '__file__': progname,
                '__name__': '__main__',
                '__package__': None,
                '__cached__': None,
            }
        try:
            runctx(code, globs, None, options.outfile, options.sort)
        except BrokenPipeError as exc:
            # Prevent "Exception ignored" during interpreter shutdown.
            sys.stdout = None
            sys.exit(exc.errno)
    else:
        parser.print_usage()
    return parser
|
| 188 |
+
|
| 189 |
+
# When invoked as main program, invoke the profiler on a script
|
| 190 |
+
if __name__ == '__main__':
|
| 191 |
+
main()
|
evalkit_cambrian/lib/python3.10/cgi.py
ADDED
|
@@ -0,0 +1,1004 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/local/bin/python
|
| 2 |
+
|
| 3 |
+
# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
|
| 4 |
+
# intentionally NOT "/usr/bin/env python". On many systems
|
| 5 |
+
# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
|
| 6 |
+
# scripts, and /usr/local/bin is the default directory where Python is
|
| 7 |
+
# installed, so /usr/bin/env would be unable to find python. Granted,
|
| 8 |
+
# binary installations by Linux vendors often install Python in
|
| 9 |
+
# /usr/bin. So let those vendors patch cgi.py to match their choice
|
| 10 |
+
# of installation.
|
| 11 |
+
|
| 12 |
+
"""Support module for CGI (Common Gateway Interface) scripts.
|
| 13 |
+
|
| 14 |
+
This module defines a number of utilities for use by CGI scripts
|
| 15 |
+
written in Python.
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
# History
|
| 19 |
+
# -------
|
| 20 |
+
#
|
| 21 |
+
# Michael McLay started this module. Steve Majewski changed the
|
| 22 |
+
# interface to SvFormContentDict and FormContentDict. The multipart
|
| 23 |
+
# parsing was inspired by code submitted by Andreas Paepcke. Guido van
|
| 24 |
+
# Rossum rewrote, reformatted and documented the module and is currently
|
| 25 |
+
# responsible for its maintenance.
|
| 26 |
+
#
|
| 27 |
+
|
| 28 |
+
__version__ = "2.6"
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# Imports
|
| 32 |
+
# =======
|
| 33 |
+
|
| 34 |
+
from io import StringIO, BytesIO, TextIOWrapper
|
| 35 |
+
from collections.abc import Mapping
|
| 36 |
+
import sys
|
| 37 |
+
import os
|
| 38 |
+
import urllib.parse
|
| 39 |
+
from email.parser import FeedParser
|
| 40 |
+
from email.message import Message
|
| 41 |
+
import html
|
| 42 |
+
import locale
|
| 43 |
+
import tempfile
|
| 44 |
+
import warnings
|
| 45 |
+
|
| 46 |
+
__all__ = ["MiniFieldStorage", "FieldStorage", "parse", "parse_multipart",
|
| 47 |
+
"parse_header", "test", "print_exception", "print_environ",
|
| 48 |
+
"print_form", "print_directory", "print_arguments",
|
| 49 |
+
"print_environ_usage"]
|
| 50 |
+
|
| 51 |
+
# Logging support
|
| 52 |
+
# ===============
|
| 53 |
+
|
| 54 |
+
logfile = "" # Filename to log to, if not empty
|
| 55 |
+
logfp = None # File object to log to, if not None
|
| 56 |
+
|
| 57 |
+
def initlog(*allargs):
    """Write a log message, if there is a log file.

    Even though this function is called initlog(), you should always
    use log(); log is a variable that is set either to initlog
    (initially), to dolog (once the log file has been opened), or to
    nolog (when logging is disabled).

    The first argument is a format string; the remaining arguments (if
    any) are arguments to the % operator, so e.g.
        log("%s: %s", "a", "b")
    will write "a: b" to the log file, followed by a newline.

    If the global logfp is not None, it should be a file object to
    which log data is written.

    If the global logfp is None, the global logfile may be a string
    giving a filename to open, in append mode.  This file should be
    world writable!!!  If the file can't be opened, logging is
    silently disabled (since there is no safe place where we could
    send an error message).

    """
    global log, logfile, logfp
    # The whole cgi logging facility is deprecated since Python 3.10.
    warnings.warn("cgi.log() is deprecated as of 3.10. Use logging instead",
                  DeprecationWarning, stacklevel=2)
    if logfile and not logfp:
        # Lazily open the configured log file; failure disables logging.
        try:
            logfp = open(logfile, "a", encoding="locale")
        except OSError:
            pass
    # Rebind the module-level log() to the chosen implementation so
    # subsequent calls skip this initialization.
    if not logfp:
        log = nolog
    else:
        log = dolog
    # Route this first message through the newly selected sink.
    log(*allargs)
|
| 93 |
+
|
| 94 |
+
def dolog(fmt, *args):
    """Write one %-formatted log message to the open log file.

    See initlog() for the full logging contract.
    """
    logfp.write(f"{fmt % args}\n")
|
| 97 |
+
|
| 98 |
+
def nolog(*allargs):
    """No-op stand-in bound to log when logging is disabled."""
    return None
|
| 101 |
+
|
| 102 |
+
def closelog():
    """Close the log file."""
    global log, logfile, logfp
    # Clear the configured filename so logging stays off until reconfigured.
    logfile = ''
    if logfp:
        logfp.close()
        logfp = None
    # Reset so the next log() call goes through initlog() again.
    log = initlog
|
| 110 |
+
|
| 111 |
+
log = initlog # The current logging function
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# Parsing functions
|
| 115 |
+
# =================
|
| 116 |
+
|
| 117 |
+
# Maximum input we will accept when REQUEST_METHOD is POST
|
| 118 |
+
# 0 ==> unlimited input
|
| 119 |
+
maxlen = 0
|
| 120 |
+
|
| 121 |
+
def parse(fp=None, environ=os.environ, keep_blank_values=0,
          strict_parsing=0, separator='&'):
    """Parse a query in the environment or from a file (default stdin)

    Arguments, all optional:

    fp              : file pointer; default: sys.stdin.buffer

    environ         : environment dictionary; default: os.environ

    keep_blank_values: flag indicating whether blank values in
        percent-encoded forms should be treated as blank strings.
        A true value indicates that blanks should be retained as
        blank strings.  The default false value indicates that
        blank values are to be ignored and treated as if they were
        not included.

    strict_parsing: flag indicating what to do with parsing errors.
        If false (the default), errors are silently ignored.
        If true, errors raise a ValueError exception.

    separator: str. The symbol to use for separating the query arguments.
        Defaults to &.
    """
    if fp is None:
        fp = sys.stdin

    # field keys and values (except for files) are returned as strings
    # an encoding is required to decode the bytes read from self.fp
    if hasattr(fp,'encoding'):
        encoding = fp.encoding
    else:
        encoding = 'latin-1'

    # fp.read() must return bytes, so unwrap a text-mode stream.
    if isinstance(fp, TextIOWrapper):
        fp = fp.buffer

    if not 'REQUEST_METHOD' in environ:
        environ['REQUEST_METHOD'] = 'GET'       # For testing stand-alone
    if environ['REQUEST_METHOD'] == 'POST':
        # NOTE(review): assumes CONTENT_TYPE (and, for urlencoded bodies,
        # CONTENT_LENGTH) are present in environ for POST — a missing key
        # raises KeyError here; confirm callers guarantee this.
        ctype, pdict = parse_header(environ['CONTENT_TYPE'])
        if ctype == 'multipart/form-data':
            return parse_multipart(fp, pdict, separator=separator)
        elif ctype == 'application/x-www-form-urlencoded':
            clength = int(environ['CONTENT_LENGTH'])
            if maxlen and clength > maxlen:
                raise ValueError('Maximum content length exceeded')
            qs = fp.read(clength).decode(encoding)
        else:
            qs = ''                     # Unknown content-type
        # A POST may also carry query arguments in the URL (or, when run
        # stand-alone, on the command line); merge them with '&'.
        if 'QUERY_STRING' in environ:
            if qs: qs = qs + '&'
            qs = qs + environ['QUERY_STRING']
        elif sys.argv[1:]:
            if qs: qs = qs + '&'
            qs = qs + sys.argv[1]
        environ['QUERY_STRING'] = qs    # XXX Shouldn't, really
    elif 'QUERY_STRING' in environ:
        qs = environ['QUERY_STRING']
    else:
        # Stand-alone invocation: take the query from the command line.
        if sys.argv[1:]:
            qs = sys.argv[1]
        else:
            qs = ""
        environ['QUERY_STRING'] = qs    # XXX Shouldn't, really
    return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
                                 encoding=encoding, separator=separator)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator='&'):
    """Parse multipart input.

    Arguments:
    fp   : input file
    pdict: dictionary containing other parameters of content-type header
    encoding, errors: request encoding and error handler, passed to
        FieldStorage

    Returns a dictionary just like parse_qs(): keys are the field names,
    each value is a list of values for that field. For non-file fields,
    the value is a list of strings.
    """
    # RFC 2046, Section 5.1: the "multipart" boundary delimiters are
    # always represented as 7bit US-ASCII.
    boundary = pdict['boundary'].decode('ascii')
    headers = Message()
    headers.set_type("multipart/form-data; boundary={}".format(boundary))
    if 'CONTENT-LENGTH' in pdict:
        headers['Content-Length'] = pdict['CONTENT-LENGTH']
    fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors,
                      environ={'REQUEST_METHOD': 'POST'}, separator=separator)
    return {name: fs.getlist(name) for name in fs}
|
| 217 |
+
|
| 218 |
+
def _parseparam(s):
|
| 219 |
+
while s[:1] == ';':
|
| 220 |
+
s = s[1:]
|
| 221 |
+
end = s.find(';')
|
| 222 |
+
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
|
| 223 |
+
end = s.find(';', end + 1)
|
| 224 |
+
if end < 0:
|
| 225 |
+
end = len(s)
|
| 226 |
+
f = s[:end]
|
| 227 |
+
yield f.strip()
|
| 228 |
+
s = s[end:]
|
| 229 |
+
|
| 230 |
+
def parse_header(line):
    """Parse a Content-type like header.

    Return the main content-type and a dictionary of options.
    """
    parts = _parseparam(';' + line)
    key = next(parts)
    pdict = {}
    for param in parts:
        eq = param.find('=')
        if eq < 0:
            continue  # bare token without a value: ignored
        name = param[:eq].strip().lower()
        value = param[eq + 1:].strip()
        # Strip surrounding quotes and unescape \\ and \" inside them.
        if len(value) >= 2 and value[0] == value[-1] == '"':
            value = value[1:-1]
            value = value.replace('\\\\', '\\').replace('\\"', '"')
        pdict[name] = value
    return key, pdict
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
# Classes for field storage
|
| 252 |
+
# =========================
|
| 253 |
+
|
| 254 |
+
class MiniFieldStorage:

    """Like FieldStorage, for use when no file uploads are possible."""

    # Dummy class attributes mirroring the FieldStorage interface.
    filename = None
    list = None
    type = None
    file = None
    type_options = {}
    disposition = None
    disposition_options = {}
    headers = {}

    def __init__(self, name, value):
        """Store the field *name* and its string *value*."""
        self.name = name
        self.value = value
        # self.file = StringIO(value)

    def __repr__(self):
        """Return a printable representation of this field."""
        return f"MiniFieldStorage({self.name!r}, {self.value!r})"
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
class FieldStorage:
|
| 280 |
+
|
| 281 |
+
"""Store a sequence of fields, reading multipart/form-data.
|
| 282 |
+
|
| 283 |
+
This class provides naming, typing, files stored on disk, and
|
| 284 |
+
more. At the top level, it is accessible like a dictionary, whose
|
| 285 |
+
keys are the field names. (Note: None can occur as a field name.)
|
| 286 |
+
The items are either a Python list (if there's multiple values) or
|
| 287 |
+
another FieldStorage or MiniFieldStorage object. If it's a single
|
| 288 |
+
object, it has the following attributes:
|
| 289 |
+
|
| 290 |
+
name: the field name, if specified; otherwise None
|
| 291 |
+
|
| 292 |
+
filename: the filename, if specified; otherwise None; this is the
|
| 293 |
+
client side filename, *not* the file name on which it is
|
| 294 |
+
stored (that's a temporary file you don't deal with)
|
| 295 |
+
|
| 296 |
+
value: the value as a *string*; for file uploads, this
|
| 297 |
+
transparently reads the file every time you request the value
|
| 298 |
+
and returns *bytes*
|
| 299 |
+
|
| 300 |
+
file: the file(-like) object from which you can read the data *as
|
| 301 |
+
bytes* ; None if the data is stored a simple string
|
| 302 |
+
|
| 303 |
+
type: the content-type, or None if not specified
|
| 304 |
+
|
| 305 |
+
type_options: dictionary of options specified on the content-type
|
| 306 |
+
line
|
| 307 |
+
|
| 308 |
+
disposition: content-disposition, or None if not specified
|
| 309 |
+
|
| 310 |
+
disposition_options: dictionary of corresponding options
|
| 311 |
+
|
| 312 |
+
headers: a dictionary(-like) object (sometimes email.message.Message or a
|
| 313 |
+
subclass thereof) containing *all* headers
|
| 314 |
+
|
| 315 |
+
The class is subclassable, mostly for the purpose of overriding
|
| 316 |
+
the make_file() method, which is called internally to come up with
|
| 317 |
+
a file open for reading and writing. This makes it possible to
|
| 318 |
+
override the default choice of storing all files in a temporary
|
| 319 |
+
directory and unlinking them as soon as they have been opened.
|
| 320 |
+
|
| 321 |
+
"""
|
| 322 |
+
def __init__(self, fp=None, headers=None, outerboundary=b'',
|
| 323 |
+
environ=os.environ, keep_blank_values=0, strict_parsing=0,
|
| 324 |
+
limit=None, encoding='utf-8', errors='replace',
|
| 325 |
+
max_num_fields=None, separator='&'):
|
| 326 |
+
"""Constructor. Read multipart/* until last part.
|
| 327 |
+
|
| 328 |
+
Arguments, all optional:
|
| 329 |
+
|
| 330 |
+
fp : file pointer; default: sys.stdin.buffer
|
| 331 |
+
(not used when the request method is GET)
|
| 332 |
+
Can be :
|
| 333 |
+
1. a TextIOWrapper object
|
| 334 |
+
2. an object whose read() and readline() methods return bytes
|
| 335 |
+
|
| 336 |
+
headers : header dictionary-like object; default:
|
| 337 |
+
taken from environ as per CGI spec
|
| 338 |
+
|
| 339 |
+
outerboundary : terminating multipart boundary
|
| 340 |
+
(for internal use only)
|
| 341 |
+
|
| 342 |
+
environ : environment dictionary; default: os.environ
|
| 343 |
+
|
| 344 |
+
keep_blank_values: flag indicating whether blank values in
|
| 345 |
+
percent-encoded forms should be treated as blank strings.
|
| 346 |
+
A true value indicates that blanks should be retained as
|
| 347 |
+
blank strings. The default false value indicates that
|
| 348 |
+
blank values are to be ignored and treated as if they were
|
| 349 |
+
not included.
|
| 350 |
+
|
| 351 |
+
strict_parsing: flag indicating what to do with parsing errors.
|
| 352 |
+
If false (the default), errors are silently ignored.
|
| 353 |
+
If true, errors raise a ValueError exception.
|
| 354 |
+
|
| 355 |
+
limit : used internally to read parts of multipart/form-data forms,
|
| 356 |
+
to exit from the reading loop when reached. It is the difference
|
| 357 |
+
between the form content-length and the number of bytes already
|
| 358 |
+
read
|
| 359 |
+
|
| 360 |
+
encoding, errors : the encoding and error handler used to decode the
|
| 361 |
+
binary stream to strings. Must be the same as the charset defined
|
| 362 |
+
for the page sending the form (content-type : meta http-equiv or
|
| 363 |
+
header)
|
| 364 |
+
|
| 365 |
+
max_num_fields: int. If set, then __init__ throws a ValueError
|
| 366 |
+
if there are more than n fields read by parse_qsl().
|
| 367 |
+
|
| 368 |
+
"""
|
| 369 |
+
method = 'GET'
|
| 370 |
+
self.keep_blank_values = keep_blank_values
|
| 371 |
+
self.strict_parsing = strict_parsing
|
| 372 |
+
self.max_num_fields = max_num_fields
|
| 373 |
+
self.separator = separator
|
| 374 |
+
if 'REQUEST_METHOD' in environ:
|
| 375 |
+
method = environ['REQUEST_METHOD'].upper()
|
| 376 |
+
self.qs_on_post = None
|
| 377 |
+
if method == 'GET' or method == 'HEAD':
|
| 378 |
+
if 'QUERY_STRING' in environ:
|
| 379 |
+
qs = environ['QUERY_STRING']
|
| 380 |
+
elif sys.argv[1:]:
|
| 381 |
+
qs = sys.argv[1]
|
| 382 |
+
else:
|
| 383 |
+
qs = ""
|
| 384 |
+
qs = qs.encode(locale.getpreferredencoding(), 'surrogateescape')
|
| 385 |
+
fp = BytesIO(qs)
|
| 386 |
+
if headers is None:
|
| 387 |
+
headers = {'content-type':
|
| 388 |
+
"application/x-www-form-urlencoded"}
|
| 389 |
+
if headers is None:
|
| 390 |
+
headers = {}
|
| 391 |
+
if method == 'POST':
|
| 392 |
+
# Set default content-type for POST to what's traditional
|
| 393 |
+
headers['content-type'] = "application/x-www-form-urlencoded"
|
| 394 |
+
if 'CONTENT_TYPE' in environ:
|
| 395 |
+
headers['content-type'] = environ['CONTENT_TYPE']
|
| 396 |
+
if 'QUERY_STRING' in environ:
|
| 397 |
+
self.qs_on_post = environ['QUERY_STRING']
|
| 398 |
+
if 'CONTENT_LENGTH' in environ:
|
| 399 |
+
headers['content-length'] = environ['CONTENT_LENGTH']
|
| 400 |
+
else:
|
| 401 |
+
if not (isinstance(headers, (Mapping, Message))):
|
| 402 |
+
raise TypeError("headers must be mapping or an instance of "
|
| 403 |
+
"email.message.Message")
|
| 404 |
+
self.headers = headers
|
| 405 |
+
if fp is None:
|
| 406 |
+
self.fp = sys.stdin.buffer
|
| 407 |
+
# self.fp.read() must return bytes
|
| 408 |
+
elif isinstance(fp, TextIOWrapper):
|
| 409 |
+
self.fp = fp.buffer
|
| 410 |
+
else:
|
| 411 |
+
if not (hasattr(fp, 'read') and hasattr(fp, 'readline')):
|
| 412 |
+
raise TypeError("fp must be file pointer")
|
| 413 |
+
self.fp = fp
|
| 414 |
+
|
| 415 |
+
self.encoding = encoding
|
| 416 |
+
self.errors = errors
|
| 417 |
+
|
| 418 |
+
if not isinstance(outerboundary, bytes):
|
| 419 |
+
raise TypeError('outerboundary must be bytes, not %s'
|
| 420 |
+
% type(outerboundary).__name__)
|
| 421 |
+
self.outerboundary = outerboundary
|
| 422 |
+
|
| 423 |
+
self.bytes_read = 0
|
| 424 |
+
self.limit = limit
|
| 425 |
+
|
| 426 |
+
# Process content-disposition header
|
| 427 |
+
cdisp, pdict = "", {}
|
| 428 |
+
if 'content-disposition' in self.headers:
|
| 429 |
+
cdisp, pdict = parse_header(self.headers['content-disposition'])
|
| 430 |
+
self.disposition = cdisp
|
| 431 |
+
self.disposition_options = pdict
|
| 432 |
+
self.name = None
|
| 433 |
+
if 'name' in pdict:
|
| 434 |
+
self.name = pdict['name']
|
| 435 |
+
self.filename = None
|
| 436 |
+
if 'filename' in pdict:
|
| 437 |
+
self.filename = pdict['filename']
|
| 438 |
+
self._binary_file = self.filename is not None
|
| 439 |
+
|
| 440 |
+
# Process content-type header
|
| 441 |
+
#
|
| 442 |
+
# Honor any existing content-type header. But if there is no
|
| 443 |
+
# content-type header, use some sensible defaults. Assume
|
| 444 |
+
# outerboundary is "" at the outer level, but something non-false
|
| 445 |
+
# inside a multi-part. The default for an inner part is text/plain,
|
| 446 |
+
# but for an outer part it should be urlencoded. This should catch
|
| 447 |
+
# bogus clients which erroneously forget to include a content-type
|
| 448 |
+
# header.
|
| 449 |
+
#
|
| 450 |
+
# See below for what we do if there does exist a content-type header,
|
| 451 |
+
# but it happens to be something we don't understand.
|
| 452 |
+
if 'content-type' in self.headers:
|
| 453 |
+
ctype, pdict = parse_header(self.headers['content-type'])
|
| 454 |
+
elif self.outerboundary or method != 'POST':
|
| 455 |
+
ctype, pdict = "text/plain", {}
|
| 456 |
+
else:
|
| 457 |
+
ctype, pdict = 'application/x-www-form-urlencoded', {}
|
| 458 |
+
self.type = ctype
|
| 459 |
+
self.type_options = pdict
|
| 460 |
+
if 'boundary' in pdict:
|
| 461 |
+
self.innerboundary = pdict['boundary'].encode(self.encoding,
|
| 462 |
+
self.errors)
|
| 463 |
+
else:
|
| 464 |
+
self.innerboundary = b""
|
| 465 |
+
|
| 466 |
+
clen = -1
|
| 467 |
+
if 'content-length' in self.headers:
|
| 468 |
+
try:
|
| 469 |
+
clen = int(self.headers['content-length'])
|
| 470 |
+
except ValueError:
|
| 471 |
+
pass
|
| 472 |
+
if maxlen and clen > maxlen:
|
| 473 |
+
raise ValueError('Maximum content length exceeded')
|
| 474 |
+
self.length = clen
|
| 475 |
+
if self.limit is None and clen >= 0:
|
| 476 |
+
self.limit = clen
|
| 477 |
+
|
| 478 |
+
self.list = self.file = None
|
| 479 |
+
self.done = 0
|
| 480 |
+
if ctype == 'application/x-www-form-urlencoded':
|
| 481 |
+
self.read_urlencoded()
|
| 482 |
+
elif ctype[:10] == 'multipart/':
|
| 483 |
+
self.read_multi(environ, keep_blank_values, strict_parsing)
|
| 484 |
+
else:
|
| 485 |
+
self.read_single()
|
| 486 |
+
|
| 487 |
+
def __del__(self):
|
| 488 |
+
try:
|
| 489 |
+
self.file.close()
|
| 490 |
+
except AttributeError:
|
| 491 |
+
pass
|
| 492 |
+
|
| 493 |
+
def __enter__(self):
|
| 494 |
+
return self
|
| 495 |
+
|
| 496 |
+
def __exit__(self, *args):
|
| 497 |
+
self.file.close()
|
| 498 |
+
|
| 499 |
+
def __repr__(self):
|
| 500 |
+
"""Return a printable representation."""
|
| 501 |
+
return "FieldStorage(%r, %r, %r)" % (
|
| 502 |
+
self.name, self.filename, self.value)
|
| 503 |
+
|
| 504 |
+
def __iter__(self):
|
| 505 |
+
return iter(self.keys())
|
| 506 |
+
|
| 507 |
+
def __getattr__(self, name):
|
| 508 |
+
if name != 'value':
|
| 509 |
+
raise AttributeError(name)
|
| 510 |
+
if self.file:
|
| 511 |
+
self.file.seek(0)
|
| 512 |
+
value = self.file.read()
|
| 513 |
+
self.file.seek(0)
|
| 514 |
+
elif self.list is not None:
|
| 515 |
+
value = self.list
|
| 516 |
+
else:
|
| 517 |
+
value = None
|
| 518 |
+
return value
|
| 519 |
+
|
| 520 |
+
def __getitem__(self, key):
|
| 521 |
+
"""Dictionary style indexing."""
|
| 522 |
+
if self.list is None:
|
| 523 |
+
raise TypeError("not indexable")
|
| 524 |
+
found = []
|
| 525 |
+
for item in self.list:
|
| 526 |
+
if item.name == key: found.append(item)
|
| 527 |
+
if not found:
|
| 528 |
+
raise KeyError(key)
|
| 529 |
+
if len(found) == 1:
|
| 530 |
+
return found[0]
|
| 531 |
+
else:
|
| 532 |
+
return found
|
| 533 |
+
|
| 534 |
+
def getvalue(self, key, default=None):
|
| 535 |
+
"""Dictionary style get() method, including 'value' lookup."""
|
| 536 |
+
if key in self:
|
| 537 |
+
value = self[key]
|
| 538 |
+
if isinstance(value, list):
|
| 539 |
+
return [x.value for x in value]
|
| 540 |
+
else:
|
| 541 |
+
return value.value
|
| 542 |
+
else:
|
| 543 |
+
return default
|
| 544 |
+
|
| 545 |
+
def getfirst(self, key, default=None):
|
| 546 |
+
""" Return the first value received."""
|
| 547 |
+
if key in self:
|
| 548 |
+
value = self[key]
|
| 549 |
+
if isinstance(value, list):
|
| 550 |
+
return value[0].value
|
| 551 |
+
else:
|
| 552 |
+
return value.value
|
| 553 |
+
else:
|
| 554 |
+
return default
|
| 555 |
+
|
| 556 |
+
def getlist(self, key):
|
| 557 |
+
""" Return list of received values."""
|
| 558 |
+
if key in self:
|
| 559 |
+
value = self[key]
|
| 560 |
+
if isinstance(value, list):
|
| 561 |
+
return [x.value for x in value]
|
| 562 |
+
else:
|
| 563 |
+
return [value.value]
|
| 564 |
+
else:
|
| 565 |
+
return []
|
| 566 |
+
|
| 567 |
+
def keys(self):
|
| 568 |
+
"""Dictionary style keys() method."""
|
| 569 |
+
if self.list is None:
|
| 570 |
+
raise TypeError("not indexable")
|
| 571 |
+
return list(set(item.name for item in self.list))
|
| 572 |
+
|
| 573 |
+
def __contains__(self, key):
|
| 574 |
+
"""Dictionary style __contains__ method."""
|
| 575 |
+
if self.list is None:
|
| 576 |
+
raise TypeError("not indexable")
|
| 577 |
+
return any(item.name == key for item in self.list)
|
| 578 |
+
|
| 579 |
+
def __len__(self):
|
| 580 |
+
"""Dictionary style len(x) support."""
|
| 581 |
+
return len(self.keys())
|
| 582 |
+
|
| 583 |
+
def __bool__(self):
|
| 584 |
+
if self.list is None:
|
| 585 |
+
raise TypeError("Cannot be converted to bool.")
|
| 586 |
+
return bool(self.list)
|
| 587 |
+
|
| 588 |
+
def read_urlencoded(self):
|
| 589 |
+
"""Internal: read data in query string format."""
|
| 590 |
+
qs = self.fp.read(self.length)
|
| 591 |
+
if not isinstance(qs, bytes):
|
| 592 |
+
raise ValueError("%s should return bytes, got %s" \
|
| 593 |
+
% (self.fp, type(qs).__name__))
|
| 594 |
+
qs = qs.decode(self.encoding, self.errors)
|
| 595 |
+
if self.qs_on_post:
|
| 596 |
+
qs += '&' + self.qs_on_post
|
| 597 |
+
query = urllib.parse.parse_qsl(
|
| 598 |
+
qs, self.keep_blank_values, self.strict_parsing,
|
| 599 |
+
encoding=self.encoding, errors=self.errors,
|
| 600 |
+
max_num_fields=self.max_num_fields, separator=self.separator)
|
| 601 |
+
self.list = [MiniFieldStorage(key, value) for key, value in query]
|
| 602 |
+
self.skip_lines()
|
| 603 |
+
|
| 604 |
+
FieldStorageClass = None
|
| 605 |
+
|
| 606 |
+
def read_multi(self, environ, keep_blank_values, strict_parsing):
|
| 607 |
+
"""Internal: read a part that is itself multipart."""
|
| 608 |
+
ib = self.innerboundary
|
| 609 |
+
if not valid_boundary(ib):
|
| 610 |
+
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
|
| 611 |
+
self.list = []
|
| 612 |
+
if self.qs_on_post:
|
| 613 |
+
query = urllib.parse.parse_qsl(
|
| 614 |
+
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
|
| 615 |
+
encoding=self.encoding, errors=self.errors,
|
| 616 |
+
max_num_fields=self.max_num_fields, separator=self.separator)
|
| 617 |
+
self.list.extend(MiniFieldStorage(key, value) for key, value in query)
|
| 618 |
+
|
| 619 |
+
klass = self.FieldStorageClass or self.__class__
|
| 620 |
+
first_line = self.fp.readline() # bytes
|
| 621 |
+
if not isinstance(first_line, bytes):
|
| 622 |
+
raise ValueError("%s should return bytes, got %s" \
|
| 623 |
+
% (self.fp, type(first_line).__name__))
|
| 624 |
+
self.bytes_read += len(first_line)
|
| 625 |
+
|
| 626 |
+
# Ensure that we consume the file until we've hit our inner boundary
|
| 627 |
+
while (first_line.strip() != (b"--" + self.innerboundary) and
|
| 628 |
+
first_line):
|
| 629 |
+
first_line = self.fp.readline()
|
| 630 |
+
self.bytes_read += len(first_line)
|
| 631 |
+
|
| 632 |
+
# Propagate max_num_fields into the sub class appropriately
|
| 633 |
+
max_num_fields = self.max_num_fields
|
| 634 |
+
if max_num_fields is not None:
|
| 635 |
+
max_num_fields -= len(self.list)
|
| 636 |
+
|
| 637 |
+
while True:
|
| 638 |
+
parser = FeedParser()
|
| 639 |
+
hdr_text = b""
|
| 640 |
+
while True:
|
| 641 |
+
data = self.fp.readline()
|
| 642 |
+
hdr_text += data
|
| 643 |
+
if not data.strip():
|
| 644 |
+
break
|
| 645 |
+
if not hdr_text:
|
| 646 |
+
break
|
| 647 |
+
# parser takes strings, not bytes
|
| 648 |
+
self.bytes_read += len(hdr_text)
|
| 649 |
+
parser.feed(hdr_text.decode(self.encoding, self.errors))
|
| 650 |
+
headers = parser.close()
|
| 651 |
+
|
| 652 |
+
# Some clients add Content-Length for part headers, ignore them
|
| 653 |
+
if 'content-length' in headers:
|
| 654 |
+
del headers['content-length']
|
| 655 |
+
|
| 656 |
+
limit = None if self.limit is None \
|
| 657 |
+
else self.limit - self.bytes_read
|
| 658 |
+
part = klass(self.fp, headers, ib, environ, keep_blank_values,
|
| 659 |
+
strict_parsing, limit,
|
| 660 |
+
self.encoding, self.errors, max_num_fields, self.separator)
|
| 661 |
+
|
| 662 |
+
if max_num_fields is not None:
|
| 663 |
+
max_num_fields -= 1
|
| 664 |
+
if part.list:
|
| 665 |
+
max_num_fields -= len(part.list)
|
| 666 |
+
if max_num_fields < 0:
|
| 667 |
+
raise ValueError('Max number of fields exceeded')
|
| 668 |
+
|
| 669 |
+
self.bytes_read += part.bytes_read
|
| 670 |
+
self.list.append(part)
|
| 671 |
+
if part.done or self.bytes_read >= self.length > 0:
|
| 672 |
+
break
|
| 673 |
+
self.skip_lines()
|
| 674 |
+
|
| 675 |
+
def read_single(self):
|
| 676 |
+
"""Internal: read an atomic part."""
|
| 677 |
+
if self.length >= 0:
|
| 678 |
+
self.read_binary()
|
| 679 |
+
self.skip_lines()
|
| 680 |
+
else:
|
| 681 |
+
self.read_lines()
|
| 682 |
+
self.file.seek(0)
|
| 683 |
+
|
| 684 |
+
bufsize = 8*1024 # I/O buffering size for copy to file
|
| 685 |
+
|
| 686 |
+
def read_binary(self):
|
| 687 |
+
"""Internal: read binary data."""
|
| 688 |
+
self.file = self.make_file()
|
| 689 |
+
todo = self.length
|
| 690 |
+
if todo >= 0:
|
| 691 |
+
while todo > 0:
|
| 692 |
+
data = self.fp.read(min(todo, self.bufsize)) # bytes
|
| 693 |
+
if not isinstance(data, bytes):
|
| 694 |
+
raise ValueError("%s should return bytes, got %s"
|
| 695 |
+
% (self.fp, type(data).__name__))
|
| 696 |
+
self.bytes_read += len(data)
|
| 697 |
+
if not data:
|
| 698 |
+
self.done = -1
|
| 699 |
+
break
|
| 700 |
+
self.file.write(data)
|
| 701 |
+
todo = todo - len(data)
|
| 702 |
+
|
| 703 |
+
def read_lines(self):
|
| 704 |
+
"""Internal: read lines until EOF or outerboundary."""
|
| 705 |
+
if self._binary_file:
|
| 706 |
+
self.file = self.__file = BytesIO() # store data as bytes for files
|
| 707 |
+
else:
|
| 708 |
+
self.file = self.__file = StringIO() # as strings for other fields
|
| 709 |
+
if self.outerboundary:
|
| 710 |
+
self.read_lines_to_outerboundary()
|
| 711 |
+
else:
|
| 712 |
+
self.read_lines_to_eof()
|
| 713 |
+
|
| 714 |
+
def __write(self, line):
|
| 715 |
+
"""line is always bytes, not string"""
|
| 716 |
+
if self.__file is not None:
|
| 717 |
+
if self.__file.tell() + len(line) > 1000:
|
| 718 |
+
self.file = self.make_file()
|
| 719 |
+
data = self.__file.getvalue()
|
| 720 |
+
self.file.write(data)
|
| 721 |
+
self.__file = None
|
| 722 |
+
if self._binary_file:
|
| 723 |
+
# keep bytes
|
| 724 |
+
self.file.write(line)
|
| 725 |
+
else:
|
| 726 |
+
# decode to string
|
| 727 |
+
self.file.write(line.decode(self.encoding, self.errors))
|
| 728 |
+
|
| 729 |
+
def read_lines_to_eof(self):
|
| 730 |
+
"""Internal: read lines until EOF."""
|
| 731 |
+
while 1:
|
| 732 |
+
line = self.fp.readline(1<<16) # bytes
|
| 733 |
+
self.bytes_read += len(line)
|
| 734 |
+
if not line:
|
| 735 |
+
self.done = -1
|
| 736 |
+
break
|
| 737 |
+
self.__write(line)
|
| 738 |
+
|
| 739 |
+
def read_lines_to_outerboundary(self):
|
| 740 |
+
"""Internal: read lines until outerboundary.
|
| 741 |
+
Data is read as bytes: boundaries and line ends must be converted
|
| 742 |
+
to bytes for comparisons.
|
| 743 |
+
"""
|
| 744 |
+
next_boundary = b"--" + self.outerboundary
|
| 745 |
+
last_boundary = next_boundary + b"--"
|
| 746 |
+
delim = b""
|
| 747 |
+
last_line_lfend = True
|
| 748 |
+
_read = 0
|
| 749 |
+
while 1:
|
| 750 |
+
|
| 751 |
+
if self.limit is not None and 0 <= self.limit <= _read:
|
| 752 |
+
break
|
| 753 |
+
line = self.fp.readline(1<<16) # bytes
|
| 754 |
+
self.bytes_read += len(line)
|
| 755 |
+
_read += len(line)
|
| 756 |
+
if not line:
|
| 757 |
+
self.done = -1
|
| 758 |
+
break
|
| 759 |
+
if delim == b"\r":
|
| 760 |
+
line = delim + line
|
| 761 |
+
delim = b""
|
| 762 |
+
if line.startswith(b"--") and last_line_lfend:
|
| 763 |
+
strippedline = line.rstrip()
|
| 764 |
+
if strippedline == next_boundary:
|
| 765 |
+
break
|
| 766 |
+
if strippedline == last_boundary:
|
| 767 |
+
self.done = 1
|
| 768 |
+
break
|
| 769 |
+
odelim = delim
|
| 770 |
+
if line.endswith(b"\r\n"):
|
| 771 |
+
delim = b"\r\n"
|
| 772 |
+
line = line[:-2]
|
| 773 |
+
last_line_lfend = True
|
| 774 |
+
elif line.endswith(b"\n"):
|
| 775 |
+
delim = b"\n"
|
| 776 |
+
line = line[:-1]
|
| 777 |
+
last_line_lfend = True
|
| 778 |
+
elif line.endswith(b"\r"):
|
| 779 |
+
# We may interrupt \r\n sequences if they span the 2**16
|
| 780 |
+
# byte boundary
|
| 781 |
+
delim = b"\r"
|
| 782 |
+
line = line[:-1]
|
| 783 |
+
last_line_lfend = False
|
| 784 |
+
else:
|
| 785 |
+
delim = b""
|
| 786 |
+
last_line_lfend = False
|
| 787 |
+
self.__write(odelim + line)
|
| 788 |
+
|
| 789 |
+
def skip_lines(self):
|
| 790 |
+
"""Internal: skip lines until outer boundary if defined."""
|
| 791 |
+
if not self.outerboundary or self.done:
|
| 792 |
+
return
|
| 793 |
+
next_boundary = b"--" + self.outerboundary
|
| 794 |
+
last_boundary = next_boundary + b"--"
|
| 795 |
+
last_line_lfend = True
|
| 796 |
+
while True:
|
| 797 |
+
line = self.fp.readline(1<<16)
|
| 798 |
+
self.bytes_read += len(line)
|
| 799 |
+
if not line:
|
| 800 |
+
self.done = -1
|
| 801 |
+
break
|
| 802 |
+
if line.endswith(b"--") and last_line_lfend:
|
| 803 |
+
strippedline = line.strip()
|
| 804 |
+
if strippedline == next_boundary:
|
| 805 |
+
break
|
| 806 |
+
if strippedline == last_boundary:
|
| 807 |
+
self.done = 1
|
| 808 |
+
break
|
| 809 |
+
last_line_lfend = line.endswith(b'\n')
|
| 810 |
+
|
| 811 |
+
def make_file(self):
|
| 812 |
+
"""Overridable: return a readable & writable file.
|
| 813 |
+
|
| 814 |
+
The file will be used as follows:
|
| 815 |
+
- data is written to it
|
| 816 |
+
- seek(0)
|
| 817 |
+
- data is read from it
|
| 818 |
+
|
| 819 |
+
The file is opened in binary mode for files, in text mode
|
| 820 |
+
for other fields
|
| 821 |
+
|
| 822 |
+
This version opens a temporary file for reading and writing,
|
| 823 |
+
and immediately deletes (unlinks) it. The trick (on Unix!) is
|
| 824 |
+
that the file can still be used, but it can't be opened by
|
| 825 |
+
another process, and it will automatically be deleted when it
|
| 826 |
+
is closed or when the current process terminates.
|
| 827 |
+
|
| 828 |
+
If you want a more permanent file, you derive a class which
|
| 829 |
+
overrides this method. If you want a visible temporary file
|
| 830 |
+
that is nevertheless automatically deleted when the script
|
| 831 |
+
terminates, try defining a __del__ method in a derived class
|
| 832 |
+
which unlinks the temporary files you have created.
|
| 833 |
+
|
| 834 |
+
"""
|
| 835 |
+
if self._binary_file:
|
| 836 |
+
return tempfile.TemporaryFile("wb+")
|
| 837 |
+
else:
|
| 838 |
+
return tempfile.TemporaryFile("w+",
|
| 839 |
+
encoding=self.encoding, newline = '\n')
|
| 840 |
+
|
| 841 |
+
|
| 842 |
+
# Test/debug code
|
| 843 |
+
# ===============
|
| 844 |
+
|
| 845 |
+
def test(environ=os.environ):
|
| 846 |
+
"""Robust test CGI script, usable as main program.
|
| 847 |
+
|
| 848 |
+
Write minimal HTTP headers and dump all information provided to
|
| 849 |
+
the script in HTML form.
|
| 850 |
+
|
| 851 |
+
"""
|
| 852 |
+
print("Content-type: text/html")
|
| 853 |
+
print()
|
| 854 |
+
sys.stderr = sys.stdout
|
| 855 |
+
try:
|
| 856 |
+
form = FieldStorage() # Replace with other classes to test those
|
| 857 |
+
print_directory()
|
| 858 |
+
print_arguments()
|
| 859 |
+
print_form(form)
|
| 860 |
+
print_environ(environ)
|
| 861 |
+
print_environ_usage()
|
| 862 |
+
def f():
|
| 863 |
+
exec("testing print_exception() -- <I>italics?</I>")
|
| 864 |
+
def g(f=f):
|
| 865 |
+
f()
|
| 866 |
+
print("<H3>What follows is a test, not an actual exception:</H3>")
|
| 867 |
+
g()
|
| 868 |
+
except:
|
| 869 |
+
print_exception()
|
| 870 |
+
|
| 871 |
+
print("<H1>Second try with a small maxlen...</H1>")
|
| 872 |
+
|
| 873 |
+
global maxlen
|
| 874 |
+
maxlen = 50
|
| 875 |
+
try:
|
| 876 |
+
form = FieldStorage() # Replace with other classes to test those
|
| 877 |
+
print_directory()
|
| 878 |
+
print_arguments()
|
| 879 |
+
print_form(form)
|
| 880 |
+
print_environ(environ)
|
| 881 |
+
except:
|
| 882 |
+
print_exception()
|
| 883 |
+
|
| 884 |
+
def print_exception(type=None, value=None, tb=None, limit=None):
|
| 885 |
+
if type is None:
|
| 886 |
+
type, value, tb = sys.exc_info()
|
| 887 |
+
import traceback
|
| 888 |
+
print()
|
| 889 |
+
print("<H3>Traceback (most recent call last):</H3>")
|
| 890 |
+
list = traceback.format_tb(tb, limit) + \
|
| 891 |
+
traceback.format_exception_only(type, value)
|
| 892 |
+
print("<PRE>%s<B>%s</B></PRE>" % (
|
| 893 |
+
html.escape("".join(list[:-1])),
|
| 894 |
+
html.escape(list[-1]),
|
| 895 |
+
))
|
| 896 |
+
del tb
|
| 897 |
+
|
| 898 |
+
def print_environ(environ=os.environ):
|
| 899 |
+
"""Dump the shell environment as HTML."""
|
| 900 |
+
keys = sorted(environ.keys())
|
| 901 |
+
print()
|
| 902 |
+
print("<H3>Shell Environment:</H3>")
|
| 903 |
+
print("<DL>")
|
| 904 |
+
for key in keys:
|
| 905 |
+
print("<DT>", html.escape(key), "<DD>", html.escape(environ[key]))
|
| 906 |
+
print("</DL>")
|
| 907 |
+
print()
|
| 908 |
+
|
| 909 |
+
def print_form(form):
|
| 910 |
+
"""Dump the contents of a form as HTML."""
|
| 911 |
+
keys = sorted(form.keys())
|
| 912 |
+
print()
|
| 913 |
+
print("<H3>Form Contents:</H3>")
|
| 914 |
+
if not keys:
|
| 915 |
+
print("<P>No form fields.")
|
| 916 |
+
print("<DL>")
|
| 917 |
+
for key in keys:
|
| 918 |
+
print("<DT>" + html.escape(key) + ":", end=' ')
|
| 919 |
+
value = form[key]
|
| 920 |
+
print("<i>" + html.escape(repr(type(value))) + "</i>")
|
| 921 |
+
print("<DD>" + html.escape(repr(value)))
|
| 922 |
+
print("</DL>")
|
| 923 |
+
print()
|
| 924 |
+
|
| 925 |
+
def print_directory():
|
| 926 |
+
"""Dump the current directory as HTML."""
|
| 927 |
+
print()
|
| 928 |
+
print("<H3>Current Working Directory:</H3>")
|
| 929 |
+
try:
|
| 930 |
+
pwd = os.getcwd()
|
| 931 |
+
except OSError as msg:
|
| 932 |
+
print("OSError:", html.escape(str(msg)))
|
| 933 |
+
else:
|
| 934 |
+
print(html.escape(pwd))
|
| 935 |
+
print()
|
| 936 |
+
|
| 937 |
+
def print_arguments():
|
| 938 |
+
print()
|
| 939 |
+
print("<H3>Command Line Arguments:</H3>")
|
| 940 |
+
print()
|
| 941 |
+
print(sys.argv)
|
| 942 |
+
print()
|
| 943 |
+
|
| 944 |
+
def print_environ_usage():
|
| 945 |
+
"""Dump a list of environment variables used by CGI as HTML."""
|
| 946 |
+
print("""
|
| 947 |
+
<H3>These environment variables could have been set:</H3>
|
| 948 |
+
<UL>
|
| 949 |
+
<LI>AUTH_TYPE
|
| 950 |
+
<LI>CONTENT_LENGTH
|
| 951 |
+
<LI>CONTENT_TYPE
|
| 952 |
+
<LI>DATE_GMT
|
| 953 |
+
<LI>DATE_LOCAL
|
| 954 |
+
<LI>DOCUMENT_NAME
|
| 955 |
+
<LI>DOCUMENT_ROOT
|
| 956 |
+
<LI>DOCUMENT_URI
|
| 957 |
+
<LI>GATEWAY_INTERFACE
|
| 958 |
+
<LI>LAST_MODIFIED
|
| 959 |
+
<LI>PATH
|
| 960 |
+
<LI>PATH_INFO
|
| 961 |
+
<LI>PATH_TRANSLATED
|
| 962 |
+
<LI>QUERY_STRING
|
| 963 |
+
<LI>REMOTE_ADDR
|
| 964 |
+
<LI>REMOTE_HOST
|
| 965 |
+
<LI>REMOTE_IDENT
|
| 966 |
+
<LI>REMOTE_USER
|
| 967 |
+
<LI>REQUEST_METHOD
|
| 968 |
+
<LI>SCRIPT_NAME
|
| 969 |
+
<LI>SERVER_NAME
|
| 970 |
+
<LI>SERVER_PORT
|
| 971 |
+
<LI>SERVER_PROTOCOL
|
| 972 |
+
<LI>SERVER_ROOT
|
| 973 |
+
<LI>SERVER_SOFTWARE
|
| 974 |
+
</UL>
|
| 975 |
+
In addition, HTTP headers sent by the server may be passed in the
|
| 976 |
+
environment as well. Here are some common variable names:
|
| 977 |
+
<UL>
|
| 978 |
+
<LI>HTTP_ACCEPT
|
| 979 |
+
<LI>HTTP_CONNECTION
|
| 980 |
+
<LI>HTTP_HOST
|
| 981 |
+
<LI>HTTP_PRAGMA
|
| 982 |
+
<LI>HTTP_REFERER
|
| 983 |
+
<LI>HTTP_USER_AGENT
|
| 984 |
+
</UL>
|
| 985 |
+
""")
|
| 986 |
+
|
| 987 |
+
|
| 988 |
+
# Utilities
|
| 989 |
+
# =========
|
| 990 |
+
|
| 991 |
+
def valid_boundary(s):
|
| 992 |
+
import re
|
| 993 |
+
if isinstance(s, bytes):
|
| 994 |
+
_vb_pattern = b"^[ -~]{0,200}[!-~]$"
|
| 995 |
+
else:
|
| 996 |
+
_vb_pattern = "^[ -~]{0,200}[!-~]$"
|
| 997 |
+
return re.match(_vb_pattern, s)
|
| 998 |
+
|
| 999 |
+
# Invoke mainline
|
| 1000 |
+
# ===============
|
| 1001 |
+
|
| 1002 |
+
# Call test() when this file is run as a script (not imported as a module)
|
| 1003 |
+
if __name__ == '__main__':
|
| 1004 |
+
test()
|
evalkit_cambrian/lib/python3.10/cgitb.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""More comprehensive traceback formatting for Python scripts.
|
| 2 |
+
|
| 3 |
+
To enable this module, do:
|
| 4 |
+
|
| 5 |
+
import cgitb; cgitb.enable()
|
| 6 |
+
|
| 7 |
+
at the top of your script. The optional arguments to enable() are:
|
| 8 |
+
|
| 9 |
+
display - if true, tracebacks are displayed in the web browser
|
| 10 |
+
logdir - if set, tracebacks are written to files in this directory
|
| 11 |
+
context - number of lines of source code to show for each stack frame
|
| 12 |
+
format - 'text' or 'html' controls the output format
|
| 13 |
+
|
| 14 |
+
By default, tracebacks are displayed but not saved, the context is 5 lines
|
| 15 |
+
and the output format is 'html' (for backwards compatibility with the
|
| 16 |
+
original use of this module)
|
| 17 |
+
|
| 18 |
+
Alternatively, if you have caught an exception and want cgitb to display it
|
| 19 |
+
for you, call cgitb.handler(). The optional argument to handler() is a
|
| 20 |
+
3-item tuple (etype, evalue, etb) just like the value of sys.exc_info().
|
| 21 |
+
The default handler displays output as HTML.
|
| 22 |
+
|
| 23 |
+
"""
|
| 24 |
+
import inspect
|
| 25 |
+
import keyword
|
| 26 |
+
import linecache
|
| 27 |
+
import os
|
| 28 |
+
import pydoc
|
| 29 |
+
import sys
|
| 30 |
+
import tempfile
|
| 31 |
+
import time
|
| 32 |
+
import tokenize
|
| 33 |
+
import traceback
|
| 34 |
+
|
| 35 |
+
def reset():
|
| 36 |
+
"""Return a string that resets the CGI and browser to a known state."""
|
| 37 |
+
return '''<!--: spam
|
| 38 |
+
Content-Type: text/html
|
| 39 |
+
|
| 40 |
+
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> -->
|
| 41 |
+
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> -->
|
| 42 |
+
</font> </font> </font> </script> </object> </blockquote> </pre>
|
| 43 |
+
</table> </table> </table> </table> </table> </font> </font> </font>'''
|
| 44 |
+
|
| 45 |
+
__UNDEF__ = [] # a special sentinel object
|
| 46 |
+
def small(text):
|
| 47 |
+
if text:
|
| 48 |
+
return '<small>' + text + '</small>'
|
| 49 |
+
else:
|
| 50 |
+
return ''
|
| 51 |
+
|
| 52 |
+
def strong(text):
|
| 53 |
+
if text:
|
| 54 |
+
return '<strong>' + text + '</strong>'
|
| 55 |
+
else:
|
| 56 |
+
return ''
|
| 57 |
+
|
| 58 |
+
def grey(text):
|
| 59 |
+
if text:
|
| 60 |
+
return '<font color="#909090">' + text + '</font>'
|
| 61 |
+
else:
|
| 62 |
+
return ''
|
| 63 |
+
|
| 64 |
+
def lookup(name, frame, locals):
|
| 65 |
+
"""Find the value for a given name in the given environment."""
|
| 66 |
+
if name in locals:
|
| 67 |
+
return 'local', locals[name]
|
| 68 |
+
if name in frame.f_globals:
|
| 69 |
+
return 'global', frame.f_globals[name]
|
| 70 |
+
if '__builtins__' in frame.f_globals:
|
| 71 |
+
builtins = frame.f_globals['__builtins__']
|
| 72 |
+
if type(builtins) is type({}):
|
| 73 |
+
if name in builtins:
|
| 74 |
+
return 'builtin', builtins[name]
|
| 75 |
+
else:
|
| 76 |
+
if hasattr(builtins, name):
|
| 77 |
+
return 'builtin', getattr(builtins, name)
|
| 78 |
+
return None, __UNDEF__
|
| 79 |
+
|
| 80 |
+
def scanvars(reader, frame, locals):
|
| 81 |
+
"""Scan one logical line of Python and look up values of variables used."""
|
| 82 |
+
vars, lasttoken, parent, prefix, value = [], None, None, '', __UNDEF__
|
| 83 |
+
for ttype, token, start, end, line in tokenize.generate_tokens(reader):
|
| 84 |
+
if ttype == tokenize.NEWLINE: break
|
| 85 |
+
if ttype == tokenize.NAME and token not in keyword.kwlist:
|
| 86 |
+
if lasttoken == '.':
|
| 87 |
+
if parent is not __UNDEF__:
|
| 88 |
+
value = getattr(parent, token, __UNDEF__)
|
| 89 |
+
vars.append((prefix + token, prefix, value))
|
| 90 |
+
else:
|
| 91 |
+
where, value = lookup(token, frame, locals)
|
| 92 |
+
vars.append((token, where, value))
|
| 93 |
+
elif token == '.':
|
| 94 |
+
prefix += lasttoken + '.'
|
| 95 |
+
parent = value
|
| 96 |
+
else:
|
| 97 |
+
parent, prefix = None, ''
|
| 98 |
+
lasttoken = token
|
| 99 |
+
return vars
|
| 100 |
+
|
| 101 |
+
def html(einfo, context=5):
    """Return a nice HTML document describing a given traceback.

    *einfo* is an (exception type, exception value, traceback) triple as
    returned by sys.exc_info(); *context* is the number of source lines
    shown around the current line in each frame.
    """
    etype, evalue, etb = einfo
    if isinstance(etype, type):
        etype = etype.__name__
    pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
    date = time.ctime(time.time())
    # Page header: exception name plus interpreter/version/date banner.
    head = '<body bgcolor="#f0f0f8">' + pydoc.html.heading(
        '<big><big>%s</big></big>' %
        strong(pydoc.html.escape(str(etype))),
        '#ffffff', '#6622aa', pyver + '<br>' + date) + '''
<p>A problem occurred in a Python script. Here is the sequence of
function calls leading up to the error, in the order they occurred.</p>'''

    indent = '<tt>' + small(' ' * 5) + ' </tt>'
    frames = []
    records = inspect.getinnerframes(etb, context)
    # One HTML table per traceback frame, innermost last.
    for frame, file, lnum, func, lines, index in records:
        if file:
            file = os.path.abspath(file)
            link = '<a href="file://%s">%s</a>' % (file, pydoc.html.escape(file))
        else:
            file = link = '?'
        args, varargs, varkw, locals = inspect.getargvalues(frame)
        call = ''
        if func != '?':
            call = 'in ' + strong(pydoc.html.escape(func))
            if func != "<module>":
                call += inspect.formatargvalues(args, varargs, varkw, locals,
                    formatvalue=lambda value: '=' + pydoc.html.repr(value))

        # reader() feeds consecutive source lines to the tokenizer while
        # recording which line numbers were consumed (for highlighting).
        highlight = {}
        def reader(lnum=[lnum]):
            highlight[lnum[0]] = 1
            try: return linecache.getline(file, lnum[0])
            finally: lnum[0] += 1
        vars = scanvars(reader, frame, locals)

        rows = ['<tr><td bgcolor="#d8bbff">%s%s %s</td></tr>' %
                ('<big> </big>', link, call)]
        if index is not None:
            # lnum - index is the number of the first context line.
            i = lnum - index
            for line in lines:
                num = small(' ' * (5-len(str(i))) + str(i)) + ' '
                if i in highlight:
                    # The current line gets an arrow and a pink background.
                    line = '<tt>=>%s%s</tt>' % (num, pydoc.html.preformat(line))
                    rows.append('<tr><td bgcolor="#ffccee">%s</td></tr>' % line)
                else:
                    line = '<tt>  %s%s</tt>' % (num, pydoc.html.preformat(line))
                    rows.append('<tr><td>%s</td></tr>' % grey(line))
                i += 1

        # Dump each variable referenced on the highlighted line, once.
        done, dump = {}, []
        for name, where, value in vars:
            if name in done: continue
            done[name] = 1
            if value is not __UNDEF__:
                if where in ('global', 'builtin'):
                    name = ('<em>%s</em> ' % where) + strong(name)
                elif where == 'local':
                    name = strong(name)
                else:
                    # 'where' is a dotted prefix; show only the last component.
                    name = where + strong(name.split('.')[-1])
                dump.append('%s = %s' % (name, pydoc.html.repr(value)))
            else:
                dump.append(name + ' <em>undefined</em>')

        rows.append('<tr><td>%s</td></tr>' % small(grey(', '.join(dump))))
        frames.append('''
<table width="100%%" cellspacing=0 cellpadding=0 border=0>
%s</table>''' % '\n'.join(rows))

    # Exception summary plus its public attributes.
    exception = ['<p>%s: %s' % (strong(pydoc.html.escape(str(etype))),
                                pydoc.html.escape(str(evalue)))]
    for name in dir(evalue):
        if name[:1] == '_': continue
        value = pydoc.html.repr(getattr(evalue, name))
        exception.append('\n<br>%s%s =\n%s' % (indent, name, value))

    # Append the plain traceback inside an HTML comment so that readers
    # without a browser still get the standard traceback text.
    return head + ''.join(frames) + ''.join(exception) + '''


<!-- The above is a description of an error in a Python program, formatted
     for a web browser because the 'cgitb' module was enabled.  In case you
     are not reading this in a web browser, here is the original traceback:

%s
-->
''' % pydoc.html.escape(
          ''.join(traceback.format_exception(etype, evalue, etb)))
|
| 191 |
+
|
| 192 |
+
def text(einfo, context=5):
    """Return a plain text document describing a given traceback.

    Plain-text analogue of html(): same structure (banner, per-frame source
    context with variable dumps, exception attributes, raw traceback).
    """
    etype, evalue, etb = einfo
    if isinstance(etype, type):
        etype = etype.__name__
    pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
    date = time.ctime(time.time())
    head = "%s\n%s\n%s\n" % (str(etype), pyver, date) + '''
A problem occurred in a Python script. Here is the sequence of
function calls leading up to the error, in the order they occurred.
'''

    frames = []
    records = inspect.getinnerframes(etb, context)
    for frame, file, lnum, func, lines, index in records:
        file = file and os.path.abspath(file) or '?'
        args, varargs, varkw, locals = inspect.getargvalues(frame)
        call = ''
        if func != '?':
            call = 'in ' + func
            if func != "<module>":
                call += inspect.formatargvalues(args, varargs, varkw, locals,
                    formatvalue=lambda value: '=' + pydoc.text.repr(value))

        # reader() feeds consecutive source lines to the tokenizer while
        # recording which line numbers were consumed.
        highlight = {}
        def reader(lnum=[lnum]):
            highlight[lnum[0]] = 1
            try: return linecache.getline(file, lnum[0])
            finally: lnum[0] += 1
        vars = scanvars(reader, frame, locals)

        rows = [' %s %s' % (file, call)]
        if index is not None:
            # lnum - index is the number of the first context line.
            i = lnum - index
            for line in lines:
                num = '%5d ' % i
                rows.append(num+line.rstrip())
                i += 1

        # Dump each variable referenced in the frame's source line, once.
        done, dump = {}, []
        for name, where, value in vars:
            if name in done: continue
            done[name] = 1
            if value is not __UNDEF__:
                if where == 'global': name = 'global ' + name
                elif where != 'local': name = where + name.split('.')[-1]
                dump.append('%s = %s' % (name, pydoc.text.repr(value)))
            else:
                dump.append(name + ' undefined')

        rows.append('\n'.join(dump))
        frames.append('\n%s\n' % '\n'.join(rows))

    # Exception summary plus all of its attributes (including dunders).
    exception = ['%s: %s' % (str(etype), str(evalue))]
    for name in dir(evalue):
        value = pydoc.text.repr(getattr(evalue, name))
        exception.append('\n%s%s = %s' % (" "*4, name, value))

    return head + ''.join(frames) + ''.join(exception) + '''

The above is a description of an error in a Python program. Here is
the original traceback:

%s
''' % ''.join(traceback.format_exception(etype, evalue, etb))
|
| 257 |
+
|
| 258 |
+
class Hook:
    """A hook to replace sys.excepthook that shows tracebacks in HTML."""

    def __init__(self, display=1, logdir=None, context=5, file=None,
                 format="html"):
        self.display = display          # send tracebacks to browser if true
        self.logdir = logdir            # log tracebacks to files if not None
        self.context = context          # number of source code lines per frame
        self.file = file or sys.stdout  # place to send the output
        self.format = format            # "html" or anything else for plain text

    def __call__(self, etype, evalue, etb):
        # sys.excepthook-compatible signature; delegate to handle().
        self.handle((etype, evalue, etb))

    def handle(self, info=None):
        """Format and report the exception described by *info*.

        *info* is an (etype, evalue, etb) triple; defaults to the exception
        currently being handled (sys.exc_info()).
        """
        info = info or sys.exc_info()
        if self.format == "html":
            # Emit markup that clears any partial page output first.
            self.file.write(reset())

        formatter = (self.format=="html") and html or text
        plain = False
        try:
            doc = formatter(info, self.context)
        except:                         # just in case something goes wrong
            # Fall back to the standard plain-text traceback.
            doc = ''.join(traceback.format_exception(*info))
            plain = True

        if self.display:
            if plain:
                # The fallback text is not HTML; escape and wrap it.
                doc = pydoc.html.escape(doc)
                self.file.write('<pre>' + doc + '</pre>\n')
            else:
                self.file.write(doc + '\n')
        else:
            self.file.write('<p>A problem occurred in a Python script.\n')

        if self.logdir is not None:
            # Also persist the report to a unique file in logdir.
            suffix = ['.txt', '.html'][self.format=="html"]
            (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir)

            try:
                with os.fdopen(fd, 'w') as file:
                    file.write(doc)
                msg = '%s contains the description of this error.' % path
            except:
                # Best effort only; report the failure instead of raising.
                msg = 'Tried to save traceback to %s, but failed.' % path

            if self.format == 'html':
                self.file.write('<p>%s</p>\n' % msg)
            else:
                self.file.write(msg + '\n')
        try:
            self.file.flush()
        except: pass
|
| 312 |
+
|
| 313 |
+
# Module-level convenience handler with default settings (display to stdout).
handler = Hook().handle
def enable(display=1, logdir=None, context=5, format="html"):
    """Install an exception handler that formats tracebacks as HTML.

    The optional argument 'display' can be set to 0 to suppress sending the
    traceback to the browser, and 'logdir' can be set to a directory to cause
    tracebacks to be written to files there."""
    sys.excepthook = Hook(display=display, logdir=logdir,
                          context=context, format=format)
|
evalkit_cambrian/lib/python3.10/chunk.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Simple class to read IFF chunks.
|
| 2 |
+
|
| 3 |
+
An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File
|
| 4 |
+
Format)) has the following structure:
|
| 5 |
+
|
| 6 |
+
+----------------+
|
| 7 |
+
| ID (4 bytes) |
|
| 8 |
+
+----------------+
|
| 9 |
+
| size (4 bytes) |
|
| 10 |
+
+----------------+
|
| 11 |
+
| data |
|
| 12 |
+
| ... |
|
| 13 |
+
+----------------+
|
| 14 |
+
|
| 15 |
+
The ID is a 4-byte string which identifies the type of chunk.
|
| 16 |
+
|
| 17 |
+
The size field (a 32-bit value, encoded using big-endian byte order)
|
| 18 |
+
gives the size of the whole chunk, including the 8-byte header.
|
| 19 |
+
|
| 20 |
+
Usually an IFF-type file consists of one or more chunks. The proposed
|
| 21 |
+
usage of the Chunk class defined here is to instantiate an instance at
|
| 22 |
+
the start of each chunk and read from the instance until it reaches
|
| 23 |
+
the end, after which a new instance can be instantiated. At the end
|
| 24 |
+
of the file, creating a new instance will fail with an EOFError
|
| 25 |
+
exception.
|
| 26 |
+
|
| 27 |
+
Usage:
|
| 28 |
+
while True:
|
| 29 |
+
try:
|
| 30 |
+
chunk = Chunk(file)
|
| 31 |
+
except EOFError:
|
| 32 |
+
break
|
| 33 |
+
chunktype = chunk.getname()
|
| 34 |
+
while True:
|
| 35 |
+
data = chunk.read(nbytes)
|
| 36 |
+
if not data:
|
| 37 |
+
pass
|
| 38 |
+
# do something with data
|
| 39 |
+
|
| 40 |
+
The interface is file-like. The implemented methods are:
|
| 41 |
+
read, close, seek, tell, isatty.
|
| 42 |
+
Extra methods are: skip() (called by close, skips to the end of the chunk),
|
| 43 |
+
getname() (returns the name (ID) of the chunk)
|
| 44 |
+
|
| 45 |
+
The __init__ method has one required argument, a file-like object
|
| 46 |
+
(including a chunk instance), and one optional argument, a flag which
|
| 47 |
+
specifies whether or not chunks are aligned on 2-byte boundaries. The
|
| 48 |
+
default is 1, i.e. aligned.
|
| 49 |
+
"""
|
| 50 |
+
|
| 51 |
+
class Chunk:
    """File-like wrapper exposing a single IFF-style chunk of a file.

    Reads the 8-byte (ID, size) header on construction; read/seek/tell then
    operate within the chunk's data region only.
    """
    def __init__(self, file, align=True, bigendian=True, inclheader=False):
        # Raises EOFError when a complete 8-byte header cannot be read.
        import struct
        self.closed = False
        self.align = align      # whether to align to word (2-byte) boundaries
        if bigendian:
            strflag = '>'
        else:
            strflag = '<'
        self.file = file
        self.chunkname = file.read(4)
        if len(self.chunkname) < 4:
            raise EOFError
        try:
            self.chunksize = struct.unpack_from(strflag+'L', file.read(4))[0]
        except struct.error:
            raise EOFError from None
        if inclheader:
            self.chunksize = self.chunksize - 8 # subtract header
        self.size_read = 0      # bytes of chunk data consumed so far
        try:
            self.offset = self.file.tell()
        except (AttributeError, OSError):
            # Underlying file does not support tell(); skip() will fall back
            # to sequential reads instead of seeking.
            self.seekable = False
        else:
            self.seekable = True

    def getname(self):
        """Return the name (ID) of the current chunk."""
        return self.chunkname

    def getsize(self):
        """Return the size of the current chunk."""
        return self.chunksize

    def close(self):
        # Position the underlying file at the next chunk; mark this chunk
        # closed even if skipping fails.
        if not self.closed:
            try:
                self.skip()
            finally:
                self.closed = True

    def isatty(self):
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return False

    def seek(self, pos, whence=0):
        """Seek to specified position into the chunk.
        Default position is 0 (start of chunk).
        If the file is not seekable, this will result in an error.
        """

        if self.closed:
            raise ValueError("I/O operation on closed file")
        if not self.seekable:
            raise OSError("cannot seek")
        if whence == 1:
            pos = pos + self.size_read
        elif whence == 2:
            pos = pos + self.chunksize
        if pos < 0 or pos > self.chunksize:
            raise RuntimeError
        self.file.seek(self.offset + pos, 0)
        self.size_read = pos

    def tell(self):
        # Current position relative to the start of the chunk data.
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return self.size_read

    def read(self, size=-1):
        """Read at most size bytes from the chunk.
        If size is omitted or negative, read until the end
        of the chunk.
        """

        if self.closed:
            raise ValueError("I/O operation on closed file")
        if self.size_read >= self.chunksize:
            return b''
        if size < 0:
            size = self.chunksize - self.size_read
        # Never read past the end of the chunk's data region.
        if size > self.chunksize - self.size_read:
            size = self.chunksize - self.size_read
        data = self.file.read(size)
        self.size_read = self.size_read + len(data)
        if self.size_read == self.chunksize and \
           self.align and \
           (self.chunksize & 1):
            # Consume the pad byte that keeps odd-sized chunks word-aligned.
            dummy = self.file.read(1)
            self.size_read = self.size_read + len(dummy)
        return data

    def skip(self):
        """Skip the rest of the chunk.
        If you are not interested in the contents of the chunk,
        this method should be called so that the file points to
        the start of the next chunk.
        """

        if self.closed:
            raise ValueError("I/O operation on closed file")
        if self.seekable:
            try:
                n = self.chunksize - self.size_read
                # maybe fix alignment
                if self.align and (self.chunksize & 1):
                    n = n + 1
                self.file.seek(n, 1)
                self.size_read = self.size_read + n
                return
            except OSError:
                pass
        # Non-seekable fallback: consume the remainder in 8K reads.
        while self.size_read < self.chunksize:
            n = min(8192, self.chunksize - self.size_read)
            dummy = self.read(n)
            if not dummy:
                raise EOFError
|
evalkit_cambrian/lib/python3.10/code.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities needed to emulate Python's interactive interpreter.
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
from codeop import CommandCompiler, compile_command
|
| 11 |
+
|
| 12 |
+
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
|
| 13 |
+
"compile_command"]
|
| 14 |
+
|
| 15 |
+
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).

    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.

        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        # CommandCompiler remembers __future__ statements across inputs.
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One of several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.

        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.

        """
        try:
            exec(code, self.locals)
        except SystemExit:
            raise
        except:
            self.showtraceback()

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.

        """
        type, value, tb = sys.exc_info()
        # Mirror the real interpreter: record the exception in sys.last_*.
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value.args
            except ValueError:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        if sys.excepthook is sys.__excepthook__:
            lines = traceback.format_exception_only(type, value)
            self.write(''.join(lines))
        else:
            # If someone has set sys.excepthook, we let that take precedence
            # over self.write
            sys.excepthook(type, value, tb)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.

        """
        sys.last_type, sys.last_value, last_tb = ei = sys.exc_info()
        sys.last_traceback = last_tb
        try:
            # tb_next skips the frame belonging to runcode() itself.
            lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next)
            if sys.excepthook is sys.__excepthook__:
                self.write(''.join(lines))
            else:
                # If someone has set sys.excepthook, we let that take precedence
                # over self.write
                sys.excepthook(ei[0], ei[1], last_tb)
        finally:
            # Break the traceback reference cycle promptly.
            last_tb = ei = None

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.

        """
        sys.stderr.write(data)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.

    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.

    """

    def __init__(self, locals=None, filename="<console>"):
        """Constructor.

        The optional locals argument will be passed to the
        InteractiveInterpreter base class.

        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.

        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Reset the input buffer."""
        self.buffer = []

    def interact(self, banner=None, exitmsg=None):
        """Closely emulate the interactive Python console.

        The optional banner argument specifies the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).

        The optional exitmsg argument specifies the exit message
        printed when exiting. Pass the empty string to suppress
        printing an exit message. If exitmsg is not given or None,
        a default message is printed.

        """
        # Make sure the prompts exist, as in the real interpreter.
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        elif banner:
            self.write("%s\n" % str(banner))
        # 'more' is true while a multi-line statement is being entered.
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    # End of input (e.g. Ctrl-D): leave the loop.
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C: discard the pending statement and re-prompt.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0
        if exitmsg is None:
            self.write('now exiting %s...\n' % self.__class__.__name__)
        elif exitmsg != '':
            self.write('%s\n' % exitmsg)

    def push(self, line):
        """Push a line to the interpreter.

        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).

        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more

    def raw_input(self, prompt=""):
        """Write a prompt and read a line.

        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.

        The base implementation uses the built-in function
        input(); a subclass may replace this with a different
        implementation.

        """
        return input(prompt)
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def interact(banner=None, readfunc=None, local=None, exitmsg=None):
    """Closely emulate the interactive Python interpreter.

    This is a backwards compatible interface to the InteractiveConsole
    class.  When readfunc is not specified, it attempts to import the
    readline module to enable GNU readline if it is available.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()
    exitmsg -- passed to InteractiveConsole.interact()

    """
    console = InteractiveConsole(local)
    if readfunc is None:
        # Best effort: importing readline enables line editing for input().
        try:
            import readline
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner, exitmsg)
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
if __name__ == "__main__":
|
| 305 |
+
import argparse
|
| 306 |
+
|
| 307 |
+
parser = argparse.ArgumentParser()
|
| 308 |
+
parser.add_argument('-q', action='store_true',
|
| 309 |
+
help="don't print version and copyright messages")
|
| 310 |
+
args = parser.parse_args()
|
| 311 |
+
if args.q or sys.flags.quiet:
|
| 312 |
+
banner = ''
|
| 313 |
+
else:
|
| 314 |
+
banner = None
|
| 315 |
+
interact(banner)
|
evalkit_cambrian/lib/python3.10/codecs.py
ADDED
|
@@ -0,0 +1,1127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" codecs -- Python Codec Registry, API and helpers.
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
Written by Marc-Andre Lemburg (mal@lemburg.com).
|
| 5 |
+
|
| 6 |
+
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
|
| 7 |
+
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import builtins
|
| 11 |
+
import sys
|
| 12 |
+
|
| 13 |
+
### Registry and builtin stateless codec functions
|
| 14 |
+
|
| 15 |
+
try:
|
| 16 |
+
from _codecs import *
|
| 17 |
+
except ImportError as why:
|
| 18 |
+
raise SystemError('Failed to load the builtin codecs: %s' % why)
|
| 19 |
+
|
| 20 |
+
__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
|
| 21 |
+
"BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
|
| 22 |
+
"BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE",
|
| 23 |
+
"BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE",
|
| 24 |
+
"CodecInfo", "Codec", "IncrementalEncoder", "IncrementalDecoder",
|
| 25 |
+
"StreamReader", "StreamWriter",
|
| 26 |
+
"StreamReaderWriter", "StreamRecoder",
|
| 27 |
+
"getencoder", "getdecoder", "getincrementalencoder",
|
| 28 |
+
"getincrementaldecoder", "getreader", "getwriter",
|
| 29 |
+
"encode", "decode", "iterencode", "iterdecode",
|
| 30 |
+
"strict_errors", "ignore_errors", "replace_errors",
|
| 31 |
+
"xmlcharrefreplace_errors",
|
| 32 |
+
"backslashreplace_errors", "namereplace_errors",
|
| 33 |
+
"register_error", "lookup_error"]
|
| 34 |
+
|
| 35 |
+
### Constants
|
| 36 |
+
|
| 37 |
+
#
|
| 38 |
+
# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF)
|
| 39 |
+
# and its possible byte string values
|
| 40 |
+
# for UTF8/UTF16/UTF32 output and little/big endian machines
|
| 41 |
+
#
|
| 42 |
+
|
| 43 |
+
# UTF-8
|
| 44 |
+
BOM_UTF8 = b'\xef\xbb\xbf'
|
| 45 |
+
|
| 46 |
+
# UTF-16, little endian
|
| 47 |
+
BOM_LE = BOM_UTF16_LE = b'\xff\xfe'
|
| 48 |
+
|
| 49 |
+
# UTF-16, big endian
|
| 50 |
+
BOM_BE = BOM_UTF16_BE = b'\xfe\xff'
|
| 51 |
+
|
| 52 |
+
# UTF-32, little endian
|
| 53 |
+
BOM_UTF32_LE = b'\xff\xfe\x00\x00'
|
| 54 |
+
|
| 55 |
+
# UTF-32, big endian
|
| 56 |
+
BOM_UTF32_BE = b'\x00\x00\xfe\xff'
|
| 57 |
+
|
| 58 |
+
if sys.byteorder == 'little':
|
| 59 |
+
|
| 60 |
+
# UTF-16, native endianness
|
| 61 |
+
BOM = BOM_UTF16 = BOM_UTF16_LE
|
| 62 |
+
|
| 63 |
+
# UTF-32, native endianness
|
| 64 |
+
BOM_UTF32 = BOM_UTF32_LE
|
| 65 |
+
|
| 66 |
+
else:
|
| 67 |
+
|
| 68 |
+
# UTF-16, native endianness
|
| 69 |
+
BOM = BOM_UTF16 = BOM_UTF16_BE
|
| 70 |
+
|
| 71 |
+
# UTF-32, native endianness
|
| 72 |
+
BOM_UTF32 = BOM_UTF32_BE
|
| 73 |
+
|
| 74 |
+
# Old broken names (don't use in new code)
|
| 75 |
+
BOM32_LE = BOM_UTF16_LE
|
| 76 |
+
BOM32_BE = BOM_UTF16_BE
|
| 77 |
+
BOM64_LE = BOM_UTF32_LE
|
| 78 |
+
BOM64_BE = BOM_UTF32_BE
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
### Codec base classes (defining the API)
|
| 82 |
+
|
| 83 |
+
class CodecInfo(tuple):
|
| 84 |
+
"""Codec details when looking up the codec registry"""
|
| 85 |
+
|
| 86 |
+
# Private API to allow Python 3.4 to denylist the known non-Unicode
|
| 87 |
+
# codecs in the standard library. A more general mechanism to
|
| 88 |
+
# reliably distinguish test encodings from other codecs will hopefully
|
| 89 |
+
# be defined for Python 3.5
|
| 90 |
+
#
|
| 91 |
+
# See http://bugs.python.org/issue19619
|
| 92 |
+
_is_text_encoding = True # Assume codecs are text encodings by default
|
| 93 |
+
|
| 94 |
+
def __new__(cls, encode, decode, streamreader=None, streamwriter=None,
|
| 95 |
+
incrementalencoder=None, incrementaldecoder=None, name=None,
|
| 96 |
+
*, _is_text_encoding=None):
|
| 97 |
+
self = tuple.__new__(cls, (encode, decode, streamreader, streamwriter))
|
| 98 |
+
self.name = name
|
| 99 |
+
self.encode = encode
|
| 100 |
+
self.decode = decode
|
| 101 |
+
self.incrementalencoder = incrementalencoder
|
| 102 |
+
self.incrementaldecoder = incrementaldecoder
|
| 103 |
+
self.streamwriter = streamwriter
|
| 104 |
+
self.streamreader = streamreader
|
| 105 |
+
if _is_text_encoding is not None:
|
| 106 |
+
self._is_text_encoding = _is_text_encoding
|
| 107 |
+
return self
|
| 108 |
+
|
| 109 |
+
def __repr__(self):
|
| 110 |
+
return "<%s.%s object for encoding %s at %#x>" % \
|
| 111 |
+
(self.__class__.__module__, self.__class__.__qualname__,
|
| 112 |
+
self.name, id(self))
|
| 113 |
+
|
| 114 |
+
class Codec:
|
| 115 |
+
|
| 116 |
+
""" Defines the interface for stateless encoders/decoders.
|
| 117 |
+
|
| 118 |
+
The .encode()/.decode() methods may use different error
|
| 119 |
+
handling schemes by providing the errors argument. These
|
| 120 |
+
string values are predefined:
|
| 121 |
+
|
| 122 |
+
'strict' - raise a ValueError error (or a subclass)
|
| 123 |
+
'ignore' - ignore the character and continue with the next
|
| 124 |
+
'replace' - replace with a suitable replacement character;
|
| 125 |
+
Python will use the official U+FFFD REPLACEMENT
|
| 126 |
+
CHARACTER for the builtin Unicode codecs on
|
| 127 |
+
decoding and '?' on encoding.
|
| 128 |
+
'surrogateescape' - replace with private code points U+DCnn.
|
| 129 |
+
'xmlcharrefreplace' - Replace with the appropriate XML
|
| 130 |
+
character reference (only for encoding).
|
| 131 |
+
'backslashreplace' - Replace with backslashed escape sequences.
|
| 132 |
+
'namereplace' - Replace with \\N{...} escape sequences
|
| 133 |
+
(only for encoding).
|
| 134 |
+
|
| 135 |
+
The set of allowed values can be extended via register_error.
|
| 136 |
+
|
| 137 |
+
"""
|
| 138 |
+
def encode(self, input, errors='strict'):
|
| 139 |
+
|
| 140 |
+
""" Encodes the object input and returns a tuple (output
|
| 141 |
+
object, length consumed).
|
| 142 |
+
|
| 143 |
+
errors defines the error handling to apply. It defaults to
|
| 144 |
+
'strict' handling.
|
| 145 |
+
|
| 146 |
+
The method may not store state in the Codec instance. Use
|
| 147 |
+
StreamWriter for codecs which have to keep state in order to
|
| 148 |
+
make encoding efficient.
|
| 149 |
+
|
| 150 |
+
The encoder must be able to handle zero length input and
|
| 151 |
+
return an empty object of the output object type in this
|
| 152 |
+
situation.
|
| 153 |
+
|
| 154 |
+
"""
|
| 155 |
+
raise NotImplementedError
|
| 156 |
+
|
| 157 |
+
def decode(self, input, errors='strict'):
|
| 158 |
+
|
| 159 |
+
""" Decodes the object input and returns a tuple (output
|
| 160 |
+
object, length consumed).
|
| 161 |
+
|
| 162 |
+
input must be an object which provides the bf_getreadbuf
|
| 163 |
+
buffer slot. Python strings, buffer objects and memory
|
| 164 |
+
mapped files are examples of objects providing this slot.
|
| 165 |
+
|
| 166 |
+
errors defines the error handling to apply. It defaults to
|
| 167 |
+
'strict' handling.
|
| 168 |
+
|
| 169 |
+
The method may not store state in the Codec instance. Use
|
| 170 |
+
StreamReader for codecs which have to keep state in order to
|
| 171 |
+
make decoding efficient.
|
| 172 |
+
|
| 173 |
+
The decoder must be able to handle zero length input and
|
| 174 |
+
return an empty object of the output object type in this
|
| 175 |
+
situation.
|
| 176 |
+
|
| 177 |
+
"""
|
| 178 |
+
raise NotImplementedError
|
| 179 |
+
|
| 180 |
+
class IncrementalEncoder(object):
|
| 181 |
+
"""
|
| 182 |
+
An IncrementalEncoder encodes an input in multiple steps. The input can
|
| 183 |
+
be passed piece by piece to the encode() method. The IncrementalEncoder
|
| 184 |
+
remembers the state of the encoding process between calls to encode().
|
| 185 |
+
"""
|
| 186 |
+
def __init__(self, errors='strict'):
|
| 187 |
+
"""
|
| 188 |
+
Creates an IncrementalEncoder instance.
|
| 189 |
+
|
| 190 |
+
The IncrementalEncoder may use different error handling schemes by
|
| 191 |
+
providing the errors keyword argument. See the module docstring
|
| 192 |
+
for a list of possible values.
|
| 193 |
+
"""
|
| 194 |
+
self.errors = errors
|
| 195 |
+
self.buffer = ""
|
| 196 |
+
|
| 197 |
+
def encode(self, input, final=False):
|
| 198 |
+
"""
|
| 199 |
+
Encodes input and returns the resulting object.
|
| 200 |
+
"""
|
| 201 |
+
raise NotImplementedError
|
| 202 |
+
|
| 203 |
+
def reset(self):
|
| 204 |
+
"""
|
| 205 |
+
Resets the encoder to the initial state.
|
| 206 |
+
"""
|
| 207 |
+
|
| 208 |
+
def getstate(self):
|
| 209 |
+
"""
|
| 210 |
+
Return the current state of the encoder.
|
| 211 |
+
"""
|
| 212 |
+
return 0
|
| 213 |
+
|
| 214 |
+
def setstate(self, state):
|
| 215 |
+
"""
|
| 216 |
+
Set the current state of the encoder. state must have been
|
| 217 |
+
returned by getstate().
|
| 218 |
+
"""
|
| 219 |
+
|
| 220 |
+
class BufferedIncrementalEncoder(IncrementalEncoder):
|
| 221 |
+
"""
|
| 222 |
+
This subclass of IncrementalEncoder can be used as the baseclass for an
|
| 223 |
+
incremental encoder if the encoder must keep some of the output in a
|
| 224 |
+
buffer between calls to encode().
|
| 225 |
+
"""
|
| 226 |
+
def __init__(self, errors='strict'):
|
| 227 |
+
IncrementalEncoder.__init__(self, errors)
|
| 228 |
+
# unencoded input that is kept between calls to encode()
|
| 229 |
+
self.buffer = ""
|
| 230 |
+
|
| 231 |
+
def _buffer_encode(self, input, errors, final):
|
| 232 |
+
# Overwrite this method in subclasses: It must encode input
|
| 233 |
+
# and return an (output, length consumed) tuple
|
| 234 |
+
raise NotImplementedError
|
| 235 |
+
|
| 236 |
+
def encode(self, input, final=False):
|
| 237 |
+
# encode input (taking the buffer into account)
|
| 238 |
+
data = self.buffer + input
|
| 239 |
+
(result, consumed) = self._buffer_encode(data, self.errors, final)
|
| 240 |
+
# keep unencoded input until the next call
|
| 241 |
+
self.buffer = data[consumed:]
|
| 242 |
+
return result
|
| 243 |
+
|
| 244 |
+
def reset(self):
|
| 245 |
+
IncrementalEncoder.reset(self)
|
| 246 |
+
self.buffer = ""
|
| 247 |
+
|
| 248 |
+
def getstate(self):
|
| 249 |
+
return self.buffer or 0
|
| 250 |
+
|
| 251 |
+
def setstate(self, state):
|
| 252 |
+
self.buffer = state or ""
|
| 253 |
+
|
| 254 |
+
class IncrementalDecoder(object):
|
| 255 |
+
"""
|
| 256 |
+
An IncrementalDecoder decodes an input in multiple steps. The input can
|
| 257 |
+
be passed piece by piece to the decode() method. The IncrementalDecoder
|
| 258 |
+
remembers the state of the decoding process between calls to decode().
|
| 259 |
+
"""
|
| 260 |
+
def __init__(self, errors='strict'):
|
| 261 |
+
"""
|
| 262 |
+
Create an IncrementalDecoder instance.
|
| 263 |
+
|
| 264 |
+
The IncrementalDecoder may use different error handling schemes by
|
| 265 |
+
providing the errors keyword argument. See the module docstring
|
| 266 |
+
for a list of possible values.
|
| 267 |
+
"""
|
| 268 |
+
self.errors = errors
|
| 269 |
+
|
| 270 |
+
def decode(self, input, final=False):
|
| 271 |
+
"""
|
| 272 |
+
Decode input and returns the resulting object.
|
| 273 |
+
"""
|
| 274 |
+
raise NotImplementedError
|
| 275 |
+
|
| 276 |
+
def reset(self):
|
| 277 |
+
"""
|
| 278 |
+
Reset the decoder to the initial state.
|
| 279 |
+
"""
|
| 280 |
+
|
| 281 |
+
def getstate(self):
|
| 282 |
+
"""
|
| 283 |
+
Return the current state of the decoder.
|
| 284 |
+
|
| 285 |
+
This must be a (buffered_input, additional_state_info) tuple.
|
| 286 |
+
buffered_input must be a bytes object containing bytes that
|
| 287 |
+
were passed to decode() that have not yet been converted.
|
| 288 |
+
additional_state_info must be a non-negative integer
|
| 289 |
+
representing the state of the decoder WITHOUT yet having
|
| 290 |
+
processed the contents of buffered_input. In the initial state
|
| 291 |
+
and after reset(), getstate() must return (b"", 0).
|
| 292 |
+
"""
|
| 293 |
+
return (b"", 0)
|
| 294 |
+
|
| 295 |
+
def setstate(self, state):
|
| 296 |
+
"""
|
| 297 |
+
Set the current state of the decoder.
|
| 298 |
+
|
| 299 |
+
state must have been returned by getstate(). The effect of
|
| 300 |
+
setstate((b"", 0)) must be equivalent to reset().
|
| 301 |
+
"""
|
| 302 |
+
|
| 303 |
+
class BufferedIncrementalDecoder(IncrementalDecoder):
|
| 304 |
+
"""
|
| 305 |
+
This subclass of IncrementalDecoder can be used as the baseclass for an
|
| 306 |
+
incremental decoder if the decoder must be able to handle incomplete
|
| 307 |
+
byte sequences.
|
| 308 |
+
"""
|
| 309 |
+
def __init__(self, errors='strict'):
|
| 310 |
+
IncrementalDecoder.__init__(self, errors)
|
| 311 |
+
# undecoded input that is kept between calls to decode()
|
| 312 |
+
self.buffer = b""
|
| 313 |
+
|
| 314 |
+
def _buffer_decode(self, input, errors, final):
|
| 315 |
+
# Overwrite this method in subclasses: It must decode input
|
| 316 |
+
# and return an (output, length consumed) tuple
|
| 317 |
+
raise NotImplementedError
|
| 318 |
+
|
| 319 |
+
def decode(self, input, final=False):
|
| 320 |
+
# decode input (taking the buffer into account)
|
| 321 |
+
data = self.buffer + input
|
| 322 |
+
(result, consumed) = self._buffer_decode(data, self.errors, final)
|
| 323 |
+
# keep undecoded input until the next call
|
| 324 |
+
self.buffer = data[consumed:]
|
| 325 |
+
return result
|
| 326 |
+
|
| 327 |
+
def reset(self):
|
| 328 |
+
IncrementalDecoder.reset(self)
|
| 329 |
+
self.buffer = b""
|
| 330 |
+
|
| 331 |
+
def getstate(self):
|
| 332 |
+
# additional state info is always 0
|
| 333 |
+
return (self.buffer, 0)
|
| 334 |
+
|
| 335 |
+
def setstate(self, state):
|
| 336 |
+
# ignore additional state info
|
| 337 |
+
self.buffer = state[0]
|
| 338 |
+
|
| 339 |
+
#
|
| 340 |
+
# The StreamWriter and StreamReader class provide generic working
|
| 341 |
+
# interfaces which can be used to implement new encoding submodules
|
| 342 |
+
# very easily. See encodings/utf_8.py for an example on how this is
|
| 343 |
+
# done.
|
| 344 |
+
#
|
| 345 |
+
|
| 346 |
+
class StreamWriter(Codec):
|
| 347 |
+
|
| 348 |
+
def __init__(self, stream, errors='strict'):
|
| 349 |
+
|
| 350 |
+
""" Creates a StreamWriter instance.
|
| 351 |
+
|
| 352 |
+
stream must be a file-like object open for writing.
|
| 353 |
+
|
| 354 |
+
The StreamWriter may use different error handling
|
| 355 |
+
schemes by providing the errors keyword argument. These
|
| 356 |
+
parameters are predefined:
|
| 357 |
+
|
| 358 |
+
'strict' - raise a ValueError (or a subclass)
|
| 359 |
+
'ignore' - ignore the character and continue with the next
|
| 360 |
+
'replace'- replace with a suitable replacement character
|
| 361 |
+
'xmlcharrefreplace' - Replace with the appropriate XML
|
| 362 |
+
character reference.
|
| 363 |
+
'backslashreplace' - Replace with backslashed escape
|
| 364 |
+
sequences.
|
| 365 |
+
'namereplace' - Replace with \\N{...} escape sequences.
|
| 366 |
+
|
| 367 |
+
The set of allowed parameter values can be extended via
|
| 368 |
+
register_error.
|
| 369 |
+
"""
|
| 370 |
+
self.stream = stream
|
| 371 |
+
self.errors = errors
|
| 372 |
+
|
| 373 |
+
def write(self, object):
|
| 374 |
+
|
| 375 |
+
""" Writes the object's contents encoded to self.stream.
|
| 376 |
+
"""
|
| 377 |
+
data, consumed = self.encode(object, self.errors)
|
| 378 |
+
self.stream.write(data)
|
| 379 |
+
|
| 380 |
+
def writelines(self, list):
|
| 381 |
+
|
| 382 |
+
""" Writes the concatenated list of strings to the stream
|
| 383 |
+
using .write().
|
| 384 |
+
"""
|
| 385 |
+
self.write(''.join(list))
|
| 386 |
+
|
| 387 |
+
def reset(self):
|
| 388 |
+
|
| 389 |
+
""" Resets the codec buffers used for keeping internal state.
|
| 390 |
+
|
| 391 |
+
Calling this method should ensure that the data on the
|
| 392 |
+
output is put into a clean state, that allows appending
|
| 393 |
+
of new fresh data without having to rescan the whole
|
| 394 |
+
stream to recover state.
|
| 395 |
+
|
| 396 |
+
"""
|
| 397 |
+
pass
|
| 398 |
+
|
| 399 |
+
def seek(self, offset, whence=0):
|
| 400 |
+
self.stream.seek(offset, whence)
|
| 401 |
+
if whence == 0 and offset == 0:
|
| 402 |
+
self.reset()
|
| 403 |
+
|
| 404 |
+
def __getattr__(self, name,
|
| 405 |
+
getattr=getattr):
|
| 406 |
+
|
| 407 |
+
""" Inherit all other methods from the underlying stream.
|
| 408 |
+
"""
|
| 409 |
+
return getattr(self.stream, name)
|
| 410 |
+
|
| 411 |
+
def __enter__(self):
|
| 412 |
+
return self
|
| 413 |
+
|
| 414 |
+
def __exit__(self, type, value, tb):
|
| 415 |
+
self.stream.close()
|
| 416 |
+
|
| 417 |
+
###
|
| 418 |
+
|
| 419 |
+
class StreamReader(Codec):
|
| 420 |
+
|
| 421 |
+
charbuffertype = str
|
| 422 |
+
|
| 423 |
+
def __init__(self, stream, errors='strict'):
|
| 424 |
+
|
| 425 |
+
""" Creates a StreamReader instance.
|
| 426 |
+
|
| 427 |
+
stream must be a file-like object open for reading.
|
| 428 |
+
|
| 429 |
+
The StreamReader may use different error handling
|
| 430 |
+
schemes by providing the errors keyword argument. These
|
| 431 |
+
parameters are predefined:
|
| 432 |
+
|
| 433 |
+
'strict' - raise a ValueError (or a subclass)
|
| 434 |
+
'ignore' - ignore the character and continue with the next
|
| 435 |
+
'replace'- replace with a suitable replacement character
|
| 436 |
+
'backslashreplace' - Replace with backslashed escape sequences;
|
| 437 |
+
|
| 438 |
+
The set of allowed parameter values can be extended via
|
| 439 |
+
register_error.
|
| 440 |
+
"""
|
| 441 |
+
self.stream = stream
|
| 442 |
+
self.errors = errors
|
| 443 |
+
self.bytebuffer = b""
|
| 444 |
+
self._empty_charbuffer = self.charbuffertype()
|
| 445 |
+
self.charbuffer = self._empty_charbuffer
|
| 446 |
+
self.linebuffer = None
|
| 447 |
+
|
| 448 |
+
def decode(self, input, errors='strict'):
|
| 449 |
+
raise NotImplementedError
|
| 450 |
+
|
| 451 |
+
def read(self, size=-1, chars=-1, firstline=False):
|
| 452 |
+
|
| 453 |
+
""" Decodes data from the stream self.stream and returns the
|
| 454 |
+
resulting object.
|
| 455 |
+
|
| 456 |
+
chars indicates the number of decoded code points or bytes to
|
| 457 |
+
return. read() will never return more data than requested,
|
| 458 |
+
but it might return less, if there is not enough available.
|
| 459 |
+
|
| 460 |
+
size indicates the approximate maximum number of decoded
|
| 461 |
+
bytes or code points to read for decoding. The decoder
|
| 462 |
+
can modify this setting as appropriate. The default value
|
| 463 |
+
-1 indicates to read and decode as much as possible. size
|
| 464 |
+
is intended to prevent having to decode huge files in one
|
| 465 |
+
step.
|
| 466 |
+
|
| 467 |
+
If firstline is true, and a UnicodeDecodeError happens
|
| 468 |
+
after the first line terminator in the input only the first line
|
| 469 |
+
will be returned, the rest of the input will be kept until the
|
| 470 |
+
next call to read().
|
| 471 |
+
|
| 472 |
+
The method should use a greedy read strategy, meaning that
|
| 473 |
+
it should read as much data as is allowed within the
|
| 474 |
+
definition of the encoding and the given size, e.g. if
|
| 475 |
+
optional encoding endings or state markers are available
|
| 476 |
+
on the stream, these should be read too.
|
| 477 |
+
"""
|
| 478 |
+
# If we have lines cached, first merge them back into characters
|
| 479 |
+
if self.linebuffer:
|
| 480 |
+
self.charbuffer = self._empty_charbuffer.join(self.linebuffer)
|
| 481 |
+
self.linebuffer = None
|
| 482 |
+
|
| 483 |
+
if chars < 0:
|
| 484 |
+
# For compatibility with other read() methods that take a
|
| 485 |
+
# single argument
|
| 486 |
+
chars = size
|
| 487 |
+
|
| 488 |
+
# read until we get the required number of characters (if available)
|
| 489 |
+
while True:
|
| 490 |
+
# can the request be satisfied from the character buffer?
|
| 491 |
+
if chars >= 0:
|
| 492 |
+
if len(self.charbuffer) >= chars:
|
| 493 |
+
break
|
| 494 |
+
# we need more data
|
| 495 |
+
if size < 0:
|
| 496 |
+
newdata = self.stream.read()
|
| 497 |
+
else:
|
| 498 |
+
newdata = self.stream.read(size)
|
| 499 |
+
# decode bytes (those remaining from the last call included)
|
| 500 |
+
data = self.bytebuffer + newdata
|
| 501 |
+
if not data:
|
| 502 |
+
break
|
| 503 |
+
try:
|
| 504 |
+
newchars, decodedbytes = self.decode(data, self.errors)
|
| 505 |
+
except UnicodeDecodeError as exc:
|
| 506 |
+
if firstline:
|
| 507 |
+
newchars, decodedbytes = \
|
| 508 |
+
self.decode(data[:exc.start], self.errors)
|
| 509 |
+
lines = newchars.splitlines(keepends=True)
|
| 510 |
+
if len(lines)<=1:
|
| 511 |
+
raise
|
| 512 |
+
else:
|
| 513 |
+
raise
|
| 514 |
+
# keep undecoded bytes until the next call
|
| 515 |
+
self.bytebuffer = data[decodedbytes:]
|
| 516 |
+
# put new characters in the character buffer
|
| 517 |
+
self.charbuffer += newchars
|
| 518 |
+
# there was no data available
|
| 519 |
+
if not newdata:
|
| 520 |
+
break
|
| 521 |
+
if chars < 0:
|
| 522 |
+
# Return everything we've got
|
| 523 |
+
result = self.charbuffer
|
| 524 |
+
self.charbuffer = self._empty_charbuffer
|
| 525 |
+
else:
|
| 526 |
+
# Return the first chars characters
|
| 527 |
+
result = self.charbuffer[:chars]
|
| 528 |
+
self.charbuffer = self.charbuffer[chars:]
|
| 529 |
+
return result
|
| 530 |
+
|
| 531 |
+
def readline(self, size=None, keepends=True):
|
| 532 |
+
|
| 533 |
+
""" Read one line from the input stream and return the
|
| 534 |
+
decoded data.
|
| 535 |
+
|
| 536 |
+
size, if given, is passed as size argument to the
|
| 537 |
+
read() method.
|
| 538 |
+
|
| 539 |
+
"""
|
| 540 |
+
# If we have lines cached from an earlier read, return
|
| 541 |
+
# them unconditionally
|
| 542 |
+
if self.linebuffer:
|
| 543 |
+
line = self.linebuffer[0]
|
| 544 |
+
del self.linebuffer[0]
|
| 545 |
+
if len(self.linebuffer) == 1:
|
| 546 |
+
# revert to charbuffer mode; we might need more data
|
| 547 |
+
# next time
|
| 548 |
+
self.charbuffer = self.linebuffer[0]
|
| 549 |
+
self.linebuffer = None
|
| 550 |
+
if not keepends:
|
| 551 |
+
line = line.splitlines(keepends=False)[0]
|
| 552 |
+
return line
|
| 553 |
+
|
| 554 |
+
readsize = size or 72
|
| 555 |
+
line = self._empty_charbuffer
|
| 556 |
+
# If size is given, we call read() only once
|
| 557 |
+
while True:
|
| 558 |
+
data = self.read(readsize, firstline=True)
|
| 559 |
+
if data:
|
| 560 |
+
# If we're at a "\r" read one extra character (which might
|
| 561 |
+
# be a "\n") to get a proper line ending. If the stream is
|
| 562 |
+
# temporarily exhausted we return the wrong line ending.
|
| 563 |
+
if (isinstance(data, str) and data.endswith("\r")) or \
|
| 564 |
+
(isinstance(data, bytes) and data.endswith(b"\r")):
|
| 565 |
+
data += self.read(size=1, chars=1)
|
| 566 |
+
|
| 567 |
+
line += data
|
| 568 |
+
lines = line.splitlines(keepends=True)
|
| 569 |
+
if lines:
|
| 570 |
+
if len(lines) > 1:
|
| 571 |
+
# More than one line result; the first line is a full line
|
| 572 |
+
# to return
|
| 573 |
+
line = lines[0]
|
| 574 |
+
del lines[0]
|
| 575 |
+
if len(lines) > 1:
|
| 576 |
+
# cache the remaining lines
|
| 577 |
+
lines[-1] += self.charbuffer
|
| 578 |
+
self.linebuffer = lines
|
| 579 |
+
self.charbuffer = None
|
| 580 |
+
else:
|
| 581 |
+
# only one remaining line, put it back into charbuffer
|
| 582 |
+
self.charbuffer = lines[0] + self.charbuffer
|
| 583 |
+
if not keepends:
|
| 584 |
+
line = line.splitlines(keepends=False)[0]
|
| 585 |
+
break
|
| 586 |
+
line0withend = lines[0]
|
| 587 |
+
line0withoutend = lines[0].splitlines(keepends=False)[0]
|
| 588 |
+
if line0withend != line0withoutend: # We really have a line end
|
| 589 |
+
# Put the rest back together and keep it until the next call
|
| 590 |
+
self.charbuffer = self._empty_charbuffer.join(lines[1:]) + \
|
| 591 |
+
self.charbuffer
|
| 592 |
+
if keepends:
|
| 593 |
+
line = line0withend
|
| 594 |
+
else:
|
| 595 |
+
line = line0withoutend
|
| 596 |
+
break
|
| 597 |
+
# we didn't get anything or this was our only try
|
| 598 |
+
if not data or size is not None:
|
| 599 |
+
if line and not keepends:
|
| 600 |
+
line = line.splitlines(keepends=False)[0]
|
| 601 |
+
break
|
| 602 |
+
if readsize < 8000:
|
| 603 |
+
readsize *= 2
|
| 604 |
+
return line
|
| 605 |
+
|
| 606 |
+
def readlines(self, sizehint=None, keepends=True):
|
| 607 |
+
|
| 608 |
+
""" Read all lines available on the input stream
|
| 609 |
+
and return them as a list.
|
| 610 |
+
|
| 611 |
+
Line breaks are implemented using the codec's decoder
|
| 612 |
+
method and are included in the list entries.
|
| 613 |
+
|
| 614 |
+
sizehint, if given, is ignored since there is no efficient
|
| 615 |
+
way to finding the true end-of-line.
|
| 616 |
+
|
| 617 |
+
"""
|
| 618 |
+
data = self.read()
|
| 619 |
+
return data.splitlines(keepends)
|
| 620 |
+
|
| 621 |
+
def reset(self):
|
| 622 |
+
|
| 623 |
+
""" Resets the codec buffers used for keeping internal state.
|
| 624 |
+
|
| 625 |
+
Note that no stream repositioning should take place.
|
| 626 |
+
This method is primarily intended to be able to recover
|
| 627 |
+
from decoding errors.
|
| 628 |
+
|
| 629 |
+
"""
|
| 630 |
+
self.bytebuffer = b""
|
| 631 |
+
self.charbuffer = self._empty_charbuffer
|
| 632 |
+
self.linebuffer = None
|
| 633 |
+
|
| 634 |
+
def seek(self, offset, whence=0):
|
| 635 |
+
""" Set the input stream's current position.
|
| 636 |
+
|
| 637 |
+
Resets the codec buffers used for keeping state.
|
| 638 |
+
"""
|
| 639 |
+
self.stream.seek(offset, whence)
|
| 640 |
+
self.reset()
|
| 641 |
+
|
| 642 |
+
def __next__(self):
    """ Return the next decoded line from the input stream."""
    line = self.readline()
    if not line:
        # An empty result signals end of stream to the iterator protocol.
        raise StopIteration
    return line
|
| 649 |
+
|
| 650 |
+
def __iter__(self):
    # The reader is its own iterator; __next__ yields decoded lines.
    return self
|
| 652 |
+
|
| 653 |
+
def __getattr__(self, name,
                getattr=getattr):

    """ Inherit all other methods from the underlying stream.
    """
    # NOTE: the builtin getattr is bound as a default argument on
    # purpose, so delegation keeps working even while the interpreter
    # is shutting down and module globals are being cleared.
    return getattr(self.stream, name)
|
| 659 |
+
|
| 660 |
+
def __enter__(self):
    # Context-manager support; the stream is closed in __exit__.
    return self
|
| 662 |
+
|
| 663 |
+
def __exit__(self, type, value, tb):
    # Unconditionally close the underlying stream; exceptions propagate.
    self.stream.close()
|
| 665 |
+
|
| 666 |
+
###
|
| 667 |
+
|
| 668 |
+
class StreamReaderWriter:

    """ StreamReaderWriter instances allow wrapping streams which
        work in both read and write modes.

        The design is such that one can use the factory functions
        returned by the codec.lookup() function to construct the
        instance.

    """
    # Optional attributes set by the file wrappers below
    encoding = 'unknown'

    def __init__(self, stream, Reader, Writer, errors='strict'):

        """ Creates a StreamReaderWriter instance.

            stream must be a Stream-like object.

            Reader, Writer must be factory functions or classes
            providing the StreamReader, StreamWriter interface resp.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.

        """
        self.stream = stream
        # Reader and Writer wrap the *same* stream; their internal
        # buffers are kept consistent via reset()/seek() below.
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):
        # Delegate decoding reads to the wrapped StreamReader.
        return self.reader.read(size)

    def readline(self, size=None):

        return self.reader.readline(size)

    def readlines(self, sizehint=None):

        return self.reader.readlines(sizehint)

    def __next__(self):

        """ Return the next decoded line from the input stream."""
        return next(self.reader)

    def __iter__(self):
        return self

    def write(self, data):
        # Delegate encoding writes to the wrapped StreamWriter.
        return self.writer.write(data)

    def writelines(self, list):

        return self.writer.writelines(list)

    def reset(self):

        self.reader.reset()
        self.writer.reset()

    def seek(self, offset, whence=0):
        self.stream.seek(offset, whence)
        self.reader.reset()
        # Only a rewind to the very start of the stream also resets the
        # writer state; other seeks leave it untouched.
        if whence == 0 and offset == 0:
            self.writer.reset()

    def __getattr__(self, name,
                    getattr=getattr):

        """ Inherit all other methods from the underlying stream.
        """
        # builtin getattr bound as a default so delegation survives
        # interpreter shutdown.
        return getattr(self.stream, name)

    # these are needed to make "with StreamReaderWriter(...)" work properly

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
|
| 752 |
+
|
| 753 |
+
###
|
| 754 |
+
|
| 755 |
+
class StreamRecoder:

    """ StreamRecoder instances translate data from one encoding to another.

        They use the complete set of APIs returned by the
        codecs.lookup() function to implement their task.

        Data written to the StreamRecoder is first decoded into an
        intermediate format (depending on the "decode" codec) and then
        written to the underlying stream using an instance of the provided
        Writer class.

        In the other direction, data is read from the underlying stream using
        a Reader instance and then encoded and returned to the caller.

    """
    # Optional attributes set by the file wrappers below
    data_encoding = 'unknown'
    file_encoding = 'unknown'

    def __init__(self, stream, encode, decode, Reader, Writer,
                 errors='strict'):

        """ Creates a StreamRecoder instance which implements a two-way
            conversion: encode and decode work on the frontend (the
            data visible to .read() and .write()) while Reader and Writer
            work on the backend (the data in stream).

            You can use these objects to do transparent
            transcodings from e.g. latin-1 to utf-8 and back.

            stream must be a file-like object.

            encode and decode must adhere to the Codec interface; Reader and
            Writer must be factory functions or classes providing the
            StreamReader and StreamWriter interfaces resp.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.

        """
        self.stream = stream
        self.encode = encode
        self.decode = decode
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):
        # Backend read (decoded by Reader), then frontend re-encode.
        data = self.reader.read(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def readline(self, size=None):

        if size is None:
            data = self.reader.readline()
        else:
            data = self.reader.readline(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def readlines(self, sizehint=None):
        # sizehint is ignored: everything is read and re-encoded at once.
        data = self.reader.read()
        data, bytesencoded = self.encode(data, self.errors)
        return data.splitlines(keepends=True)

    def __next__(self):

        """ Return the next decoded line from the input stream."""
        data = next(self.reader)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def __iter__(self):
        return self

    def write(self, data):
        # Frontend decode into the intermediate format, then backend write.
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)

    def writelines(self, list):
        # Join first so the frontend codec sees one contiguous chunk.
        data = b''.join(list)
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)

    def reset(self):

        self.reader.reset()
        self.writer.reset()

    def seek(self, offset, whence=0):
        # Seeks must be propagated to both the readers and writers
        # as they might need to reset their internal buffers.
        self.reader.seek(offset, whence)
        self.writer.seek(offset, whence)

    def __getattr__(self, name,
                    getattr=getattr):

        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
|
| 868 |
+
|
| 869 |
+
### Shortcuts
|
| 870 |
+
|
| 871 |
+
def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):

    """ Open an encoded file using the given mode and return
        a wrapped version providing transparent encoding/decoding.

        Note: The wrapped version will only accept the object format
        defined by the codecs, i.e. Unicode objects for most builtin
        codecs. Output is also codec dependent and will usually be
        Unicode as well.

        If encoding is not None, then the
        underlying encoded files are always opened in binary mode.
        The default file mode is 'r', meaning to open the file in read mode.

        encoding specifies the encoding which is to be used for the
        file.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        buffering has the same meaning as for the builtin open() API.
        It defaults to -1 which means that the default buffer size will
        be used.

        The returned wrapped file object provides an extra attribute
        .encoding which allows querying the used encoding. This
        attribute is only available if an encoding was specified as
        parameter.

    """
    if encoding is not None and \
       'b' not in mode:
        # Force opening of the file in binary mode
        mode = mode + 'b'
    file = builtins.open(filename, mode, buffering)
    if encoding is None:
        return file

    try:
        info = lookup(encoding)
        srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
        # Add attributes to simplify introspection
        srw.encoding = encoding
        return srw
    except BaseException:
        # Was a bare "except:" (E722); the semantics are identical but
        # explicit. Don't leak the freshly opened file when the codec
        # lookup or wrapping fails; re-raise the original exception.
        file.close()
        raise
|
| 919 |
+
|
| 920 |
+
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):

    """ Return a wrapped version of file which provides transparent
        encoding translation.

        Data written to the wrapped file is decoded according
        to the given data_encoding and then encoded to the underlying
        file using file_encoding. The intermediate data type
        will usually be Unicode but depends on the specified codecs.

        Bytes read from the file are decoded using file_encoding and then
        passed back to the caller encoded using data_encoding.

        If file_encoding is not given, it defaults to data_encoding.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        The returned wrapped file object provides two extra attributes
        .data_encoding and .file_encoding which reflect the given
        parameters of the same name. The attributes can be used for
        introspection by Python programs.

    """
    if file_encoding is None:
        file_encoding = data_encoding
    # data codec handles the frontend (caller-visible) representation,
    # file codec handles the backend stream.
    data_info = lookup(data_encoding)
    file_info = lookup(file_encoding)
    sr = StreamRecoder(file, data_info.encode, data_info.decode,
                       file_info.streamreader, file_info.streamwriter, errors)
    # Add attributes to simplify introspection
    sr.data_encoding = data_encoding
    sr.file_encoding = file_encoding
    return sr
|
| 955 |
+
|
| 956 |
+
### Helpers for codec lookup
|
| 957 |
+
|
| 958 |
+
def getencoder(encoding):
    """Return the encoder function of the codec registered for *encoding*.

    Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.encode
|
| 967 |
+
|
| 968 |
+
def getdecoder(encoding):
    """Return the decoder function of the codec registered for *encoding*.

    Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.decode
|
| 977 |
+
|
| 978 |
+
def getincrementalencoder(encoding):
    """Return the IncrementalEncoder class or factory function of the
    codec registered for *encoding*.

    Raises a LookupError in case the encoding cannot be found or the
    codec doesn't provide an incremental encoder.
    """
    info = lookup(encoding)
    if info.incrementalencoder is None:
        # Codec exists but has no incremental interface.
        raise LookupError(encoding)
    return info.incrementalencoder
|
| 991 |
+
|
| 992 |
+
def getincrementaldecoder(encoding):
    """Return the IncrementalDecoder class or factory function of the
    codec registered for *encoding*.

    Raises a LookupError in case the encoding cannot be found or the
    codec doesn't provide an incremental decoder.
    """
    info = lookup(encoding)
    if info.incrementaldecoder is None:
        # Codec exists but has no incremental interface.
        raise LookupError(encoding)
    return info.incrementaldecoder
|
| 1005 |
+
|
| 1006 |
+
def getreader(encoding):
    """Return the StreamReader class or factory function of the codec
    registered for *encoding*.

    Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamreader
|
| 1015 |
+
|
| 1016 |
+
def getwriter(encoding):
    """Return the StreamWriter class or factory function of the codec
    registered for *encoding*.

    Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamwriter
|
| 1025 |
+
|
| 1026 |
+
def iterencode(iterator, encoding, errors='strict', **kwargs):
    """
    Encoding iterator.

    Encodes the input strings from the iterator using an IncrementalEncoder.

    errors and kwargs are passed through to the IncrementalEncoder
    constructor.
    """
    encoder = getincrementalencoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        encoded = encoder.encode(chunk)
        if encoded:
            yield encoded
    # Flush whatever the encoder still buffers (final=True).
    tail = encoder.encode("", True)
    if tail:
        yield tail
|
| 1043 |
+
|
| 1044 |
+
def iterdecode(iterator, encoding, errors='strict', **kwargs):
    """
    Decoding iterator.

    Decodes the input strings from the iterator using an IncrementalDecoder.

    errors and kwargs are passed through to the IncrementalDecoder
    constructor.
    """
    decoder = getincrementaldecoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any bytes still buffered in the decoder (final=True).
    tail = decoder.decode(b"", True)
    if tail:
        yield tail
|
| 1061 |
+
|
| 1062 |
+
### Helpers for charmap-based codecs
|
| 1063 |
+
|
| 1064 |
+
def make_identity_dict(rng):

    """ make_identity_dict(rng) -> dict

        Return a dictionary where elements of the rng sequence are
        mapped to themselves.

    """
    # Iterate rng exactly once so one-shot iterators also work.
    mapping = {}
    for code in rng:
        mapping[code] = code
    return mapping
|
| 1073 |
+
|
| 1074 |
+
def make_encoding_map(decoding_map):

    """ Creates an encoding map from a decoding map.

        If a target mapping in the decoding map occurs multiple
        times, then that target is mapped to None (undefined mapping),
        causing an exception when encountered by the charmap codec
        during translation.

        One example where this happens is cp875.py which decodes
        multiple character to \\u001a.

    """
    m = {}
    for k, v in decoding_map.items():
        # Idiom fix: "if not v in m" (E713) -> "if v not in m".
        if v not in m:
            m[v] = k
        else:
            # Duplicate target: poison the entry so encoding it raises.
            m[v] = None
    return m
|
| 1094 |
+
|
| 1095 |
+
### error handlers
|
| 1096 |
+
|
| 1097 |
+
try:
    # Pre-resolve the standard error handlers so they are available as
    # module attributes (e.g. codecs.strict_errors).
    strict_errors = lookup_error("strict")
    ignore_errors = lookup_error("ignore")
    replace_errors = lookup_error("replace")
    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
    backslashreplace_errors = lookup_error("backslashreplace")
    namereplace_errors = lookup_error("namereplace")
except LookupError:
    # In --disable-unicode builds, these error handler are missing
    strict_errors = None
    ignore_errors = None
    replace_errors = None
    xmlcharrefreplace_errors = None
    backslashreplace_errors = None
    namereplace_errors = None
|
| 1112 |
+
|
| 1113 |
+
# Tell modulefinder that using codecs probably needs the encodings
# package
_false = 0
if _false:
    # Never executed; exists only so static import scanners see it.
    import encodings

### Tests

if __name__ == '__main__':

    # Make stdout translate Latin-1 output into UTF-8 output
    sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8')

    # Have stdin translate Latin-1 input into UTF-8 input
    sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
|
evalkit_cambrian/lib/python3.10/codeop.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Utilities to compile possibly incomplete Python source code.
|
| 2 |
+
|
| 3 |
+
This module provides two interfaces, broadly similar to the builtin
|
| 4 |
+
function compile(), which take program text, a filename and a 'mode'
|
| 5 |
+
and:
|
| 6 |
+
|
| 7 |
+
- Return code object if the command is complete and valid
|
| 8 |
+
- Return None if the command is incomplete
|
| 9 |
+
- Raise SyntaxError, ValueError or OverflowError if the command is a
|
| 10 |
+
syntax error (OverflowError and ValueError can be produced by
|
| 11 |
+
malformed literals).
|
| 12 |
+
|
| 13 |
+
The two interfaces are:
|
| 14 |
+
|
| 15 |
+
compile_command(source, filename, symbol):
|
| 16 |
+
|
| 17 |
+
Compiles a single command in the manner described above.
|
| 18 |
+
|
| 19 |
+
CommandCompiler():
|
| 20 |
+
|
| 21 |
+
Instances of this class have __call__ methods identical in
|
| 22 |
+
signature to compile_command; the difference is that if the
|
| 23 |
+
instance compiles program text containing a __future__ statement,
|
| 24 |
+
the instance 'remembers' and compiles all subsequent program texts
|
| 25 |
+
with the statement in force.
|
| 26 |
+
|
| 27 |
+
The module also provides another class:
|
| 28 |
+
|
| 29 |
+
Compile():
|
| 30 |
+
|
| 31 |
+
Instances of this class act like the built-in function compile,
|
| 32 |
+
but with 'memory' in the sense described above.
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
import __future__
|
| 36 |
+
import warnings
|
| 37 |
+
|
| 38 |
+
_features = [getattr(__future__, fname)
|
| 39 |
+
for fname in __future__.all_feature_names]
|
| 40 |
+
|
| 41 |
+
__all__ = ["compile_command", "Compile", "CommandCompiler"]
|
| 42 |
+
|
| 43 |
+
# The following flags match the values from Include/cpython/compile.h
# Caveat emptor: These flags are undocumented on purpose and depending
# on their effect outside the standard library is **unsupported**.
PyCF_DONT_IMPLY_DEDENT = 0x200
PyCF_ALLOW_INCOMPLETE_INPUT = 0x4000
|
| 48 |
+
|
| 49 |
+
def _maybe_compile(compiler, source, filename, symbol):
    """Compile *source* with *compiler*; return None when the source is
    merely incomplete rather than erroneous.

    The incompleteness heuristic: if the source fails to compile but
    compiles (or raises an "incomplete input" SyntaxError) once a
    newline is appended, it is treated as incomplete.
    """
    # Check for source consisting of only blank lines and comments.
    for line in source.split("\n"):
        line = line.strip()
        if line and line[0] != '#':
            break # Leave it alone.
    else:
        if symbol != "eval":
            source = "pass" # Replace it with a 'pass' statement

    # Disable compiler warnings when checking for incomplete input.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", (SyntaxWarning, DeprecationWarning))
        try:
            compiler(source, filename, symbol)
        except SyntaxError: # Let other compile() errors propagate.
            try:
                compiler(source + "\n", filename, symbol)
                return None
            except SyntaxError as e:
                if "incomplete input" in str(e):
                    return None
                # fallthrough

    # Compile once more outside the warning filter so the caller sees
    # any warnings/errors of the final, authoritative compilation.
    return compiler(source, filename, symbol)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _is_syntax_error(err1, err2):
|
| 77 |
+
rep1 = repr(err1)
|
| 78 |
+
rep2 = repr(err2)
|
| 79 |
+
if "was never closed" in rep1 and "was never closed" in rep2:
|
| 80 |
+
return False
|
| 81 |
+
if rep1 == rep2:
|
| 82 |
+
return True
|
| 83 |
+
return False
|
| 84 |
+
|
| 85 |
+
def _compile(source, filename, symbol):
    # Thin wrapper over the builtin compile() with the flags this
    # module relies on for incomplete-input detection.
    return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT | PyCF_ALLOW_INCOMPLETE_INPUT)
|
| 87 |
+
|
| 88 |
+
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read; default
                "<input>"
    symbol -- optional grammar start symbol; "single" (default), "exec"
              or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    # Stateless variant: uses the plain _compile (no __future__ memory).
    return _maybe_compile(_compile, source, filename, symbol)
|
| 108 |
+
|
| 109 |
+
class Compile:
    """Instances of this class behave much like the built-in compile
    function, but if one is used to compile text containing a future
    statement, it "remembers" and compiles all subsequent program texts
    with the statement in force."""
    def __init__(self):
        # Baseline flags; __future__ compiler flags get OR-ed in by
        # __call__ as they are encountered.
        self.flags = PyCF_DONT_IMPLY_DEDENT | PyCF_ALLOW_INCOMPLETE_INPUT

    def __call__(self, source, filename, symbol):
        # dont_inherit=True: only the flags tracked here apply.
        codeob = compile(source, filename, symbol, self.flags, True)
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                # Remember this __future__ import for later compiles.
                self.flags |= feature.compiler_flag
        return codeob
|
| 123 |
+
|
| 124 |
+
class CommandCompiler:
    """Instances of this class have __call__ methods identical in
    signature to compile_command; the difference is that if the
    instance compiles program text containing a __future__ statement,
    the instance 'remembers' and compiles all subsequent program texts
    with the statement in force."""

    def __init__(self):
        # Cleanup: the original signature was "def __init__(self,):"
        # with a stray trailing comma. The Compile instance carries the
        # accumulated __future__ flags between calls.
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default),
                  "exec" or "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command is a
          syntax error (OverflowError and ValueError can be produced by
          malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
|
evalkit_cambrian/lib/python3.10/contextvars.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from _contextvars import Context, ContextVar, Token, copy_context
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
__all__ = ('Context', 'ContextVar', 'Token', 'copy_context')
|
evalkit_cambrian/lib/python3.10/copyreg.py
ADDED
|
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper to provide extensibility for pickle.
|
| 2 |
+
|
| 3 |
+
This is only useful to add pickle support for extension types defined in
|
| 4 |
+
C, not for instances of user-defined classes.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["pickle", "constructor",
|
| 8 |
+
"add_extension", "remove_extension", "clear_extension_cache"]
|
| 9 |
+
|
| 10 |
+
# Global registry consulted by pickle: maps a type object to the
# reduction function registered for it via pickle() below.
dispatch_table = {}
|
| 11 |
+
|
| 12 |
+
def pickle(ob_type, pickle_function, constructor_ob=None):
    """Register *pickle_function* as the reduction function for
    instances of *ob_type*."""
    if callable(pickle_function):
        dispatch_table[ob_type] = pickle_function
    else:
        raise TypeError("reduction functions must be callable")

    # The constructor_ob function is a vestige of safe for unpickling.
    # There is no reason for the caller to pass it anymore; it is only
    # validated for backward compatibility.
    if constructor_ob is not None:
        constructor(constructor_ob)
|
| 21 |
+
|
| 22 |
+
def constructor(object):
    """Validate that *object* can serve as a reconstructor: it must be
    callable. Raises TypeError otherwise."""
    if callable(object):
        return
    raise TypeError("constructors must be callable")
|
| 25 |
+
|
| 26 |
+
# Example: provide pickling support for complex numbers.

try:
    complex
except NameError:
    pass
else:

    def pickle_complex(c):
        # A complex number is fully determined by its two float parts.
        return complex, (c.real, c.imag)

    pickle(complex, pickle_complex, complex)
|
| 38 |
+
|
| 39 |
+
def pickle_union(obj):
|
| 40 |
+
import functools, operator
|
| 41 |
+
return functools.reduce, (operator.or_, obj.__args__)
|
| 42 |
+
|
| 43 |
+
# Register the union reducer for the runtime type of X | Y unions.
pickle(type(int | str), pickle_union)
|
| 44 |
+
|
| 45 |
+
# Support for pickling new-style objects
|
| 46 |
+
|
| 47 |
+
def _reconstructor(cls, base, state):
|
| 48 |
+
if base is object:
|
| 49 |
+
obj = object.__new__(cls)
|
| 50 |
+
else:
|
| 51 |
+
obj = base.__new__(cls, state)
|
| 52 |
+
if base.__init__ != object.__init__:
|
| 53 |
+
base.__init__(obj, state)
|
| 54 |
+
return obj
|
| 55 |
+
|
| 56 |
+
_HEAPTYPE = 1<<9  # Py_TPFLAGS_HEAPTYPE: set on types created by class statements
_new_type = type(int.__new__)  # the type of a C-implemented __new__ method
|
| 58 |
+
|
| 59 |
+
# Python code for object.__reduce_ex__ for protocols 0 and 1
|
| 60 |
+
|
| 61 |
+
def _reduce_ex(self, proto):
    """Python implementation of object.__reduce_ex__ for protocols 0 and 1.

    Returns a (_reconstructor, args[, state-dict]) tuple describing how
    to rebuild *self*.
    """
    assert proto < 2
    cls = self.__class__
    # Walk the MRO looking for the closest base that is either not a
    # heap type or has a C-level __new__ of its own.
    for base in cls.__mro__:
        if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
            break
        new = base.__new__
        if isinstance(new, _new_type) and new.__self__ is base:
            break
    else:
        base = object # not really reachable
    if base is object:
        state = None
    else:
        if base is cls:
            raise TypeError(f"cannot pickle {cls.__name__!r} object")
        # Capture the base-type value (e.g. the int value of an int
        # subclass) as the reconstruction state.
        state = base(self)
    args = (cls, base, state)
    try:
        getstate = self.__getstate__
    except AttributeError:
        if getattr(self, "__slots__", None):
            raise TypeError(f"cannot pickle {cls.__name__!r} object: "
                            f"a class that defines __slots__ without "
                            f"defining __getstate__ cannot be pickled "
                            f"with protocol {proto}") from None
        try:
            dict = self.__dict__
        except AttributeError:
            dict = None
    else:
        dict = getstate()
    # Only include the state dict when it is non-empty/truthy.
    if dict:
        return _reconstructor, args, dict
    else:
        return _reconstructor, args
|
| 97 |
+
|
| 98 |
+
# Helper for __reduce_ex__ protocol 2
|
| 99 |
+
|
| 100 |
+
def __newobj__(cls, *args):
|
| 101 |
+
return cls.__new__(cls, *args)
|
| 102 |
+
|
| 103 |
+
def __newobj_ex__(cls, args, kwargs):
|
| 104 |
+
"""Used by pickle protocol 4, instead of __newobj__ to allow classes with
|
| 105 |
+
keyword-only arguments to be pickled correctly.
|
| 106 |
+
"""
|
| 107 |
+
return cls.__new__(cls, *args, **kwargs)
|
| 108 |
+
|
| 109 |
+
def _slotnames(cls):
    """Return a list of slot names for a given class.

    This needs to find slots defined by the class and its bases, so we
    can't simply return the __slots__ attribute.  We must walk down
    the Method Resolution Order and concatenate the __slots__ of each
    class found there.  (This assumes classes don't modify their
    __slots__ attribute to misrepresent their slots after the class is
    defined.)
    """

    # Get the value from a cache in the class if possible
    names = cls.__dict__.get("__slotnames__")
    if names is not None:
        return names

    # Not cached -- calculate the value
    names = []
    if not hasattr(cls, "__slots__"):
        # This class has no slots
        pass
    else:
        # Slots found -- gather slot names from all base classes
        for c in cls.__mro__:
            if "__slots__" in c.__dict__:
                slots = c.__dict__['__slots__']
                # if class has a single slot, it can be given as a string
                if isinstance(slots, str):
                    slots = (slots,)
                for name in slots:
                    # special descriptors
                    if name in ("__dict__", "__weakref__"):
                        continue
                    # mangled names: reproduce the compiler's private-name
                    # mangling (_ClassName__attr) for __x-style slots
                    elif name.startswith('__') and not name.endswith('__'):
                        stripped = c.__name__.lstrip('_')
                        if stripped:
                            names.append('_%s%s' % (stripped, name))
                        else:
                            names.append(name)
                    else:
                        names.append(name)

    # Cache the outcome in the class if at all possible
    try:
        cls.__slotnames__ = names
    except:
        pass # But don't die if we can't
    return names
|
| 159 |
+
|
| 160 |
+
# A registry of extension codes. This is an ad-hoc compression
|
| 161 |
+
# mechanism. Whenever a global reference to <module>, <name> is about
|
| 162 |
+
# to be pickled, the (<module>, <name>) tuple is looked up here to see
|
| 163 |
+
# if it is a registered extension code for it. Extension codes are
|
| 164 |
+
# universal, so that the meaning of a pickle does not depend on
|
| 165 |
+
# context. (There are also some codes reserved for local use that
|
| 166 |
+
# don't have this restriction.) Codes are positive ints; 0 is
|
| 167 |
+
# reserved.
|
| 168 |
+
|
| 169 |
+
# Module-level state backing the extension-code mechanism described above.
_extension_registry = {}                              # key -> code
_inverted_registry = {}                               # code -> key
_extension_cache = {}                                 # code -> object
# Don't ever rebind those names:  pickling grabs a reference to them when
# it's initialized, and won't see a rebinding.
|
| 174 |
+
|
| 175 |
+
def add_extension(module, name, code):
    """Register an extension *code* for the global (module, name) pair.

    Raises ValueError if the code is out of range, or if either the key or
    the code is already registered with a different partner.  Re-registering
    an identical mapping is a harmless no-op.
    """
    code = int(code)
    if code < 1 or code > 0x7fffffff:
        raise ValueError("code out of range")
    key = (module, name)
    if (_extension_registry.get(key) == code
            and _inverted_registry.get(code) == key):
        # Identical registration already present -- nothing to do.
        return
    if key in _extension_registry:
        raise ValueError("key %s is already registered with code %s" %
                         (key, _extension_registry[key]))
    if code in _inverted_registry:
        raise ValueError("code %s is already in use for key %s" %
                         (code, _inverted_registry[code]))
    # Record the mapping in both directions.
    _extension_registry[key] = code
    _inverted_registry[code] = key
|
| 192 |
+
|
| 193 |
+
def remove_extension(module, name, code):
    """Undo a prior add_extension() registration.  For testing only.

    Raises ValueError unless (module, name) is currently registered with
    exactly *code*.
    """
    key = (module, name)
    if (_extension_registry.get(key) != code
            or _inverted_registry.get(code) != key):
        raise ValueError("key %s is not registered with code %s" %
                         (key, code))
    del _extension_registry[key]
    del _inverted_registry[code]
    # Drop any memoized object for this code as well.
    _extension_cache.pop(code, None)
|
| 204 |
+
|
| 205 |
+
def clear_extension_cache():
    """Discard all memoized extension objects; the registries are untouched."""
    _extension_cache.clear()
|
| 207 |
+
|
| 208 |
+
# Standard extension code assignments
|
| 209 |
+
|
| 210 |
+
# Reserved ranges
|
| 211 |
+
|
| 212 |
+
# First Last Count Purpose
|
| 213 |
+
# 1 127 127 Reserved for Python standard library
|
| 214 |
+
# 128 191 64 Reserved for Zope
|
| 215 |
+
# 192 239 48 Reserved for 3rd parties
|
| 216 |
+
# 240 255 16 Reserved for private use (will never be assigned)
|
| 217 |
+
# 256 Inf Inf Reserved for future assignment
|
| 218 |
+
|
| 219 |
+
# Extension codes are assigned by the Python Software Foundation.
|
evalkit_cambrian/lib/python3.10/decimal.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
try:
|
| 3 |
+
from _decimal import *
|
| 4 |
+
from _decimal import __doc__
|
| 5 |
+
from _decimal import __version__
|
| 6 |
+
from _decimal import __libmpdec_version__
|
| 7 |
+
except ImportError:
|
| 8 |
+
from _pydecimal import *
|
| 9 |
+
from _pydecimal import __doc__
|
| 10 |
+
from _pydecimal import __version__
|
| 11 |
+
from _pydecimal import __libmpdec_version__
|
evalkit_cambrian/lib/python3.10/doctest.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_cambrian/lib/python3.10/enum.py
ADDED
|
@@ -0,0 +1,1053 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from types import MappingProxyType, DynamicClassAttribute
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
'EnumMeta',
|
| 7 |
+
'Enum', 'IntEnum', 'Flag', 'IntFlag',
|
| 8 |
+
'auto', 'unique',
|
| 9 |
+
]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _is_descriptor(obj):
|
| 13 |
+
"""
|
| 14 |
+
Returns True if obj is a descriptor, False otherwise.
|
| 15 |
+
"""
|
| 16 |
+
return (
|
| 17 |
+
hasattr(obj, '__get__') or
|
| 18 |
+
hasattr(obj, '__set__') or
|
| 19 |
+
hasattr(obj, '__delete__')
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
def _is_dunder(name):
|
| 23 |
+
"""
|
| 24 |
+
Returns True if a __dunder__ name, False otherwise.
|
| 25 |
+
"""
|
| 26 |
+
return (
|
| 27 |
+
len(name) > 4 and
|
| 28 |
+
name[:2] == name[-2:] == '__' and
|
| 29 |
+
name[2] != '_' and
|
| 30 |
+
name[-3] != '_'
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
def _is_sunder(name):
|
| 34 |
+
"""
|
| 35 |
+
Returns True if a _sunder_ name, False otherwise.
|
| 36 |
+
"""
|
| 37 |
+
return (
|
| 38 |
+
len(name) > 2 and
|
| 39 |
+
name[0] == name[-1] == '_' and
|
| 40 |
+
name[1:2] != '_' and
|
| 41 |
+
name[-2:-1] != '_'
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
def _is_private(cls_name, name):
|
| 45 |
+
# do not use `re` as `re` imports `enum`
|
| 46 |
+
pattern = '_%s__' % (cls_name, )
|
| 47 |
+
pat_len = len(pattern)
|
| 48 |
+
if (
|
| 49 |
+
len(name) > pat_len
|
| 50 |
+
and name.startswith(pattern)
|
| 51 |
+
and name[pat_len:pat_len+1] != ['_']
|
| 52 |
+
and (name[-1] != '_' or name[-2] != '_')
|
| 53 |
+
):
|
| 54 |
+
return True
|
| 55 |
+
else:
|
| 56 |
+
return False
|
| 57 |
+
|
| 58 |
+
def _make_class_unpicklable(cls):
|
| 59 |
+
"""
|
| 60 |
+
Make the given class un-picklable.
|
| 61 |
+
"""
|
| 62 |
+
def _break_on_call_reduce(self, proto):
|
| 63 |
+
raise TypeError('%r cannot be pickled' % self)
|
| 64 |
+
cls.__reduce_ex__ = _break_on_call_reduce
|
| 65 |
+
cls.__module__ = '<unknown>'
|
| 66 |
+
|
| 67 |
+
_auto_null = object()
|
| 68 |
+
class auto:
    """
    Instances are replaced with an appropriate value in Enum class suites.
    """
    # Sentinel meaning "no value chosen yet"; _EnumDict.__setitem__ replaces
    # it with the result of _generate_next_value_ when the member is defined.
    value = _auto_null
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class _EnumDict(dict):
    """
    Track enum member order and ensure member names are not reused.

    EnumMeta will use the names found in self._member_names as the
    enumeration member names.
    """
    def __init__(self):
        super().__init__()
        # Candidate member names, in definition order.
        self._member_names = []
        # Values assigned so far; fed to _generate_next_value_ for auto().
        self._last_values = []
        # Names listed in _ignore_; never treated as members.
        self._ignore = []
        # True once auto() has produced a value -- after that it is too late
        # to install a different _generate_next_value_.
        self._auto_called = False

    def __setitem__(self, key, value):
        """
        Changes anything not dundered or not a descriptor.

        If an enum member name is used twice, an error is raised; duplicate
        values are not checked for.

        Single underscore (sunder) names are reserved.
        """
        if _is_private(self._cls_name, key):
            import warnings
            warnings.warn(
                    "private variables, such as %r, will be normal attributes in 3.11"
                        % (key, ),
                    DeprecationWarning,
                    stacklevel=2,
                    )
        if _is_sunder(key):
            # Only this fixed set of sunder names is allowed in a class body.
            if key not in (
                    '_order_', '_create_pseudo_member_',
                    '_generate_next_value_', '_missing_', '_ignore_',
                    ):
                raise ValueError('_names_ are reserved for future Enum use')
            if key == '_generate_next_value_':
                # check if members already defined as auto()
                if self._auto_called:
                    raise TypeError("_generate_next_value_ must be defined before members")
                setattr(self, '_generate_next_value', value)
            elif key == '_ignore_':
                # Accept either a comma/space separated string or an iterable.
                if isinstance(value, str):
                    value = value.replace(',',' ').split()
                else:
                    value = list(value)
                self._ignore = value
                already = set(value) & set(self._member_names)
                if already:
                    raise ValueError(
                            '_ignore_ cannot specify already set names: %r'
                            % (already, )
                            )
        elif _is_dunder(key):
            # Legacy spelling of _order_.
            if key == '__order__':
                key = '_order_'
        elif key in self._member_names:
            # descriptor overwriting an enum?
            raise TypeError('Attempted to reuse key: %r' % key)
        elif key in self._ignore:
            pass
        elif not _is_descriptor(value):
            if key in self:
                # enum overwriting a descriptor?
                raise TypeError('%r already defined as: %r' % (key, self[key]))
            if isinstance(value, auto):
                # Replace the auto() placeholder with a generated value.
                if value.value == _auto_null:
                    value.value = self._generate_next_value(
                            key,
                            1,
                            len(self._member_names),
                            self._last_values[:],
                            )
                    self._auto_called = True
                value = value.value
            self._member_names.append(key)
            self._last_values.append(value)
        super().__setitem__(key, value)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
|
| 157 |
+
# until EnumMeta finishes running the first time the Enum class doesn't exist.
|
| 158 |
+
# This is also why there are checks in EnumMeta like `if Enum is not None`
|
| 159 |
+
Enum = None
|
| 160 |
+
|
| 161 |
+
class EnumMeta(type):
|
| 162 |
+
"""
|
| 163 |
+
Metaclass for Enum
|
| 164 |
+
"""
|
| 165 |
+
    @classmethod
    def __prepare__(metacls, cls, bases, **kwds):
        """Return the custom namespace (an _EnumDict) used to collect the
        class body, pre-loaded with the inherited _generate_next_value_."""
        # check that previous enum members do not exist
        metacls._check_for_existing_members(cls, bases)
        # create the namespace dict
        enum_dict = _EnumDict()
        enum_dict._cls_name = cls
        # inherit previous flags and _generate_next_value_ function
        member_type, first_enum = metacls._get_mixins_(cls, bases)
        if first_enum is not None:
            enum_dict['_generate_next_value_'] = getattr(
                    first_enum, '_generate_next_value_', None,
                    )
        return enum_dict
|
| 179 |
+
|
| 180 |
+
    def __new__(metacls, cls, bases, classdict, **kwds):
        """Create the new Enum class: instantiate its members, detect value
        aliases, and wire up pickle support, value lookup and _order_."""
        # an Enum class is final once enumeration items have been defined; it
        # cannot be mixed with other types (int, float, etc.) if it has an
        # inherited __new__ unless a new __new__ is defined (or the resulting
        # class will fail).
        #
        # remove any keys listed in _ignore_
        classdict.setdefault('_ignore_', []).append('_ignore_')
        ignore = classdict['_ignore_']
        for key in ignore:
            classdict.pop(key, None)
        member_type, first_enum = metacls._get_mixins_(cls, bases)
        __new__, save_new, use_args = metacls._find_new_(
                classdict, member_type, first_enum,
                )

        # save enum items into separate mapping so they don't get baked into
        # the new class
        enum_members = {k: classdict[k] for k in classdict._member_names}
        for name in classdict._member_names:
            del classdict[name]

        # adjust the sunders
        _order_ = classdict.pop('_order_', None)

        # check for illegal enum names (any others?)
        invalid_names = set(enum_members) & {'mro', ''}
        if invalid_names:
            raise ValueError('Invalid enum member name: {0}'.format(
                ','.join(invalid_names)))

        # create a default docstring if one has not been provided
        if '__doc__' not in classdict:
            classdict['__doc__'] = 'An enumeration.'

        enum_class = super().__new__(metacls, cls, bases, classdict, **kwds)
        enum_class._member_names_ = []               # names in definition order
        enum_class._member_map_ = {}                 # name->value map
        enum_class._member_type_ = member_type

        # save DynamicClassAttribute attributes from super classes so we know
        # if we can take the shortcut of storing members in the class dict
        dynamic_attributes = {
                k for c in enum_class.mro()
                for k, v in c.__dict__.items()
                if isinstance(v, DynamicClassAttribute)
                }

        # Reverse value->name map for hashable values.
        enum_class._value2member_map_ = {}

        # If a custom type is mixed into the Enum, and it does not know how
        # to pickle itself, pickle.dumps will succeed but pickle.loads will
        # fail.  Rather than have the error show up later and possibly far
        # from the source, sabotage the pickle protocol for this class so
        # that pickle.dumps also fails.
        #
        # However, if the new class implements its own __reduce_ex__, do not
        # sabotage -- it's on them to make sure it works correctly.  We use
        # __reduce_ex__ instead of any of the others as it is preferred by
        # pickle over __reduce__, and it handles all pickle protocols.
        if '__reduce_ex__' not in classdict:
            if member_type is not object:
                methods = ('__getnewargs_ex__', '__getnewargs__',
                        '__reduce_ex__', '__reduce__')
                if not any(m in member_type.__dict__ for m in methods):
                    if '__new__' in classdict:
                        # too late, sabotage
                        _make_class_unpicklable(enum_class)
                    else:
                        # final attempt to verify that pickling would work:
                        # travel mro until __new__ is found, checking for
                        # __reduce__ and friends along the way -- if any of them
                        # are found before/when __new__ is found, pickling should
                        # work
                        sabotage = None
                        for chain in bases:
                            for base in chain.__mro__:
                                if base is object:
                                    continue
                                elif any(m in base.__dict__ for m in methods):
                                    # found one, we're good
                                    sabotage = False
                                    break
                                elif '__new__' in base.__dict__:
                                    # not good
                                    sabotage = True
                                    break
                            if sabotage is not None:
                                break
                        if sabotage:
                            _make_class_unpicklable(enum_class)
        # instantiate them, checking for duplicates as we go
        # we instantiate first instead of checking for duplicates first in case
        # a custom __new__ is doing something funky with the values -- such as
        # auto-numbering ;)
        for member_name in classdict._member_names:
            value = enum_members[member_name]
            if not isinstance(value, tuple):
                args = (value, )
            else:
                args = value
            if member_type is tuple:   # special case for tuple enums
                args = (args, )        # wrap it one more time
            if not use_args:
                enum_member = __new__(enum_class)
                if not hasattr(enum_member, '_value_'):
                    enum_member._value_ = value
            else:
                enum_member = __new__(enum_class, *args)
                if not hasattr(enum_member, '_value_'):
                    if member_type is object:
                        enum_member._value_ = value
                    else:
                        enum_member._value_ = member_type(*args)
            value = enum_member._value_
            enum_member._name_ = member_name
            enum_member.__objclass__ = enum_class
            enum_member.__init__(*args)
            # If another member with the same value was already defined, the
            # new member becomes an alias to the existing one.
            for name, canonical_member in enum_class._member_map_.items():
                if canonical_member._value_ == enum_member._value_:
                    enum_member = canonical_member
                    break
            else:
                # Aliases don't appear in member names (only in __members__).
                enum_class._member_names_.append(member_name)
            # performance boost for any member that would not shadow
            # a DynamicClassAttribute
            if member_name not in dynamic_attributes:
                setattr(enum_class, member_name, enum_member)
            # now add to _member_map_
            enum_class._member_map_[member_name] = enum_member
            try:
                # This may fail if value is not hashable. We can't add the value
                # to the map, and by-value lookups for this value will be
                # linear.
                enum_class._value2member_map_[value] = enum_member
            except TypeError:
                pass

        # double check that repr and friends are not the mixin's or various
        # things break (such as pickle)
        # however, if the method is defined in the Enum itself, don't replace
        # it
        for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
            if name in classdict:
                continue
            class_method = getattr(enum_class, name)
            obj_method = getattr(member_type, name, None)
            enum_method = getattr(first_enum, name, None)
            if obj_method is not None and obj_method is class_method:
                setattr(enum_class, name, enum_method)

        # replace any other __new__ with our own (as long as Enum is not None,
        # anyway) -- again, this is to support pickle
        if Enum is not None:
            # if the user defined their own __new__, save it before it gets
            # clobbered in case they subclass later
            if save_new:
                enum_class.__new_member__ = __new__
            enum_class.__new__ = Enum.__new__

        # py3 support for definition order (helps keep py2/py3 code in sync)
        if _order_ is not None:
            if isinstance(_order_, str):
                _order_ = _order_.replace(',', ' ').split()
            if _order_ != enum_class._member_names_:
                raise TypeError('member order does not match _order_')

        return enum_class
|
| 352 |
+
|
| 353 |
+
    def __bool__(self):
        """
        classes/types should always be True.
        """
        # Needed because this metaclass defines __len__, which would make an
        # empty enumeration falsy by default.
        return True
|
| 358 |
+
|
| 359 |
+
    def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1):
        """
        Either returns an existing member, or creates a new enum class.

        This method is used both when an enum class is given a value to match
        to an enumeration member (i.e. Color(3)) and for the functional API
        (i.e. Color = Enum('Color', names='RED GREEN BLUE')).

        When used for the functional API:

        `value` will be the name of the new class.

        `names` should be either a string of white-space/comma delimited names
        (values will start at `start`), or an iterator/mapping of name, value pairs.

        `module` should be set to the module this class is being created in;
        if it is not set, an attempt to find that module will be made, but if
        it fails the class will not be picklable.

        `qualname` should be set to the actual location this class can be found
        at in its module; by default it is set to the global scope.  If this is
        not correct, unpickling will fail in some circumstances.

        `type`, if set, will be mixed in as the first base class.
        """
        if names is None:  # simple value lookup, e.g. Color(3)
            # Enum.__new__ performs the member lookup by value.
            return cls.__new__(cls, value)
        # otherwise, functional API: we're creating a new Enum type
        return cls._create_(
                value,
                names,
                module=module,
                qualname=qualname,
                type=type,
                start=start,
                )
|
| 395 |
+
|
| 396 |
+
def __contains__(cls, obj):
|
| 397 |
+
if not isinstance(obj, Enum):
|
| 398 |
+
import warnings
|
| 399 |
+
warnings.warn(
|
| 400 |
+
"in 3.12 __contains__ will no longer raise TypeError, but will return True if\n"
|
| 401 |
+
"obj is a member or a member's value",
|
| 402 |
+
DeprecationWarning,
|
| 403 |
+
stacklevel=2,
|
| 404 |
+
)
|
| 405 |
+
raise TypeError(
|
| 406 |
+
"unsupported operand type(s) for 'in': '%s' and '%s'" % (
|
| 407 |
+
type(obj).__qualname__, cls.__class__.__qualname__))
|
| 408 |
+
return isinstance(obj, cls) and obj._name_ in cls._member_map_
|
| 409 |
+
|
| 410 |
+
def __delattr__(cls, attr):
|
| 411 |
+
# nicer error message when someone tries to delete an attribute
|
| 412 |
+
# (see issue19025).
|
| 413 |
+
if attr in cls._member_map_:
|
| 414 |
+
raise AttributeError("%s: cannot delete Enum member." % cls.__name__)
|
| 415 |
+
super().__delattr__(attr)
|
| 416 |
+
|
| 417 |
+
def __dir__(self):
|
| 418 |
+
return (
|
| 419 |
+
['__class__', '__doc__', '__members__', '__module__']
|
| 420 |
+
+ self._member_names_
|
| 421 |
+
)
|
| 422 |
+
|
| 423 |
+
def __getattr__(cls, name):
|
| 424 |
+
"""
|
| 425 |
+
Return the enum member matching `name`
|
| 426 |
+
|
| 427 |
+
We use __getattr__ instead of descriptors or inserting into the enum
|
| 428 |
+
class' __dict__ in order to support `name` and `value` being both
|
| 429 |
+
properties for enum members (which live in the class' __dict__) and
|
| 430 |
+
enum members themselves.
|
| 431 |
+
"""
|
| 432 |
+
if _is_dunder(name):
|
| 433 |
+
raise AttributeError(name)
|
| 434 |
+
try:
|
| 435 |
+
return cls._member_map_[name]
|
| 436 |
+
except KeyError:
|
| 437 |
+
raise AttributeError(name) from None
|
| 438 |
+
|
| 439 |
+
    def __getitem__(cls, name):
        # Member lookup by name, e.g. Color['RED']; raises KeyError if absent.
        return cls._member_map_[name]
|
| 441 |
+
|
| 442 |
+
def __iter__(cls):
|
| 443 |
+
"""
|
| 444 |
+
Returns members in definition order.
|
| 445 |
+
"""
|
| 446 |
+
return (cls._member_map_[name] for name in cls._member_names_)
|
| 447 |
+
|
| 448 |
+
    def __len__(cls):
        # Number of canonical members; aliases are excluded.
        return len(cls._member_names_)
|
| 450 |
+
|
| 451 |
+
    @property
    def __members__(cls):
        """
        Returns a mapping of member name->value.

        This mapping lists all enum members, including aliases. Note that this
        is a read-only view of the internal mapping.
        """
        return MappingProxyType(cls._member_map_)
|
| 460 |
+
|
| 461 |
+
    def __repr__(cls):
        # e.g. "<enum 'Color'>"
        return "<enum %r>" % cls.__name__
|
| 463 |
+
|
| 464 |
+
def __reversed__(cls):
|
| 465 |
+
"""
|
| 466 |
+
Returns members in reverse definition order.
|
| 467 |
+
"""
|
| 468 |
+
return (cls._member_map_[name] for name in reversed(cls._member_names_))
|
| 469 |
+
|
| 470 |
+
def __setattr__(cls, name, value):
|
| 471 |
+
"""
|
| 472 |
+
Block attempts to reassign Enum members.
|
| 473 |
+
|
| 474 |
+
A simple assignment to the class namespace only changes one of the
|
| 475 |
+
several possible ways to get an Enum member from the Enum class,
|
| 476 |
+
resulting in an inconsistent Enumeration.
|
| 477 |
+
"""
|
| 478 |
+
member_map = cls.__dict__.get('_member_map_', {})
|
| 479 |
+
if name in member_map:
|
| 480 |
+
raise AttributeError('Cannot reassign members.')
|
| 481 |
+
super().__setattr__(name, value)
|
| 482 |
+
|
| 483 |
+
def _create_(cls, class_name, names, *, module=None, qualname=None, type=None, start=1):
|
| 484 |
+
"""
|
| 485 |
+
Convenience method to create a new Enum class.
|
| 486 |
+
|
| 487 |
+
`names` can be:
|
| 488 |
+
|
| 489 |
+
* A string containing member names, separated either with spaces or
|
| 490 |
+
commas. Values are incremented by 1 from `start`.
|
| 491 |
+
* An iterable of member names. Values are incremented by 1 from `start`.
|
| 492 |
+
* An iterable of (member name, value) pairs.
|
| 493 |
+
* A mapping of member name -> value pairs.
|
| 494 |
+
"""
|
| 495 |
+
metacls = cls.__class__
|
| 496 |
+
bases = (cls, ) if type is None else (type, cls)
|
| 497 |
+
_, first_enum = cls._get_mixins_(cls, bases)
|
| 498 |
+
classdict = metacls.__prepare__(class_name, bases)
|
| 499 |
+
|
| 500 |
+
# special processing needed for names?
|
| 501 |
+
if isinstance(names, str):
|
| 502 |
+
names = names.replace(',', ' ').split()
|
| 503 |
+
if isinstance(names, (tuple, list)) and names and isinstance(names[0], str):
|
| 504 |
+
original_names, names = names, []
|
| 505 |
+
last_values = []
|
| 506 |
+
for count, name in enumerate(original_names):
|
| 507 |
+
value = first_enum._generate_next_value_(name, start, count, last_values[:])
|
| 508 |
+
last_values.append(value)
|
| 509 |
+
names.append((name, value))
|
| 510 |
+
|
| 511 |
+
# Here, names is either an iterable of (name, value) or a mapping.
|
| 512 |
+
for item in names:
|
| 513 |
+
if isinstance(item, str):
|
| 514 |
+
member_name, member_value = item, names[item]
|
| 515 |
+
else:
|
| 516 |
+
member_name, member_value = item
|
| 517 |
+
classdict[member_name] = member_value
|
| 518 |
+
enum_class = metacls.__new__(metacls, class_name, bases, classdict)
|
| 519 |
+
|
| 520 |
+
# TODO: replace the frame hack if a blessed way to know the calling
|
| 521 |
+
# module is ever developed
|
| 522 |
+
if module is None:
|
| 523 |
+
try:
|
| 524 |
+
module = sys._getframe(2).f_globals['__name__']
|
| 525 |
+
except (AttributeError, ValueError, KeyError):
|
| 526 |
+
pass
|
| 527 |
+
if module is None:
|
| 528 |
+
_make_class_unpicklable(enum_class)
|
| 529 |
+
else:
|
| 530 |
+
enum_class.__module__ = module
|
| 531 |
+
if qualname is not None:
|
| 532 |
+
enum_class.__qualname__ = qualname
|
| 533 |
+
|
| 534 |
+
return enum_class
|
| 535 |
+
|
| 536 |
+
def _convert_(cls, name, module, filter, source=None):
|
| 537 |
+
"""
|
| 538 |
+
Create a new Enum subclass that replaces a collection of global constants
|
| 539 |
+
"""
|
| 540 |
+
# convert all constants from source (or module) that pass filter() to
|
| 541 |
+
# a new Enum called name, and export the enum and its members back to
|
| 542 |
+
# module;
|
| 543 |
+
# also, replace the __reduce_ex__ method so unpickling works in
|
| 544 |
+
# previous Python versions
|
| 545 |
+
module_globals = vars(sys.modules[module])
|
| 546 |
+
if source:
|
| 547 |
+
source = vars(source)
|
| 548 |
+
else:
|
| 549 |
+
source = module_globals
|
| 550 |
+
# _value2member_map_ is populated in the same order every time
|
| 551 |
+
# for a consistent reverse mapping of number to name when there
|
| 552 |
+
# are multiple names for the same number.
|
| 553 |
+
members = [
|
| 554 |
+
(name, value)
|
| 555 |
+
for name, value in source.items()
|
| 556 |
+
if filter(name)]
|
| 557 |
+
try:
|
| 558 |
+
# sort by value
|
| 559 |
+
members.sort(key=lambda t: (t[1], t[0]))
|
| 560 |
+
except TypeError:
|
| 561 |
+
# unless some values aren't comparable, in which case sort by name
|
| 562 |
+
members.sort(key=lambda t: t[0])
|
| 563 |
+
cls = cls(name, members, module=module)
|
| 564 |
+
cls.__reduce_ex__ = _reduce_ex_by_name
|
| 565 |
+
module_globals.update(cls.__members__)
|
| 566 |
+
module_globals[name] = cls
|
| 567 |
+
return cls
|
| 568 |
+
|
| 569 |
+
@staticmethod
|
| 570 |
+
def _check_for_existing_members(class_name, bases):
|
| 571 |
+
for chain in bases:
|
| 572 |
+
for base in chain.__mro__:
|
| 573 |
+
if issubclass(base, Enum) and base._member_names_:
|
| 574 |
+
raise TypeError(
|
| 575 |
+
"%s: cannot extend enumeration %r"
|
| 576 |
+
% (class_name, base.__name__)
|
| 577 |
+
)
|
| 578 |
+
|
| 579 |
+
@staticmethod
|
| 580 |
+
def _get_mixins_(class_name, bases):
|
| 581 |
+
"""
|
| 582 |
+
Returns the type for creating enum members, and the first inherited
|
| 583 |
+
enum class.
|
| 584 |
+
|
| 585 |
+
bases: the tuple of bases that was given to __new__
|
| 586 |
+
"""
|
| 587 |
+
if not bases:
|
| 588 |
+
return object, Enum
|
| 589 |
+
|
| 590 |
+
def _find_data_type(bases):
|
| 591 |
+
data_types = set()
|
| 592 |
+
for chain in bases:
|
| 593 |
+
candidate = None
|
| 594 |
+
for base in chain.__mro__:
|
| 595 |
+
if base is object:
|
| 596 |
+
continue
|
| 597 |
+
elif issubclass(base, Enum):
|
| 598 |
+
if base._member_type_ is not object:
|
| 599 |
+
data_types.add(base._member_type_)
|
| 600 |
+
break
|
| 601 |
+
elif '__new__' in base.__dict__:
|
| 602 |
+
if issubclass(base, Enum):
|
| 603 |
+
continue
|
| 604 |
+
data_types.add(candidate or base)
|
| 605 |
+
break
|
| 606 |
+
else:
|
| 607 |
+
candidate = candidate or base
|
| 608 |
+
if len(data_types) > 1:
|
| 609 |
+
raise TypeError('%r: too many data types: %r' % (class_name, data_types))
|
| 610 |
+
elif data_types:
|
| 611 |
+
return data_types.pop()
|
| 612 |
+
else:
|
| 613 |
+
return None
|
| 614 |
+
|
| 615 |
+
# ensure final parent class is an Enum derivative, find any concrete
|
| 616 |
+
# data type, and check that Enum has no members
|
| 617 |
+
first_enum = bases[-1]
|
| 618 |
+
if not issubclass(first_enum, Enum):
|
| 619 |
+
raise TypeError("new enumerations should be created as "
|
| 620 |
+
"`EnumName([mixin_type, ...] [data_type,] enum_type)`")
|
| 621 |
+
member_type = _find_data_type(bases) or object
|
| 622 |
+
if first_enum._member_names_:
|
| 623 |
+
raise TypeError("Cannot extend enumerations")
|
| 624 |
+
return member_type, first_enum
|
| 625 |
+
|
| 626 |
+
@staticmethod
|
| 627 |
+
def _find_new_(classdict, member_type, first_enum):
|
| 628 |
+
"""
|
| 629 |
+
Returns the __new__ to be used for creating the enum members.
|
| 630 |
+
|
| 631 |
+
classdict: the class dictionary given to __new__
|
| 632 |
+
member_type: the data type whose __new__ will be used by default
|
| 633 |
+
first_enum: enumeration to check for an overriding __new__
|
| 634 |
+
"""
|
| 635 |
+
# now find the correct __new__, checking to see of one was defined
|
| 636 |
+
# by the user; also check earlier enum classes in case a __new__ was
|
| 637 |
+
# saved as __new_member__
|
| 638 |
+
__new__ = classdict.get('__new__', None)
|
| 639 |
+
|
| 640 |
+
# should __new__ be saved as __new_member__ later?
|
| 641 |
+
save_new = __new__ is not None
|
| 642 |
+
|
| 643 |
+
if __new__ is None:
|
| 644 |
+
# check all possibles for __new_member__ before falling back to
|
| 645 |
+
# __new__
|
| 646 |
+
for method in ('__new_member__', '__new__'):
|
| 647 |
+
for possible in (member_type, first_enum):
|
| 648 |
+
target = getattr(possible, method, None)
|
| 649 |
+
if target not in {
|
| 650 |
+
None,
|
| 651 |
+
None.__new__,
|
| 652 |
+
object.__new__,
|
| 653 |
+
Enum.__new__,
|
| 654 |
+
}:
|
| 655 |
+
__new__ = target
|
| 656 |
+
break
|
| 657 |
+
if __new__ is not None:
|
| 658 |
+
break
|
| 659 |
+
else:
|
| 660 |
+
__new__ = object.__new__
|
| 661 |
+
|
| 662 |
+
# if a non-object.__new__ is used then whatever value/tuple was
|
| 663 |
+
# assigned to the enum member name will be passed to __new__ and to the
|
| 664 |
+
# new enum member's __init__
|
| 665 |
+
if __new__ is object.__new__:
|
| 666 |
+
use_args = False
|
| 667 |
+
else:
|
| 668 |
+
use_args = True
|
| 669 |
+
return __new__, save_new, use_args
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
class Enum(metaclass=EnumMeta):
|
| 673 |
+
"""
|
| 674 |
+
Generic enumeration.
|
| 675 |
+
|
| 676 |
+
Derive from this class to define new enumerations.
|
| 677 |
+
"""
|
| 678 |
+
def __new__(cls, value):
|
| 679 |
+
# all enum instances are actually created during class construction
|
| 680 |
+
# without calling this method; this method is called by the metaclass'
|
| 681 |
+
# __call__ (i.e. Color(3) ), and by pickle
|
| 682 |
+
if type(value) is cls:
|
| 683 |
+
# For lookups like Color(Color.RED)
|
| 684 |
+
return value
|
| 685 |
+
# by-value search for a matching enum member
|
| 686 |
+
# see if it's in the reverse mapping (for hashable values)
|
| 687 |
+
try:
|
| 688 |
+
return cls._value2member_map_[value]
|
| 689 |
+
except KeyError:
|
| 690 |
+
# Not found, no need to do long O(n) search
|
| 691 |
+
pass
|
| 692 |
+
except TypeError:
|
| 693 |
+
# not there, now do long search -- O(n) behavior
|
| 694 |
+
for member in cls._member_map_.values():
|
| 695 |
+
if member._value_ == value:
|
| 696 |
+
return member
|
| 697 |
+
# still not found -- try _missing_ hook
|
| 698 |
+
try:
|
| 699 |
+
exc = None
|
| 700 |
+
result = cls._missing_(value)
|
| 701 |
+
except Exception as e:
|
| 702 |
+
exc = e
|
| 703 |
+
result = None
|
| 704 |
+
try:
|
| 705 |
+
if isinstance(result, cls):
|
| 706 |
+
return result
|
| 707 |
+
else:
|
| 708 |
+
ve_exc = ValueError("%r is not a valid %s" % (value, cls.__qualname__))
|
| 709 |
+
if result is None and exc is None:
|
| 710 |
+
raise ve_exc
|
| 711 |
+
elif exc is None:
|
| 712 |
+
exc = TypeError(
|
| 713 |
+
'error in %s._missing_: returned %r instead of None or a valid member'
|
| 714 |
+
% (cls.__name__, result)
|
| 715 |
+
)
|
| 716 |
+
if not isinstance(exc, ValueError):
|
| 717 |
+
exc.__context__ = ve_exc
|
| 718 |
+
raise exc
|
| 719 |
+
finally:
|
| 720 |
+
# ensure all variables that could hold an exception are destroyed
|
| 721 |
+
exc = None
|
| 722 |
+
ve_exc = None
|
| 723 |
+
|
| 724 |
+
def _generate_next_value_(name, start, count, last_values):
|
| 725 |
+
"""
|
| 726 |
+
Generate the next value when not given.
|
| 727 |
+
|
| 728 |
+
name: the name of the member
|
| 729 |
+
start: the initial start value or None
|
| 730 |
+
count: the number of existing members
|
| 731 |
+
last_value: the last value assigned or None
|
| 732 |
+
"""
|
| 733 |
+
for last_value in reversed(last_values):
|
| 734 |
+
try:
|
| 735 |
+
return last_value + 1
|
| 736 |
+
except TypeError:
|
| 737 |
+
pass
|
| 738 |
+
else:
|
| 739 |
+
return start
|
| 740 |
+
|
| 741 |
+
@classmethod
|
| 742 |
+
def _missing_(cls, value):
|
| 743 |
+
return None
|
| 744 |
+
|
| 745 |
+
def __repr__(self):
|
| 746 |
+
return "<%s.%s: %r>" % (
|
| 747 |
+
self.__class__.__name__, self._name_, self._value_)
|
| 748 |
+
|
| 749 |
+
def __str__(self):
|
| 750 |
+
return "%s.%s" % (self.__class__.__name__, self._name_)
|
| 751 |
+
|
| 752 |
+
def __dir__(self):
|
| 753 |
+
"""
|
| 754 |
+
Returns all members and all public methods
|
| 755 |
+
"""
|
| 756 |
+
added_behavior = [
|
| 757 |
+
m
|
| 758 |
+
for cls in self.__class__.mro()
|
| 759 |
+
for m in cls.__dict__
|
| 760 |
+
if m[0] != '_' and m not in self._member_map_
|
| 761 |
+
] + [m for m in self.__dict__ if m[0] != '_']
|
| 762 |
+
return (['__class__', '__doc__', '__module__'] + added_behavior)
|
| 763 |
+
|
| 764 |
+
def __format__(self, format_spec):
|
| 765 |
+
"""
|
| 766 |
+
Returns format using actual value type unless __str__ has been overridden.
|
| 767 |
+
"""
|
| 768 |
+
# mixed-in Enums should use the mixed-in type's __format__, otherwise
|
| 769 |
+
# we can get strange results with the Enum name showing up instead of
|
| 770 |
+
# the value
|
| 771 |
+
|
| 772 |
+
# pure Enum branch, or branch with __str__ explicitly overridden
|
| 773 |
+
str_overridden = type(self).__str__ not in (Enum.__str__, Flag.__str__)
|
| 774 |
+
if self._member_type_ is object or str_overridden:
|
| 775 |
+
cls = str
|
| 776 |
+
val = str(self)
|
| 777 |
+
# mix-in branch
|
| 778 |
+
else:
|
| 779 |
+
cls = self._member_type_
|
| 780 |
+
val = self._value_
|
| 781 |
+
return cls.__format__(val, format_spec)
|
| 782 |
+
|
| 783 |
+
def __hash__(self):
|
| 784 |
+
return hash(self._name_)
|
| 785 |
+
|
| 786 |
+
def __reduce_ex__(self, proto):
|
| 787 |
+
return self.__class__, (self._value_, )
|
| 788 |
+
|
| 789 |
+
# DynamicClassAttribute is used to provide access to the `name` and
|
| 790 |
+
# `value` properties of enum members while keeping some measure of
|
| 791 |
+
# protection from modification, while still allowing for an enumeration
|
| 792 |
+
# to have members named `name` and `value`. This works because enumeration
|
| 793 |
+
# members are not set directly on the enum class -- __getattr__ is
|
| 794 |
+
# used to look them up.
|
| 795 |
+
|
| 796 |
+
@DynamicClassAttribute
|
| 797 |
+
def name(self):
|
| 798 |
+
"""The name of the Enum member."""
|
| 799 |
+
return self._name_
|
| 800 |
+
|
| 801 |
+
@DynamicClassAttribute
|
| 802 |
+
def value(self):
|
| 803 |
+
"""The value of the Enum member."""
|
| 804 |
+
return self._value_
|
| 805 |
+
|
| 806 |
+
|
| 807 |
+
class IntEnum(int, Enum):
|
| 808 |
+
"""Enum where members are also (and must be) ints"""
|
| 809 |
+
|
| 810 |
+
|
| 811 |
+
def _reduce_ex_by_name(self, proto):
|
| 812 |
+
return self.name
|
| 813 |
+
|
| 814 |
+
class Flag(Enum):
|
| 815 |
+
"""
|
| 816 |
+
Support for flags
|
| 817 |
+
"""
|
| 818 |
+
|
| 819 |
+
def _generate_next_value_(name, start, count, last_values):
|
| 820 |
+
"""
|
| 821 |
+
Generate the next value when not given.
|
| 822 |
+
|
| 823 |
+
name: the name of the member
|
| 824 |
+
start: the initial start value or None
|
| 825 |
+
count: the number of existing members
|
| 826 |
+
last_value: the last value assigned or None
|
| 827 |
+
"""
|
| 828 |
+
if not count:
|
| 829 |
+
return start if start is not None else 1
|
| 830 |
+
for last_value in reversed(last_values):
|
| 831 |
+
try:
|
| 832 |
+
high_bit = _high_bit(last_value)
|
| 833 |
+
break
|
| 834 |
+
except Exception:
|
| 835 |
+
raise TypeError('Invalid Flag value: %r' % last_value) from None
|
| 836 |
+
return 2 ** (high_bit+1)
|
| 837 |
+
|
| 838 |
+
@classmethod
|
| 839 |
+
def _missing_(cls, value):
|
| 840 |
+
"""
|
| 841 |
+
Returns member (possibly creating it) if one can be found for value.
|
| 842 |
+
"""
|
| 843 |
+
original_value = value
|
| 844 |
+
if value < 0:
|
| 845 |
+
value = ~value
|
| 846 |
+
possible_member = cls._create_pseudo_member_(value)
|
| 847 |
+
if original_value < 0:
|
| 848 |
+
possible_member = ~possible_member
|
| 849 |
+
return possible_member
|
| 850 |
+
|
| 851 |
+
@classmethod
|
| 852 |
+
def _create_pseudo_member_(cls, value):
|
| 853 |
+
"""
|
| 854 |
+
Create a composite member iff value contains only members.
|
| 855 |
+
"""
|
| 856 |
+
pseudo_member = cls._value2member_map_.get(value, None)
|
| 857 |
+
if pseudo_member is None:
|
| 858 |
+
# verify all bits are accounted for
|
| 859 |
+
_, extra_flags = _decompose(cls, value)
|
| 860 |
+
if extra_flags:
|
| 861 |
+
raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
|
| 862 |
+
# construct a singleton enum pseudo-member
|
| 863 |
+
pseudo_member = object.__new__(cls)
|
| 864 |
+
pseudo_member._name_ = None
|
| 865 |
+
pseudo_member._value_ = value
|
| 866 |
+
# use setdefault in case another thread already created a composite
|
| 867 |
+
# with this value
|
| 868 |
+
pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
|
| 869 |
+
return pseudo_member
|
| 870 |
+
|
| 871 |
+
def __contains__(self, other):
|
| 872 |
+
"""
|
| 873 |
+
Returns True if self has at least the same flags set as other.
|
| 874 |
+
"""
|
| 875 |
+
if not isinstance(other, self.__class__):
|
| 876 |
+
raise TypeError(
|
| 877 |
+
"unsupported operand type(s) for 'in': '%s' and '%s'" % (
|
| 878 |
+
type(other).__qualname__, self.__class__.__qualname__))
|
| 879 |
+
return other._value_ & self._value_ == other._value_
|
| 880 |
+
|
| 881 |
+
def __repr__(self):
|
| 882 |
+
cls = self.__class__
|
| 883 |
+
if self._name_ is not None:
|
| 884 |
+
return '<%s.%s: %r>' % (cls.__name__, self._name_, self._value_)
|
| 885 |
+
members, uncovered = _decompose(cls, self._value_)
|
| 886 |
+
return '<%s.%s: %r>' % (
|
| 887 |
+
cls.__name__,
|
| 888 |
+
'|'.join([str(m._name_ or m._value_) for m in members]),
|
| 889 |
+
self._value_,
|
| 890 |
+
)
|
| 891 |
+
|
| 892 |
+
def __str__(self):
|
| 893 |
+
cls = self.__class__
|
| 894 |
+
if self._name_ is not None:
|
| 895 |
+
return '%s.%s' % (cls.__name__, self._name_)
|
| 896 |
+
members, uncovered = _decompose(cls, self._value_)
|
| 897 |
+
if len(members) == 1 and members[0]._name_ is None:
|
| 898 |
+
return '%s.%r' % (cls.__name__, members[0]._value_)
|
| 899 |
+
else:
|
| 900 |
+
return '%s.%s' % (
|
| 901 |
+
cls.__name__,
|
| 902 |
+
'|'.join([str(m._name_ or m._value_) for m in members]),
|
| 903 |
+
)
|
| 904 |
+
|
| 905 |
+
def __bool__(self):
|
| 906 |
+
return bool(self._value_)
|
| 907 |
+
|
| 908 |
+
def __or__(self, other):
|
| 909 |
+
if not isinstance(other, self.__class__):
|
| 910 |
+
return NotImplemented
|
| 911 |
+
return self.__class__(self._value_ | other._value_)
|
| 912 |
+
|
| 913 |
+
def __and__(self, other):
|
| 914 |
+
if not isinstance(other, self.__class__):
|
| 915 |
+
return NotImplemented
|
| 916 |
+
return self.__class__(self._value_ & other._value_)
|
| 917 |
+
|
| 918 |
+
def __xor__(self, other):
|
| 919 |
+
if not isinstance(other, self.__class__):
|
| 920 |
+
return NotImplemented
|
| 921 |
+
return self.__class__(self._value_ ^ other._value_)
|
| 922 |
+
|
| 923 |
+
def __invert__(self):
|
| 924 |
+
members, uncovered = _decompose(self.__class__, self._value_)
|
| 925 |
+
inverted = self.__class__(0)
|
| 926 |
+
for m in self.__class__:
|
| 927 |
+
if m not in members and not (m._value_ & self._value_):
|
| 928 |
+
inverted = inverted | m
|
| 929 |
+
return self.__class__(inverted)
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
class IntFlag(int, Flag):
|
| 933 |
+
"""
|
| 934 |
+
Support for integer-based Flags
|
| 935 |
+
"""
|
| 936 |
+
|
| 937 |
+
@classmethod
|
| 938 |
+
def _missing_(cls, value):
|
| 939 |
+
"""
|
| 940 |
+
Returns member (possibly creating it) if one can be found for value.
|
| 941 |
+
"""
|
| 942 |
+
if not isinstance(value, int):
|
| 943 |
+
raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
|
| 944 |
+
new_member = cls._create_pseudo_member_(value)
|
| 945 |
+
return new_member
|
| 946 |
+
|
| 947 |
+
@classmethod
|
| 948 |
+
def _create_pseudo_member_(cls, value):
|
| 949 |
+
"""
|
| 950 |
+
Create a composite member iff value contains only members.
|
| 951 |
+
"""
|
| 952 |
+
pseudo_member = cls._value2member_map_.get(value, None)
|
| 953 |
+
if pseudo_member is None:
|
| 954 |
+
need_to_create = [value]
|
| 955 |
+
# get unaccounted for bits
|
| 956 |
+
_, extra_flags = _decompose(cls, value)
|
| 957 |
+
# timer = 10
|
| 958 |
+
while extra_flags:
|
| 959 |
+
# timer -= 1
|
| 960 |
+
bit = _high_bit(extra_flags)
|
| 961 |
+
flag_value = 2 ** bit
|
| 962 |
+
if (flag_value not in cls._value2member_map_ and
|
| 963 |
+
flag_value not in need_to_create
|
| 964 |
+
):
|
| 965 |
+
need_to_create.append(flag_value)
|
| 966 |
+
if extra_flags == -flag_value:
|
| 967 |
+
extra_flags = 0
|
| 968 |
+
else:
|
| 969 |
+
extra_flags ^= flag_value
|
| 970 |
+
for value in reversed(need_to_create):
|
| 971 |
+
# construct singleton pseudo-members
|
| 972 |
+
pseudo_member = int.__new__(cls, value)
|
| 973 |
+
pseudo_member._name_ = None
|
| 974 |
+
pseudo_member._value_ = value
|
| 975 |
+
# use setdefault in case another thread already created a composite
|
| 976 |
+
# with this value
|
| 977 |
+
pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
|
| 978 |
+
return pseudo_member
|
| 979 |
+
|
| 980 |
+
def __or__(self, other):
|
| 981 |
+
if not isinstance(other, (self.__class__, int)):
|
| 982 |
+
return NotImplemented
|
| 983 |
+
result = self.__class__(self._value_ | self.__class__(other)._value_)
|
| 984 |
+
return result
|
| 985 |
+
|
| 986 |
+
def __and__(self, other):
|
| 987 |
+
if not isinstance(other, (self.__class__, int)):
|
| 988 |
+
return NotImplemented
|
| 989 |
+
return self.__class__(self._value_ & self.__class__(other)._value_)
|
| 990 |
+
|
| 991 |
+
def __xor__(self, other):
|
| 992 |
+
if not isinstance(other, (self.__class__, int)):
|
| 993 |
+
return NotImplemented
|
| 994 |
+
return self.__class__(self._value_ ^ self.__class__(other)._value_)
|
| 995 |
+
|
| 996 |
+
__ror__ = __or__
|
| 997 |
+
__rand__ = __and__
|
| 998 |
+
__rxor__ = __xor__
|
| 999 |
+
|
| 1000 |
+
def __invert__(self):
|
| 1001 |
+
result = self.__class__(~self._value_)
|
| 1002 |
+
return result
|
| 1003 |
+
|
| 1004 |
+
|
| 1005 |
+
def _high_bit(value):
|
| 1006 |
+
"""
|
| 1007 |
+
returns index of highest bit, or -1 if value is zero or negative
|
| 1008 |
+
"""
|
| 1009 |
+
return value.bit_length() - 1
|
| 1010 |
+
|
| 1011 |
+
def unique(enumeration):
|
| 1012 |
+
"""
|
| 1013 |
+
Class decorator for enumerations ensuring unique member values.
|
| 1014 |
+
"""
|
| 1015 |
+
duplicates = []
|
| 1016 |
+
for name, member in enumeration.__members__.items():
|
| 1017 |
+
if name != member.name:
|
| 1018 |
+
duplicates.append((name, member.name))
|
| 1019 |
+
if duplicates:
|
| 1020 |
+
alias_details = ', '.join(
|
| 1021 |
+
["%s -> %s" % (alias, name) for (alias, name) in duplicates])
|
| 1022 |
+
raise ValueError('duplicate values found in %r: %s' %
|
| 1023 |
+
(enumeration, alias_details))
|
| 1024 |
+
return enumeration
|
| 1025 |
+
|
| 1026 |
+
def _decompose(flag, value):
|
| 1027 |
+
"""
|
| 1028 |
+
Extract all members from the value.
|
| 1029 |
+
"""
|
| 1030 |
+
# _decompose is only called if the value is not named
|
| 1031 |
+
not_covered = value
|
| 1032 |
+
negative = value < 0
|
| 1033 |
+
members = []
|
| 1034 |
+
for member in flag:
|
| 1035 |
+
member_value = member.value
|
| 1036 |
+
if member_value and member_value & value == member_value:
|
| 1037 |
+
members.append(member)
|
| 1038 |
+
not_covered &= ~member_value
|
| 1039 |
+
if not negative:
|
| 1040 |
+
tmp = not_covered
|
| 1041 |
+
while tmp:
|
| 1042 |
+
flag_value = 2 ** _high_bit(tmp)
|
| 1043 |
+
if flag_value in flag._value2member_map_:
|
| 1044 |
+
members.append(flag._value2member_map_[flag_value])
|
| 1045 |
+
not_covered &= ~flag_value
|
| 1046 |
+
tmp &= ~flag_value
|
| 1047 |
+
if not members and value in flag._value2member_map_:
|
| 1048 |
+
members.append(flag._value2member_map_[value])
|
| 1049 |
+
members.sort(key=lambda m: m._value_, reverse=True)
|
| 1050 |
+
if len(members) > 1 and members[0].value == value:
|
| 1051 |
+
# we have the breakdown, don't need the value member itself
|
| 1052 |
+
members.pop(0)
|
| 1053 |
+
return members, not_covered
|
evalkit_cambrian/lib/python3.10/fileinput.py
ADDED
|
@@ -0,0 +1,462 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper class to quickly write a loop over all standard input files.
|
| 2 |
+
|
| 3 |
+
Typical use is:
|
| 4 |
+
|
| 5 |
+
import fileinput
|
| 6 |
+
for line in fileinput.input(encoding="utf-8"):
|
| 7 |
+
process(line)
|
| 8 |
+
|
| 9 |
+
This iterates over the lines of all files listed in sys.argv[1:],
|
| 10 |
+
defaulting to sys.stdin if the list is empty. If a filename is '-' it
|
| 11 |
+
is also replaced by sys.stdin and the optional arguments mode and
|
| 12 |
+
openhook are ignored. To specify an alternative list of filenames,
|
| 13 |
+
pass it as the argument to input(). A single file name is also allowed.
|
| 14 |
+
|
| 15 |
+
Functions filename(), lineno() return the filename and cumulative line
|
| 16 |
+
number of the line that has just been read; filelineno() returns its
|
| 17 |
+
line number in the current file; isfirstline() returns true iff the
|
| 18 |
+
line just read is the first line of its file; isstdin() returns true
|
| 19 |
+
iff the line was read from sys.stdin. Function nextfile() closes the
|
| 20 |
+
current file so that the next iteration will read the first line from
|
| 21 |
+
the next file (if any); lines not read from the file will not count
|
| 22 |
+
towards the cumulative line count; the filename is not changed until
|
| 23 |
+
after the first line of the next file has been read. Function close()
|
| 24 |
+
closes the sequence.
|
| 25 |
+
|
| 26 |
+
Before any lines have been read, filename() returns None and both line
|
| 27 |
+
numbers are zero; nextfile() has no effect. After all lines have been
|
| 28 |
+
read, filename() and the line number functions return the values
|
| 29 |
+
pertaining to the last line read; nextfile() has no effect.
|
| 30 |
+
|
| 31 |
+
All files are opened in text mode by default, you can override this by
|
| 32 |
+
setting the mode parameter to input() or FileInput.__init__().
|
| 33 |
+
If an I/O error occurs during opening or reading a file, the OSError
|
| 34 |
+
exception is raised.
|
| 35 |
+
|
| 36 |
+
If sys.stdin is used more than once, the second and further use will
|
| 37 |
+
return no lines, except perhaps for interactive use, or if it has been
|
| 38 |
+
explicitly reset (e.g. using sys.stdin.seek(0)).
|
| 39 |
+
|
| 40 |
+
Empty files are opened and immediately closed; the only time their
|
| 41 |
+
presence in the list of filenames is noticeable at all is when the
|
| 42 |
+
last file opened is empty.
|
| 43 |
+
|
| 44 |
+
It is possible that the last line of a file doesn't end in a newline
|
| 45 |
+
character; otherwise lines are returned including the trailing
|
| 46 |
+
newline.
|
| 47 |
+
|
| 48 |
+
Class FileInput is the implementation; its methods filename(),
|
| 49 |
+
lineno(), fileline(), isfirstline(), isstdin(), nextfile() and close()
|
| 50 |
+
correspond to the functions in the module. In addition it has a
|
| 51 |
+
readline() method which returns the next input line, and a
|
| 52 |
+
__getitem__() method which implements the sequence behavior. The
|
| 53 |
+
sequence must be accessed in strictly sequential order; sequence
|
| 54 |
+
access and readline() cannot be mixed.
|
| 55 |
+
|
| 56 |
+
Optional in-place filtering: if the keyword argument inplace=1 is
|
| 57 |
+
passed to input() or to the FileInput constructor, the file is moved
|
| 58 |
+
to a backup file and standard output is directed to the input file.
|
| 59 |
+
This makes it possible to write a filter that rewrites its input file
|
| 60 |
+
in place. If the keyword argument backup=".<some extension>" is also
|
| 61 |
+
given, it specifies the extension for the backup file, and the backup
|
| 62 |
+
file remains around; by default, the extension is ".bak" and it is
|
| 63 |
+
deleted when the output file is closed. In-place filtering is
|
| 64 |
+
disabled when standard input is read. XXX The current implementation
|
| 65 |
+
does not work for MS-DOS 8+3 filesystems.
|
| 66 |
+
"""
|
| 67 |
+
|
| 68 |
+
import io
|
| 69 |
+
import sys, os
|
| 70 |
+
from types import GenericAlias
|
| 71 |
+
|
| 72 |
+
__all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
|
| 73 |
+
"fileno", "isfirstline", "isstdin", "FileInput", "hook_compressed",
|
| 74 |
+
"hook_encoded"]
|
| 75 |
+
|
| 76 |
+
_state = None
|
| 77 |
+
|
| 78 |
+
def input(files=None, inplace=False, backup="", *, mode="r", openhook=None,
|
| 79 |
+
encoding=None, errors=None):
|
| 80 |
+
"""Return an instance of the FileInput class, which can be iterated.
|
| 81 |
+
|
| 82 |
+
The parameters are passed to the constructor of the FileInput class.
|
| 83 |
+
The returned instance, in addition to being an iterator,
|
| 84 |
+
keeps global state for the functions of this module,.
|
| 85 |
+
"""
|
| 86 |
+
global _state
|
| 87 |
+
if _state and _state._file:
|
| 88 |
+
raise RuntimeError("input() already active")
|
| 89 |
+
_state = FileInput(files, inplace, backup, mode=mode, openhook=openhook,
|
| 90 |
+
encoding=encoding, errors=errors)
|
| 91 |
+
return _state
|
| 92 |
+
|
| 93 |
+
def close():
|
| 94 |
+
"""Close the sequence."""
|
| 95 |
+
global _state
|
| 96 |
+
state = _state
|
| 97 |
+
_state = None
|
| 98 |
+
if state:
|
| 99 |
+
state.close()
|
| 100 |
+
|
| 101 |
+
def nextfile():
|
| 102 |
+
"""
|
| 103 |
+
Close the current file so that the next iteration will read the first
|
| 104 |
+
line from the next file (if any); lines not read from the file will
|
| 105 |
+
not count towards the cumulative line count. The filename is not
|
| 106 |
+
changed until after the first line of the next file has been read.
|
| 107 |
+
Before the first line has been read, this function has no effect;
|
| 108 |
+
it cannot be used to skip the first file. After the last line of the
|
| 109 |
+
last file has been read, this function has no effect.
|
| 110 |
+
"""
|
| 111 |
+
if not _state:
|
| 112 |
+
raise RuntimeError("no active input()")
|
| 113 |
+
return _state.nextfile()
|
| 114 |
+
|
| 115 |
+
def filename():
|
| 116 |
+
"""
|
| 117 |
+
Return the name of the file currently being read.
|
| 118 |
+
Before the first line has been read, returns None.
|
| 119 |
+
"""
|
| 120 |
+
if not _state:
|
| 121 |
+
raise RuntimeError("no active input()")
|
| 122 |
+
return _state.filename()
|
| 123 |
+
|
| 124 |
+
def lineno():
    """Cumulative line number of the line that has just been read.

    0 before the first line has been read; after the last line of the last
    file has been read, the number of that final line is retained.
    """
    active = _state
    if not active:
        raise RuntimeError("no active input()")
    return active.lineno()
|
| 133 |
+
|
| 134 |
+
def filelineno():
    """Line number within the current file (0 before the first line).

    After the last line of the last file has been read, this is that
    line's number within its own file.
    """
    active = _state
    if not active:
        raise RuntimeError("no active input()")
    return active.filelineno()
|
| 143 |
+
|
| 144 |
+
def fileno():
    """File descriptor of the current file, or -1 when none is open."""
    active = _state
    if not active:
        raise RuntimeError("no active input()")
    return active.fileno()
|
| 152 |
+
|
| 153 |
+
def isfirstline():
    """True iff the line just read was the first line of its file."""
    active = _state
    if not active:
        raise RuntimeError("no active input()")
    return active.isfirstline()
|
| 161 |
+
|
| 162 |
+
def isstdin():
    """True iff the last line was read from sys.stdin."""
    active = _state
    if not active:
        raise RuntimeError("no active input()")
    return active.isstdin()
|
| 170 |
+
|
| 171 |
+
class FileInput:
    """FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None)

    Class FileInput is the implementation of the module; its methods
    filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(),
    nextfile() and close() correspond to the functions of the same name
    in the module.
    In addition it has a readline() method which returns the next
    input line, and a __getitem__() method which implements the
    sequence behavior. The sequence must be accessed in strictly
    sequential order; random access and readline() cannot be mixed.
    """

    def __init__(self, files=None, inplace=False, backup="", *,
                 mode="r", openhook=None, encoding=None, errors=None):
        """Normalize *files* to a tuple of names and record the options.

        A single str/PathLike becomes a one-element tuple; None falls back
        to sys.argv[1:], and an empty list means read stdin ('-').
        """
        if isinstance(files, str):
            files = (files,)
        elif isinstance(files, os.PathLike):
            files = (os.fspath(files), )
        else:
            if files is None:
                files = sys.argv[1:]
            if not files:
                files = ('-',)
            else:
                files = tuple(files)
        self._files = files              # remaining files yet to be opened
        self._inplace = inplace
        self._backup = backup
        self._savestdout = None          # original sys.stdout during inplace editing
        self._output = None              # replacement file opened for inplace writes
        self._filename = None
        self._startlineno = 0            # cumulative line count at start of current file
        self._filelineno = 0             # lines read so far from the current file
        self._file = None
        self._isstdin = False
        self._backupfilename = None
        self._encoding = encoding
        self._errors = errors

        # We can not use io.text_encoding() here because old openhook doesn't
        # take encoding parameter.
        if (sys.flags.warn_default_encoding and
                "b" not in mode and encoding is None and openhook is None):
            import warnings
            warnings.warn("'encoding' argument not specified.",
                          EncodingWarning, 2)

        # restrict mode argument to reading modes
        if mode not in ('r', 'rU', 'U', 'rb'):
            raise ValueError("FileInput opening mode must be one of "
                             "'r', 'rU', 'U' and 'rb'")
        if 'U' in mode:
            import warnings
            warnings.warn("'U' mode is deprecated",
                          DeprecationWarning, 2)
        self._mode = mode
        # Matching write mode for the inplace output file ('r' -> 'w').
        self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
        if openhook:
            if inplace:
                raise ValueError("FileInput cannot use an opening hook in inplace mode")
            if not callable(openhook):
                raise ValueError("FileInput openhook must be callable")
        self._openhook = openhook

    def __del__(self):
        # Best-effort cleanup; close() is idempotent.
        self.close()

    def close(self):
        """Close the current file and make further iteration a no-op."""
        try:
            self.nextfile()
        finally:
            self._files = ()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next input line, advancing across files as needed."""
        while True:
            line = self._readline()
            if line:
                self._filelineno += 1
                return line
            if not self._file:
                # No file open and no line produced: input is exhausted.
                raise StopIteration
            self.nextfile()
            # repeat with next file

    def __getitem__(self, i):
        # Deprecated pseudo-sequence access; only strictly sequential
        # indices (the next line number) are accepted.
        import warnings
        warnings.warn(
            "Support for indexing FileInput objects is deprecated. "
            "Use iterator protocol instead.",
            DeprecationWarning,
            stacklevel=2
        )
        if i != self.lineno():
            raise RuntimeError("accessing lines out of order")
        try:
            return self.__next__()
        except StopIteration:
            raise IndexError("end of input reached")

    def nextfile(self):
        """Close the current file and undo any inplace-editing redirection.

        Cleanup is staged with nested try/finally so that each step runs
        even if an earlier one raises: restore stdout, close the inplace
        output, drop the instance-level _readline shortcut, close the
        input file (never stdin), and remove the backup file when no
        backup suffix was requested.
        """
        savestdout = self._savestdout
        self._savestdout = None
        if savestdout:
            sys.stdout = savestdout

        output = self._output
        self._output = None
        try:
            if output:
                output.close()
        finally:
            file = self._file
            self._file = None
            try:
                del self._readline  # restore FileInput._readline
            except AttributeError:
                pass
            try:
                if file and not self._isstdin:
                    file.close()
            finally:
                backupfilename = self._backupfilename
                self._backupfilename = None
                if backupfilename and not self._backup:
                    try: os.unlink(backupfilename)
                    except OSError: pass

                self._isstdin = False

    def readline(self):
        """Return the next line, or '' / b'' at end of all input."""
        while True:
            line = self._readline()
            if line:
                self._filelineno += 1
                return line
            if not self._file:
                # Exhausted: return the falsy empty line instead of raising.
                return line
            self.nextfile()
            # repeat with next file

    def _readline(self):
        """Open the next file and read its first line.

        Only called while no file is open; on success it rebinds
        self._readline to the file object's readline so subsequent reads
        bypass this method entirely (nextfile() deletes the shortcut).
        """
        if not self._files:
            if 'b' in self._mode:
                return b''
            else:
                return ''
        self._filename = self._files[0]
        self._files = self._files[1:]
        self._startlineno = self.lineno()
        self._filelineno = 0
        self._file = None
        self._isstdin = False
        self._backupfilename = 0

        # EncodingWarning is emitted in __init__() already
        if "b" not in self._mode:
            encoding = self._encoding or "locale"
        else:
            encoding = None

        if self._filename == '-':
            self._filename = '<stdin>'
            if 'b' in self._mode:
                # Binary mode wants the raw buffer when stdin has one.
                self._file = getattr(sys.stdin, 'buffer', sys.stdin)
            else:
                self._file = sys.stdin
            self._isstdin = True
        else:
            if self._inplace:
                # Rename the original to a backup, read from the backup,
                # and write the edited content back under the original name.
                self._backupfilename = (
                    os.fspath(self._filename) + (self._backup or ".bak"))
                try:
                    os.unlink(self._backupfilename)
                except OSError:
                    pass
                # The next few lines may raise OSError
                os.rename(self._filename, self._backupfilename)
                self._file = open(self._backupfilename, self._mode,
                                  encoding=encoding, errors=self._errors)
                try:
                    perm = os.fstat(self._file.fileno()).st_mode
                except OSError:
                    self._output = open(self._filename, self._write_mode,
                                        encoding=encoding, errors=self._errors)
                else:
                    # Recreate the output with the original permissions.
                    mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
                    if hasattr(os, 'O_BINARY'):
                        mode |= os.O_BINARY

                    fd = os.open(self._filename, mode, perm)
                    self._output = os.fdopen(fd, self._write_mode,
                                             encoding=encoding, errors=self._errors)
                    try:
                        os.chmod(self._filename, perm)
                    except OSError:
                        pass
                # Redirect stdout so print() goes into the edited file.
                self._savestdout = sys.stdout
                sys.stdout = self._output
            else:
                # This may raise OSError
                if self._openhook:
                    # Custom hooks made previous to Python 3.10 didn't have
                    # encoding argument
                    if self._encoding is None:
                        self._file = self._openhook(self._filename, self._mode)
                    else:
                        self._file = self._openhook(
                            self._filename, self._mode, encoding=self._encoding, errors=self._errors)
                else:
                    self._file = open(self._filename, self._mode, encoding=encoding, errors=self._errors)
        self._readline = self._file.readline  # hide FileInput._readline
        return self._readline()

    def filename(self):
        """Name of the current file (None before the first line)."""
        return self._filename

    def lineno(self):
        """Cumulative line number across all files read so far."""
        return self._startlineno + self._filelineno

    def filelineno(self):
        """Line number within the current file."""
        return self._filelineno

    def fileno(self):
        """Descriptor of the current file, or -1 if none/closed."""
        if self._file:
            try:
                return self._file.fileno()
            except ValueError:
                return -1
        else:
            return -1

    def isfirstline(self):
        """True iff the line just read was the first of its file."""
        return self._filelineno == 1

    def isstdin(self):
        """True iff the current file is standard input."""
        return self._isstdin

    __class_getitem__ = classmethod(GenericAlias)
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def hook_compressed(filename, mode, *, encoding=None, errors=None):
    """Openhook that transparently decompresses .gz and .bz2 files.

    Files with any other extension are opened normally.  In text mode the
    compressed stream is wrapped in a TextIOWrapper using *encoding* and
    *errors* (defaulting to the locale encoding).
    """
    if encoding is None and "b" not in mode:
        # EncodingWarning is emitted in FileInput() already.
        encoding = "locale"
    suffix = os.path.splitext(filename)[1]
    if suffix == '.gz':
        import gzip
        opener = gzip.open
    elif suffix == '.bz2':
        import bz2
        opener = bz2.BZ2File
    else:
        return open(filename, mode, encoding=encoding, errors=errors)

    stream = opener(filename, mode)
    if "b" not in mode:
        # gzip and bz2 open in binary mode by default; layer text on top.
        stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors)
    return stream
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def hook_encoded(encoding, errors=None):
    """Return an openhook that opens every file with *encoding*/*errors*."""
    def _open_with_encoding(filename, mode):
        return open(filename, mode, encoding=encoding, errors=errors)
    return _open_with_encoding
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def _test():
    """Command-line demo driver.

    Options: -i edits files in place, -b EXT keeps backups with suffix EXT.
    Each input line is echoed with its cumulative line number, filename and
    per-file line number; '*' marks the first line of each file.
    """
    import getopt
    inplace = False
    backup = False
    opts, args = getopt.getopt(sys.argv[1:], "ib:")
    for o, a in opts:
        if o == '-i': inplace = True
        if o == '-b': backup = a
    for line in input(args, inplace=inplace, backup=backup):
        # Strip a trailing newline and (for CRLF input) a trailing CR.
        if line[-1:] == '\n': line = line[:-1]
        if line[-1:] == '\r': line = line[:-1]
        print("%d: %s[%d]%s %s" % (lineno(), filename(), filelineno(),
                                   isfirstline() and "*" or "", line))
    print("%d: %s[%d]" % (lineno(), filename(), filelineno()))
|
| 460 |
+
|
| 461 |
+
if __name__ == '__main__':
    # Run the demo driver when the module is executed as a script.
    _test()
|
evalkit_cambrian/lib/python3.10/fnmatch.py
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Filename matching with shell patterns.
|
| 2 |
+
|
| 3 |
+
fnmatch(FILENAME, PATTERN) matches according to the local convention.
|
| 4 |
+
fnmatchcase(FILENAME, PATTERN) always takes case in account.
|
| 5 |
+
|
| 6 |
+
The functions operate by translating the pattern into a regular
|
| 7 |
+
expression. They cache the compiled regular expressions for speed.
|
| 8 |
+
|
| 9 |
+
The function translate(PATTERN) returns a regular expression
|
| 10 |
+
corresponding to PATTERN. (It does not compile it.)
|
| 11 |
+
"""
|
| 12 |
+
import os
|
| 13 |
+
import posixpath
|
| 14 |
+
import re
|
| 15 |
+
import functools
|
| 16 |
+
|
| 17 |
+
# Public API of the fnmatch module.
__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]

# Build a thread-safe incrementing counter to help create unique regexp group
# names across calls.
from itertools import count
_nextgroupnum = count().__next__
del count  # only the bound __next__ method is needed; drop the class name
|
| 24 |
+
|
| 25 |
+
def fnmatch(name, pat):
    """Test whether FILENAME matches PATTERN.

    Patterns are Unix shell style:

    *       matches everything
    ?       matches any single character
    [seq]   matches any character in seq
    [!seq]  matches any char not in seq

    An initial period in FILENAME is not special.  Both FILENAME and
    PATTERN are first case-normalized if the operating system requires
    it; use fnmatchcase(FILENAME, PATTERN) to skip that.
    """
    return fnmatchcase(os.path.normcase(name), os.path.normcase(pat))
|
| 43 |
+
|
| 44 |
+
@functools.lru_cache(maxsize=256, typed=True)
|
| 45 |
+
def _compile_pattern(pat):
|
| 46 |
+
if isinstance(pat, bytes):
|
| 47 |
+
pat_str = str(pat, 'ISO-8859-1')
|
| 48 |
+
res_str = translate(pat_str)
|
| 49 |
+
res = bytes(res_str, 'ISO-8859-1')
|
| 50 |
+
else:
|
| 51 |
+
res = translate(pat)
|
| 52 |
+
return re.compile(res).match
|
| 53 |
+
|
| 54 |
+
def filter(names, pat):
    """Return the list of elements of the iterable *names* matching *pat*."""
    match = _compile_pattern(os.path.normcase(pat))
    if os.path is posixpath:
        # normcase on posix is the identity; skip it inside the loop.
        return [name for name in names if match(name)]
    return [name for name in names if match(os.path.normcase(name))]
|
| 69 |
+
|
| 70 |
+
def fnmatchcase(name, pat):
    """Test whether FILENAME matches PATTERN, including case.

    This is a version of fnmatch() which doesn't case-normalize
    its arguments.
    """
    return _compile_pattern(pat)(name) is not None
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def translate(pat):
    """Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.

    Two passes: first tokenize the pattern into literal regex fragments
    plus STAR sentinels (collapsing runs of '*'), then emit the final
    regex with non-backtracking translations for each STAR.
    """

    STAR = object()
    res = []
    add = res.append
    i, n = 0, len(pat)
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            # compress consecutive `*` into one
            if (not res) or res[-1] is not STAR:
                add(STAR)
        elif c == '?':
            add('.')
        elif c == '[':
            # Scan ahead for the closing ']' of the character set;
            # a leading '!' (negation) or ']' (literal) is skipped first.
            j = i
            if j < n and pat[j] == '!':
                j = j+1
            if j < n and pat[j] == ']':
                j = j+1
            while j < n and pat[j] != ']':
                j = j+1
            if j >= n:
                # Unterminated set: treat '[' as a literal character.
                add('\\[')
            else:
                stuff = pat[i:j]
                if '-' not in stuff:
                    stuff = stuff.replace('\\', r'\\')
                else:
                    # Split the set into range chunks on unescaped '-',
                    # so range-forming hyphens can be kept unescaped below.
                    chunks = []
                    k = i+2 if pat[i] == '!' else i+1
                    while True:
                        k = pat.find('-', k, j)
                        if k < 0:
                            break
                        chunks.append(pat[i:k])
                        i = k+1
                        k = k+3
                    chunk = pat[i:j]
                    if chunk:
                        chunks.append(chunk)
                    else:
                        chunks[-1] += '-'
                    # Remove empty ranges -- invalid in RE.
                    for k in range(len(chunks)-1, 0, -1):
                        if chunks[k-1][-1] > chunks[k][0]:
                            chunks[k-1] = chunks[k-1][:-1] + chunks[k][1:]
                            del chunks[k]
                    # Escape backslashes and hyphens for set difference (--).
                    # Hyphens that create ranges shouldn't be escaped.
                    stuff = '-'.join(s.replace('\\', r'\\').replace('-', r'\-')
                                     for s in chunks)
                # Escape set operations (&&, ~~ and ||).
                stuff = re.sub(r'([&~|])', r'\\\1', stuff)
                i = j+1
                if not stuff:
                    # Empty range: never match.
                    add('(?!)')
                elif stuff == '!':
                    # Negated empty range: match any character.
                    add('.')
                else:
                    if stuff[0] == '!':
                        stuff = '^' + stuff[1:]
                    elif stuff[0] in ('^', '['):
                        stuff = '\\' + stuff
                    add(f'[{stuff}]')
        else:
            add(re.escape(c))
    assert i == n

    # Deal with STARs.
    inp = res
    res = []
    add = res.append
    i, n = 0, len(inp)
    # Fixed pieces at the start?
    while i < n and inp[i] is not STAR:
        add(inp[i])
        i += 1
    # Now deal with STAR fixed STAR fixed ...
    # For an interior `STAR fixed` pairing, we want to do a minimal
    # .*? match followed by `fixed`, with no possibility of backtracking.
    # We can't spell that directly, but can trick it into working by matching
    #    .*?fixed
    # in a lookahead assertion, save the matched part in a group, then
    # consume that group via a backreference. If the overall match fails,
    # the lookahead assertion won't try alternatives. So the translation is:
    #     (?=(?P<name>.*?fixed))(?P=name)
    # Group names are created as needed: g0, g1, g2, ...
    # The numbers are obtained from _nextgroupnum() to ensure they're unique
    # across calls and across threads. This is because people rely on the
    # undocumented ability to join multiple translate() results together via
    # "|" to build large regexps matching "one of many" shell patterns.
    while i < n:
        assert inp[i] is STAR
        i += 1
        if i == n:
            add(".*")
            break
        assert inp[i] is not STAR
        fixed = []
        while i < n and inp[i] is not STAR:
            fixed.append(inp[i])
            i += 1
        fixed = "".join(fixed)
        if i == n:
            # Trailing `STAR fixed`: plain greedy .* is safe here.
            add(".*")
            add(fixed)
        else:
            groupnum = _nextgroupnum()
            add(f"(?=(?P<g{groupnum}>.*?{fixed}))(?P=g{groupnum})")
    assert i == n
    res = "".join(res)
    # DOTALL so '*'/'?' match newlines too; \Z anchors at end of string.
    return fr'(?s:{res})\Z'
|
evalkit_cambrian/lib/python3.10/functools.py
ADDED
|
@@ -0,0 +1,992 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""functools.py - Tools for working with functions and callable objects
|
| 2 |
+
"""
|
| 3 |
+
# Python module wrapper for _functools C module
|
| 4 |
+
# to allow utilities written in Python to be added
|
| 5 |
+
# to the functools module.
|
| 6 |
+
# Written by Nick Coghlan <ncoghlan at gmail.com>,
|
| 7 |
+
# Raymond Hettinger <python at rcn.com>,
|
| 8 |
+
# and Łukasz Langa <lukasz at langa.pl>.
|
| 9 |
+
# Copyright (C) 2006-2013 Python Software Foundation.
|
| 10 |
+
# See C source code for _functools credits/copyright
|
| 11 |
+
|
| 12 |
+
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
|
| 13 |
+
'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
|
| 14 |
+
'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
|
| 15 |
+
'cached_property']
|
| 16 |
+
|
| 17 |
+
from abc import get_cache_token
|
| 18 |
+
from collections import namedtuple
|
| 19 |
+
# import types, weakref # Deferred to single_dispatch()
|
| 20 |
+
from reprlib import recursive_repr
|
| 21 |
+
from _thread import RLock
|
| 22 |
+
from types import GenericAlias
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
################################################################################
|
| 26 |
+
### update_wrapper() and wraps() decorator
|
| 27 |
+
################################################################################
|
| 28 |
+
|
| 29 |
+
# update_wrapper() and wraps() are tools to help write
|
| 30 |
+
# wrapper functions that can handle naive introspection
|
| 31 |
+
|
| 32 |
+
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
                       '__annotations__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
                   wrapped,
                   assigned = WRAPPER_ASSIGNMENTS,
                   updated = WRAPPER_UPDATES):
    """Make *wrapper* look like the *wrapped* function it delegates to.

    wrapper is the function to be updated
    wrapped is the original function
    assigned names the attributes copied directly from wrapped to wrapper
    (defaults to functools.WRAPPER_ASSIGNMENTS)
    updated names the wrapper attributes that are update()d with the
    corresponding attribute of wrapped (defaults to
    functools.WRAPPER_UPDATES)
    """
    missing = object()
    for attr in assigned:
        value = getattr(wrapped, attr, missing)
        if value is not missing:
            setattr(wrapper, attr, value)
    for attr in updated:
        getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
    # Issue #17482: set __wrapped__ last so a copied __dict__ can't
    # inadvertently clobber it.
    wrapper.__wrapped__ = wrapped
    # Return the wrapper so this can be used as a decorator via partial()
    return wrapper
|
| 64 |
+
|
| 65 |
+
def wraps(wrapped,
          assigned = WRAPPER_ASSIGNMENTS,
          updated = WRAPPER_UPDATES):
    """Decorator factory applying update_wrapper() to the decorated function.

    The returned decorator calls update_wrapper() with the decorated
    function as the wrapper argument and the arguments given to wraps()
    as the remaining arguments; defaults are as for update_wrapper().
    This is just a convenience wrapper around partial(update_wrapper, ...).
    """
    return partial(
        update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
################################################################################
|
| 81 |
+
### total_ordering class decorator
|
| 82 |
+
################################################################################
|
| 83 |
+
|
| 84 |
+
# The total ordering functions all invoke the root magic method directly
|
| 85 |
+
# rather than using the corresponding operator. This avoids possible
|
| 86 |
+
# infinite recursion that could occur when the operator dispatch logic
|
| 87 |
+
# detects a NotImplemented result and then calls a reflected method.
|
| 88 |
+
|
| 89 |
+
def _gt_from_lt(self, other, NotImplemented=NotImplemented):
|
| 90 |
+
'Return a > b. Computed by @total_ordering from (not a < b) and (a != b).'
|
| 91 |
+
op_result = type(self).__lt__(self, other)
|
| 92 |
+
if op_result is NotImplemented:
|
| 93 |
+
return op_result
|
| 94 |
+
return not op_result and self != other
|
| 95 |
+
|
| 96 |
+
def _le_from_lt(self, other, NotImplemented=NotImplemented):
|
| 97 |
+
'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).'
|
| 98 |
+
op_result = type(self).__lt__(self, other)
|
| 99 |
+
if op_result is NotImplemented:
|
| 100 |
+
return op_result
|
| 101 |
+
return op_result or self == other
|
| 102 |
+
|
| 103 |
+
def _ge_from_lt(self, other, NotImplemented=NotImplemented):
|
| 104 |
+
'Return a >= b. Computed by @total_ordering from (not a < b).'
|
| 105 |
+
op_result = type(self).__lt__(self, other)
|
| 106 |
+
if op_result is NotImplemented:
|
| 107 |
+
return op_result
|
| 108 |
+
return not op_result
|
| 109 |
+
|
| 110 |
+
def _ge_from_le(self, other, NotImplemented=NotImplemented):
|
| 111 |
+
'Return a >= b. Computed by @total_ordering from (not a <= b) or (a == b).'
|
| 112 |
+
op_result = type(self).__le__(self, other)
|
| 113 |
+
if op_result is NotImplemented:
|
| 114 |
+
return op_result
|
| 115 |
+
return not op_result or self == other
|
| 116 |
+
|
| 117 |
+
def _lt_from_le(self, other, NotImplemented=NotImplemented):
|
| 118 |
+
'Return a < b. Computed by @total_ordering from (a <= b) and (a != b).'
|
| 119 |
+
op_result = type(self).__le__(self, other)
|
| 120 |
+
if op_result is NotImplemented:
|
| 121 |
+
return op_result
|
| 122 |
+
return op_result and self != other
|
| 123 |
+
|
| 124 |
+
def _gt_from_le(self, other, NotImplemented=NotImplemented):
|
| 125 |
+
'Return a > b. Computed by @total_ordering from (not a <= b).'
|
| 126 |
+
op_result = type(self).__le__(self, other)
|
| 127 |
+
if op_result is NotImplemented:
|
| 128 |
+
return op_result
|
| 129 |
+
return not op_result
|
| 130 |
+
|
| 131 |
+
def _lt_from_gt(self, other, NotImplemented=NotImplemented):
|
| 132 |
+
'Return a < b. Computed by @total_ordering from (not a > b) and (a != b).'
|
| 133 |
+
op_result = type(self).__gt__(self, other)
|
| 134 |
+
if op_result is NotImplemented:
|
| 135 |
+
return op_result
|
| 136 |
+
return not op_result and self != other
|
| 137 |
+
|
| 138 |
+
def _ge_from_gt(self, other, NotImplemented=NotImplemented):
|
| 139 |
+
'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).'
|
| 140 |
+
op_result = type(self).__gt__(self, other)
|
| 141 |
+
if op_result is NotImplemented:
|
| 142 |
+
return op_result
|
| 143 |
+
return op_result or self == other
|
| 144 |
+
|
| 145 |
+
def _le_from_gt(self, other, NotImplemented=NotImplemented):
|
| 146 |
+
'Return a <= b. Computed by @total_ordering from (not a > b).'
|
| 147 |
+
op_result = type(self).__gt__(self, other)
|
| 148 |
+
if op_result is NotImplemented:
|
| 149 |
+
return op_result
|
| 150 |
+
return not op_result
|
| 151 |
+
|
| 152 |
+
def _le_from_ge(self, other, NotImplemented=NotImplemented):
|
| 153 |
+
'Return a <= b. Computed by @total_ordering from (not a >= b) or (a == b).'
|
| 154 |
+
op_result = type(self).__ge__(self, other)
|
| 155 |
+
if op_result is NotImplemented:
|
| 156 |
+
return op_result
|
| 157 |
+
return not op_result or self == other
|
| 158 |
+
|
| 159 |
+
def _gt_from_ge(self, other, NotImplemented=NotImplemented):
|
| 160 |
+
'Return a > b. Computed by @total_ordering from (a >= b) and (a != b).'
|
| 161 |
+
op_result = type(self).__ge__(self, other)
|
| 162 |
+
if op_result is NotImplemented:
|
| 163 |
+
return op_result
|
| 164 |
+
return op_result and self != other
|
| 165 |
+
|
| 166 |
+
def _lt_from_ge(self, other, NotImplemented=NotImplemented):
|
| 167 |
+
'Return a < b. Computed by @total_ordering from (not a >= b).'
|
| 168 |
+
op_result = type(self).__ge__(self, other)
|
| 169 |
+
if op_result is NotImplemented:
|
| 170 |
+
return op_result
|
| 171 |
+
return not op_result
|
| 172 |
+
|
| 173 |
+
_convert = {
|
| 174 |
+
'__lt__': [('__gt__', _gt_from_lt),
|
| 175 |
+
('__le__', _le_from_lt),
|
| 176 |
+
('__ge__', _ge_from_lt)],
|
| 177 |
+
'__le__': [('__ge__', _ge_from_le),
|
| 178 |
+
('__lt__', _lt_from_le),
|
| 179 |
+
('__gt__', _gt_from_le)],
|
| 180 |
+
'__gt__': [('__lt__', _lt_from_gt),
|
| 181 |
+
('__ge__', _ge_from_gt),
|
| 182 |
+
('__le__', _le_from_gt)],
|
| 183 |
+
'__ge__': [('__le__', _le_from_ge),
|
| 184 |
+
('__gt__', _gt_from_ge),
|
| 185 |
+
('__lt__', _lt_from_ge)]
|
| 186 |
+
}
|
| 187 |
+
|
| 188 |
+
def total_ordering(cls):
    """Class decorator that fills in missing ordering methods"""
    # Find user-defined comparisons (not those inherited from object).
    # A method is "user-defined" if the attribute differs from object's.
    roots = {op for op in _convert if getattr(cls, op, None) is not getattr(object, op, None)}
    if not roots:
        raise ValueError('must define at least one ordering operation: < > <= >=')
    root = max(roots)       # prefer __lt__ to __le__ to __gt__ to __ge__
    for opname, opfunc in _convert[root]:
        if opname not in roots:
            # Only synthesize operations the class did not define itself.
            opfunc.__name__ = opname
            setattr(cls, opname, opfunc)
    return cls
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
################################################################################
|
| 203 |
+
### cmp_to_key() function converter
|
| 204 |
+
################################################################################
|
| 205 |
+
|
| 206 |
+
def cmp_to_key(mycmp):
    """Convert a cmp= function into a key= function"""
    # Wrap each value in an object whose rich comparisons delegate to the
    # old-style three-way comparison function *mycmp*.
    class K(object):
        __slots__ = ['obj']

        def __init__(self, obj):
            self.obj = obj

        __hash__ = None  # the wrapper is deliberately unhashable

        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0

        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0

        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0

        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0

        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0

    return K
|
| 224 |
+
|
| 225 |
+
try:
|
| 226 |
+
from _functools import cmp_to_key
|
| 227 |
+
except ImportError:
|
| 228 |
+
pass
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
################################################################################
|
| 232 |
+
### reduce() sequence to a single item
|
| 233 |
+
################################################################################
|
| 234 |
+
|
| 235 |
+
# Sentinel distinguishing "no initial value supplied" from an explicit None.
_initial_missing = object()

def reduce(function, sequence, initial=_initial_missing):
    """
    reduce(function, iterable[, initial]) -> value

    Apply a function of two arguments cumulatively to the items of a sequence
    or iterable, from left to right, so as to reduce the iterable to a single
    value.  For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates
    ((((1+2)+3)+4)+5).  If initial is present, it is placed before the items
    of the iterable in the calculation, and serves as a default when the
    iterable is empty.
    """
    iterator = iter(sequence)

    if initial is not _initial_missing:
        accumulator = initial
    else:
        # No seed given: the first item becomes the seed.
        try:
            accumulator = next(iterator)
        except StopIteration:
            raise TypeError(
                "reduce() of empty iterable with no initial value") from None

    for item in iterator:
        accumulator = function(accumulator, item)

    return accumulator
|
| 264 |
+
|
| 265 |
+
try:
|
| 266 |
+
from _functools import reduce
|
| 267 |
+
except ImportError:
|
| 268 |
+
pass
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
################################################################################
|
| 272 |
+
### partial() argument application
|
| 273 |
+
################################################################################
|
| 274 |
+
|
| 275 |
+
# Purely functional, no descriptor behaviour
class partial:
    """New function with partial application of the given arguments
    and keywords.
    """

    __slots__ = "func", "args", "keywords", "__dict__", "__weakref__"

    def __new__(cls, func, /, *args, **keywords):
        if not callable(func):
            raise TypeError("the first argument must be callable")

        # Flatten nested partials: partial(partial(f, a), b) stores f
        # directly with the combined args/keywords, keeping one call level.
        if hasattr(func, "func"):
            args = func.args + args
            keywords = {**func.keywords, **keywords}
            func = func.func

        self = super(partial, cls).__new__(cls)

        self.func = func
        self.args = args
        self.keywords = keywords
        return self

    def __call__(self, /, *args, **keywords):
        # Call-time keywords override the stored ones; positional args are
        # appended after the stored ones.
        keywords = {**self.keywords, **keywords}
        return self.func(*self.args, *args, **keywords)

    @recursive_repr()
    def __repr__(self):
        qualname = type(self).__qualname__
        args = [repr(self.func)]
        args.extend(repr(x) for x in self.args)
        args.extend(f"{k}={v!r}" for (k, v) in self.keywords.items())
        # Prefix with the module name only for the stdlib class itself,
        # not for subclasses defined elsewhere.
        if type(self).__module__ == "functools":
            return f"functools.{qualname}({', '.join(args)})"
        return f"{qualname}({', '.join(args)})"

    def __reduce__(self):
        # Pickle support: reconstruct via type(self)(func) then restore the
        # full state (func, args, keywords, instance __dict__) in
        # __setstate__.  Empty dicts are stored as None to keep pickles small.
        return type(self), (self.func,), (self.func, self.args,
               self.keywords or None, self.__dict__ or None)

    def __setstate__(self, state):
        # Validate the incoming state tuple defensively; pickled data may
        # come from untrusted or corrupted sources.
        if not isinstance(state, tuple):
            raise TypeError("argument to __setstate__ must be a tuple")
        if len(state) != 4:
            raise TypeError(f"expected 4 items in state, got {len(state)}")
        func, args, kwds, namespace = state
        if (not callable(func) or not isinstance(args, tuple) or
                (kwds is not None and not isinstance(kwds, dict)) or
                (namespace is not None and not isinstance(namespace, dict))):
            raise TypeError("invalid partial state")

        args = tuple(args) # just in case it's a subclass
        if kwds is None:
            kwds = {}
        elif type(kwds) is not dict: # XXX does it need to be *exactly* dict?
            kwds = dict(kwds)
        if namespace is None:
            namespace = {}

        self.__dict__ = namespace
        self.func = func
        self.args = args
        self.keywords = kwds
|
| 340 |
+
|
| 341 |
+
try:
|
| 342 |
+
from _functools import partial
|
| 343 |
+
except ImportError:
|
| 344 |
+
pass
|
| 345 |
+
|
| 346 |
+
# Descriptor version
class partialmethod(object):
    """Method descriptor with partial application of the given arguments
    and keywords.

    Supports wrapping existing descriptors and handles non-descriptor
    callables as instance methods.
    """

    def __init__(self, func, /, *args, **keywords):
        if not callable(func) and not hasattr(func, "__get__"):
            raise TypeError("{!r} is not callable or a descriptor"
                            .format(func))

        # func could be a descriptor like classmethod which isn't callable,
        # so we can't inherit from partial (it verifies func is callable)
        if isinstance(func, partialmethod):
            # flattening is mandatory in order to place cls/self before all
            # other arguments
            # it's also more efficient since only one function will be called
            self.func = func.func
            self.args = func.args + args
            self.keywords = {**func.keywords, **keywords}
        else:
            self.func = func
            self.args = args
            self.keywords = keywords

    def __repr__(self):
        args = ", ".join(map(repr, self.args))
        keywords = ", ".join("{}={!r}".format(k, v)
                                 for k, v in self.keywords.items())
        format_string = "{module}.{cls}({func}, {args}, {keywords})"
        return format_string.format(module=self.__class__.__module__,
                                    cls=self.__class__.__qualname__,
                                    func=self.func,
                                    args=args,
                                    keywords=keywords)

    def _make_unbound_method(self):
        # Fallback path used when the wrapped object is a plain callable:
        # build a function that injects self/cls as the first argument.
        def _method(cls_or_self, /, *args, **keywords):
            keywords = {**self.keywords, **keywords}
            return self.func(cls_or_self, *self.args, *args, **keywords)
        _method.__isabstractmethod__ = self.__isabstractmethod__
        # Expose the partialmethod for introspection (e.g. inspect).
        _method._partialmethod = self
        return _method

    def __get__(self, obj, cls=None):
        get = getattr(self.func, "__get__", None)
        result = None
        if get is not None:
            new_func = get(obj, cls)
            if new_func is not self.func:
                # Assume __get__ returning something new indicates the
                # creation of an appropriate callable
                result = partial(new_func, *self.args, **self.keywords)
                try:
                    # Mirror bound-method behaviour where possible.
                    result.__self__ = new_func.__self__
                except AttributeError:
                    pass
        if result is None:
            # If the underlying descriptor didn't do anything, treat this
            # like an instance method
            result = self._make_unbound_method().__get__(obj, cls)
        return result

    @property
    def __isabstractmethod__(self):
        # Delegate abstractness to the wrapped callable so ABCs still work.
        return getattr(self.func, "__isabstractmethod__", False)

    __class_getitem__ = classmethod(GenericAlias)
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
# Helper functions
|
| 420 |
+
|
| 421 |
+
def _unwrap_partial(func):
|
| 422 |
+
while isinstance(func, partial):
|
| 423 |
+
func = func.func
|
| 424 |
+
return func
|
| 425 |
+
|
| 426 |
+
################################################################################
|
| 427 |
+
### LRU Cache function decorator
|
| 428 |
+
################################################################################
|
| 429 |
+
|
| 430 |
+
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
|
| 431 |
+
|
| 432 |
+
class _HashedSeq(list):
|
| 433 |
+
""" This class guarantees that hash() will be called no more than once
|
| 434 |
+
per element. This is important because the lru_cache() will hash
|
| 435 |
+
the key multiple times on a cache miss.
|
| 436 |
+
|
| 437 |
+
"""
|
| 438 |
+
|
| 439 |
+
__slots__ = 'hashvalue'
|
| 440 |
+
|
| 441 |
+
def __init__(self, tup, hash=hash):
|
| 442 |
+
self[:] = tup
|
| 443 |
+
self.hashvalue = hash(tup)
|
| 444 |
+
|
| 445 |
+
def __hash__(self):
|
| 446 |
+
return self.hashvalue
|
| 447 |
+
|
| 448 |
+
def _make_key(args, kwds, typed,
|
| 449 |
+
kwd_mark = (object(),),
|
| 450 |
+
fasttypes = {int, str},
|
| 451 |
+
tuple=tuple, type=type, len=len):
|
| 452 |
+
"""Make a cache key from optionally typed positional and keyword arguments
|
| 453 |
+
|
| 454 |
+
The key is constructed in a way that is flat as possible rather than
|
| 455 |
+
as a nested structure that would take more memory.
|
| 456 |
+
|
| 457 |
+
If there is only a single argument and its data type is known to cache
|
| 458 |
+
its hash value, then that argument is returned without a wrapper. This
|
| 459 |
+
saves space and improves lookup speed.
|
| 460 |
+
|
| 461 |
+
"""
|
| 462 |
+
# All of code below relies on kwds preserving the order input by the user.
|
| 463 |
+
# Formerly, we sorted() the kwds before looping. The new way is *much*
|
| 464 |
+
# faster; however, it means that f(x=1, y=2) will now be treated as a
|
| 465 |
+
# distinct call from f(y=2, x=1) which will be cached separately.
|
| 466 |
+
key = args
|
| 467 |
+
if kwds:
|
| 468 |
+
key += kwd_mark
|
| 469 |
+
for item in kwds.items():
|
| 470 |
+
key += item
|
| 471 |
+
if typed:
|
| 472 |
+
key += tuple(type(v) for v in args)
|
| 473 |
+
if kwds:
|
| 474 |
+
key += tuple(type(v) for v in kwds.values())
|
| 475 |
+
elif len(key) == 1 and type(key[0]) in fasttypes:
|
| 476 |
+
return key[0]
|
| 477 |
+
return _HashedSeq(key)
|
| 478 |
+
|
| 479 |
+
def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info().  Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See:  https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)

    """

    # Users should only access the lru_cache through its public API:
    #       cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    if isinstance(maxsize, int):
        # Negative maxsize is treated as 0
        if maxsize < 0:
            maxsize = 0
    elif callable(maxsize) and isinstance(typed, bool):
        # The user_function was passed in directly via the maxsize argument
        # (bare @lru_cache usage without parentheses); use the default size.
        user_function, maxsize = maxsize, 128
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        # Expose the configuration for introspection.
        wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)
    elif maxsize is not None:
        raise TypeError(
            'Expected first argument to be an integer, a callable, or None')

    def decorating_function(user_function):
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)

    return decorating_function
|
| 524 |
+
|
| 525 |
+
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    """Build the caching wrapper for lru_cache().

    Picks one of three implementations depending on *maxsize*: no caching
    (maxsize == 0), unbounded caching (maxsize is None), or an LRU cache
    backed by a circular doubly linked list.
    """
    # Constants shared by all lru cache instances:
    sentinel = object()          # unique object used to signal cache misses
    make_key = _make_key         # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3   # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get    # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()           # because linkedlist updates aren't threadsafe
    root = []                # root of the circular doubly linked list
    root[:] = [root, root, None, None]     # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            # The lock is deliberately released while the (possibly slow)
            # user function runs, so other threads can use the cache.
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = (cache_len() >= maxsize)
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
|
| 640 |
+
|
| 641 |
+
try:
|
| 642 |
+
from _functools import _lru_cache_wrapper
|
| 643 |
+
except ImportError:
|
| 644 |
+
pass
|
| 645 |
+
|
| 646 |
+
|
| 647 |
+
################################################################################
|
| 648 |
+
### cache -- simplified access to the infinity cache
|
| 649 |
+
################################################################################
|
| 650 |
+
|
| 651 |
+
def cache(user_function, /):
    'Simple lightweight unbounded cache.  Sometimes called "memoize".'
    # Identical to lru_cache with the size limit disabled.
    unbounded = lru_cache(maxsize=None)
    return unbounded(user_function)
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
################################################################################
|
| 657 |
+
### singledispatch() - single-dispatch generic function decorator
|
| 658 |
+
################################################################################
|
| 659 |
+
|
| 660 |
+
def _c3_merge(sequences):
|
| 661 |
+
"""Merges MROs in *sequences* to a single MRO using the C3 algorithm.
|
| 662 |
+
|
| 663 |
+
Adapted from https://www.python.org/download/releases/2.3/mro/.
|
| 664 |
+
|
| 665 |
+
"""
|
| 666 |
+
result = []
|
| 667 |
+
while True:
|
| 668 |
+
sequences = [s for s in sequences if s] # purge empty sequences
|
| 669 |
+
if not sequences:
|
| 670 |
+
return result
|
| 671 |
+
for s1 in sequences: # find merge candidates among seq heads
|
| 672 |
+
candidate = s1[0]
|
| 673 |
+
for s2 in sequences:
|
| 674 |
+
if candidate in s2[1:]:
|
| 675 |
+
candidate = None
|
| 676 |
+
break # reject the current head, it appears later
|
| 677 |
+
else:
|
| 678 |
+
break
|
| 679 |
+
if candidate is None:
|
| 680 |
+
raise RuntimeError("Inconsistent hierarchy")
|
| 681 |
+
result.append(candidate)
|
| 682 |
+
# remove the chosen candidate
|
| 683 |
+
for seq in sequences:
|
| 684 |
+
if seq[0] == candidate:
|
| 685 |
+
del seq[0]
|
| 686 |
+
|
| 687 |
+
def _c3_mro(cls, abcs=None):
    """Computes the method resolution order using extended C3 linearization.

    If no *abcs* are given, the algorithm works exactly like the built-in C3
    linearization used for method resolution.

    If given, *abcs* is a list of abstract base classes that should be inserted
    into the resulting MRO. Unrelated ABCs are ignored and don't end up in the
    result. The algorithm inserts ABCs where their functionality is introduced,
    i.e. issubclass(cls, abc) returns True for the class itself but returns
    False for all its direct base classes. Implicit ABCs for a given class
    (either registered or inferred from the presence of a special method like
    __len__) are inserted directly after the last ABC explicitly listed in the
    MRO of said class. If two implicit ABCs end up next to each other in the
    resulting MRO, their ordering depends on the order of types in *abcs*.

    """
    # Locate the last explicit ABC among the direct bases; bases up to and
    # including it form the "explicit" group.
    for i, base in enumerate(reversed(cls.__bases__)):
        if hasattr(base, '__abstractmethods__'):
            boundary = len(cls.__bases__) - i
            break  # Bases up to the last explicit ABC are considered first.
    else:
        boundary = 0
    abcs = list(abcs) if abcs else []
    explicit_bases = list(cls.__bases__[:boundary])
    abstract_bases = []
    other_bases = list(cls.__bases__[boundary:])
    for base in abcs:
        if issubclass(cls, base) and not any(
                issubclass(b, base) for b in cls.__bases__
            ):
            # If *cls* is the class that introduces behaviour described by
            # an ABC *base*, insert said ABC to its MRO.
            abstract_bases.append(base)
    for base in abstract_bases:
        abcs.remove(base)
    # Recurse into each group separately, then merge all linearizations.
    explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases]
    abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases]
    other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases]
    return _c3_merge(
        [[cls]] +
        explicit_c3_mros + abstract_c3_mros + other_c3_mros +
        [explicit_bases] + [abstract_bases] + [other_bases]
    )
|
| 731 |
+
|
| 732 |
+
def _compose_mro(cls, types):
    """Calculates the method resolution order for a given class *cls*.

    Includes relevant abstract base classes (with their respective bases) from
    the *types* iterable. Uses a modified C3 linearization algorithm.

    """
    bases = set(cls.__mro__)
    # Remove entries which are already present in the __mro__ or unrelated.
    def is_related(typ):
        return (typ not in bases and hasattr(typ, '__mro__')
                               and not isinstance(typ, GenericAlias)
                               and issubclass(cls, typ))
    types = [n for n in types if is_related(n)]
    # Remove entries which are strict bases of other entries (they will end up
    # in the MRO anyway.
    def is_strict_base(typ):
        for other in types:
            if typ != other and typ in other.__mro__:
                return True
        return False
    types = [n for n in types if not is_strict_base(n)]
    # Subclasses of the ABCs in *types* which are also implemented by
    # *cls* can be used to stabilize ABC ordering.
    type_set = set(types)
    mro = []
    for typ in types:
        found = []
        for sub in typ.__subclasses__():
            if sub not in bases and issubclass(cls, sub):
                found.append([s for s in sub.__mro__ if s in type_set])
        if not found:
            mro.append(typ)
            continue
        # Favor subclasses with the biggest number of useful bases
        found.sort(key=len, reverse=True)
        for sub in found:
            for subcls in sub:
                if subcls not in mro:
                    mro.append(subcls)
    # Delegate the final linearization to the extended C3 algorithm.
    return _c3_mro(cls, abcs=mro)
|
| 773 |
+
|
| 774 |
+
def _find_impl(cls, registry):
    """Returns the best matching implementation from *registry* for type *cls*.

    Where there is no registered implementation for a specific type, its method
    resolution order is used to find a more generic implementation.

    Note: if *registry* does not contain an implementation for the base
    *object* type, this function may return None.

    """
    mro = _compose_mro(cls, registry.keys())
    match = None
    # Walk the composed MRO; the first registered type wins, unless a second,
    # unrelated implicit ABC also matches -- that is ambiguous.
    for t in mro:
        if match is not None:
            # If *match* is an implicit ABC but there is another unrelated,
            # equally matching implicit ABC, refuse the temptation to guess.
            if (t in registry and t not in cls.__mro__
                              and match not in cls.__mro__
                              and not issubclass(match, t)):
                raise RuntimeError("Ambiguous dispatch: {} or {}".format(
                    match, t))
            break
        if t in registry:
            match = t
    # May be None when no implementation (not even for object) is registered.
    return registry.get(match)
|
| 799 |
+
|
| 800 |
+
def singledispatch(func):
    """Single-dispatch generic function decorator.

    Transforms a function into a generic function, which can have different
    behaviours depending upon the type of its first argument. The decorated
    function acts as the default implementation, and additional
    implementations can be registered using the register() attribute of the
    generic function.
    """
    # There are many programs that use functools without singledispatch, so we
    # trade-off making singledispatch marginally slower for the benefit of
    # making start-up of such applications slightly faster.
    import types, weakref

    registry = {}  # maps dispatch type -> implementation; object is the default
    dispatch_cache = weakref.WeakKeyDictionary()  # memoizes dispatch() per type
    cache_token = None  # ABC cache token; set once an ABC gets registered

    def dispatch(cls):
        """generic_func.dispatch(cls) -> <function implementation>

        Runs the dispatch algorithm to return the best available implementation
        for the given *cls* registered on *generic_func*.

        """
        nonlocal cache_token
        if cache_token is not None:
            # ABC virtual-subclass registrations elsewhere invalidate our cache;
            # detect that via the abc module's global token.
            current_token = get_cache_token()
            if cache_token != current_token:
                dispatch_cache.clear()
                cache_token = current_token
        try:
            impl = dispatch_cache[cls]
        except KeyError:
            try:
                # Exact-type hit is the common fast path.
                impl = registry[cls]
            except KeyError:
                # Fall back to MRO/ABC resolution (defined earlier in this file).
                impl = _find_impl(cls, registry)
            dispatch_cache[cls] = impl
        return impl

    def _is_valid_dispatch_type(cls):
        # GenericAlias objects such as list[int] are not usable dispatch keys.
        return isinstance(cls, type) and not isinstance(cls, GenericAlias)

    def register(cls, func=None):
        """generic_func.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a *generic_func*.

        """
        nonlocal cache_token
        if _is_valid_dispatch_type(cls):
            if func is None:
                # Decorator-factory form: @generic_func.register(SomeClass)
                return lambda f: register(cls, f)
        else:
            if func is not None:
                raise TypeError(
                    f"Invalid first argument to `register()`. "
                    f"{cls!r} is not a class."
                )
            # Plain @generic_func.register form: the "cls" argument is really
            # the function, and the dispatch type comes from its annotations.
            ann = getattr(cls, '__annotations__', {})
            if not ann:
                raise TypeError(
                    f"Invalid first argument to `register()`: {cls!r}. "
                    f"Use either `@register(some_class)` or plain `@register` "
                    f"on an annotated function."
                )
            func = cls

            # only import typing if annotation parsing is necessary
            from typing import get_type_hints
            argname, cls = next(iter(get_type_hints(func).items()))
            if not _is_valid_dispatch_type(cls):
                raise TypeError(
                    f"Invalid annotation for {argname!r}. "
                    f"{cls!r} is not a class."
                )

        registry[cls] = func
        if cache_token is None and hasattr(cls, '__abstractmethods__'):
            # First ABC registered: start tracking the ABC cache token.
            cache_token = get_cache_token()
        # Any new registration can change dispatch results for cached types.
        dispatch_cache.clear()
        return func

    def wrapper(*args, **kw):
        if not args:
            raise TypeError(f'{funcname} requires at least '
                            '1 positional argument')

        # Dispatch on the runtime class of the first positional argument.
        return dispatch(args[0].__class__)(*args, **kw)

    funcname = getattr(func, '__name__', 'singledispatch function')
    registry[object] = func  # the decorated function is the default impl
    wrapper.register = register
    wrapper.dispatch = dispatch
    wrapper.registry = types.MappingProxyType(registry)  # read-only view
    wrapper._clear_cache = dispatch_cache.clear
    update_wrapper(wrapper, func)
    return wrapper
|
| 899 |
+
|
| 900 |
+
|
| 901 |
+
# Descriptor version
class singledispatchmethod:
    """Single-dispatch generic method descriptor.

    Supports wrapping existing descriptors and handles non-descriptor
    callables as instance methods.
    """

    def __init__(self, func):
        if not callable(func) and not hasattr(func, "__get__"):
            raise TypeError(f"{func!r} is not callable or a descriptor")

        # The singledispatch wrapper does the actual type-based routing;
        # the original is kept so __get__ can copy its metadata.
        self.dispatcher = singledispatch(func)
        self.func = func

    def register(self, cls, method=None):
        """generic_method.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a *generic_method*.
        """
        return self.dispatcher.register(cls, func=method)

    def __get__(self, obj, cls=None):
        # Build a fresh bound-method-like callable on each attribute access.
        # Dispatch is on the type of the first positional argument (not self).
        def _method(*args, **kwargs):
            method = self.dispatcher.dispatch(args[0].__class__)
            return method.__get__(obj, cls)(*args, **kwargs)

        _method.__isabstractmethod__ = self.__isabstractmethod__
        _method.register = self.register
        update_wrapper(_method, self.func)
        return _method

    @property
    def __isabstractmethod__(self):
        # Propagate abstractness of the wrapped default implementation so
        # ABCs treat the descriptor as abstract when appropriate.
        return getattr(self.func, '__isabstractmethod__', False)
|
| 936 |
+
|
| 937 |
+
|
| 938 |
+
################################################################################
### cached_property() - computed once per instance, cached as attribute
################################################################################

# Sentinel distinguishing "no cached value yet" from a legitimately cached None.
_NOT_FOUND = object()


class cached_property:
    """Descriptor that computes a value once per instance and caches it.

    The result is stored in the instance's __dict__ under the attribute
    name, so subsequent lookups bypass this descriptor entirely.
    """

    def __init__(self, func):
        self.func = func
        # Filled in by __set_name__ when the owning class body executes.
        self.attrname = None
        self.__doc__ = func.__doc__
        # Serializes the first computation across threads (RLock imported
        # at module level from _thread).
        self.lock = RLock()

    def __set_name__(self, owner, name):
        if self.attrname is None:
            self.attrname = name
        elif name != self.attrname:
            # The same descriptor object cannot cache under two keys.
            raise TypeError(
                "Cannot assign the same cached_property to two different names "
                f"({self.attrname!r} and {name!r})."
            )

    def __get__(self, instance, owner=None):
        if instance is None:
            # Class-level access returns the descriptor itself.
            return self
        if self.attrname is None:
            raise TypeError(
                "Cannot use cached_property instance without calling __set_name__ on it.")
        try:
            cache = instance.__dict__
        except AttributeError:  # not all objects have __dict__ (e.g. class defines slots)
            msg = (
                f"No '__dict__' attribute on {type(instance).__name__!r} "
                f"instance to cache {self.attrname!r} property."
            )
            raise TypeError(msg) from None
        val = cache.get(self.attrname, _NOT_FOUND)
        if val is _NOT_FOUND:
            with self.lock:
                # check if another thread filled cache while we awaited lock
                val = cache.get(self.attrname, _NOT_FOUND)
                if val is _NOT_FOUND:
                    val = self.func(instance)
                    try:
                        cache[self.attrname] = val
                    except TypeError:
                        # e.g. a read-only __dict__ such as a mappingproxy.
                        msg = (
                            f"The '__dict__' attribute on {type(instance).__name__!r} instance "
                            f"does not support item assignment for caching {self.attrname!r} property."
                        )
                        raise TypeError(msg) from None
        return val

    # Support parameterized annotations like cached_property[int].
    __class_getitem__ = classmethod(GenericAlias)
|
evalkit_cambrian/lib/python3.10/genericpath.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Path operations common to more than one OS
|
| 3 |
+
Do not use directly. The OS specific modules import the appropriate
|
| 4 |
+
functions from this module themselves.
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
import stat
|
| 8 |
+
|
| 9 |
+
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
|
| 10 |
+
'getsize', 'isdir', 'isfile', 'samefile', 'sameopenfile',
|
| 11 |
+
'samestat']
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Return True if *path* exists; broken symbolic links count as missing."""
    # os.stat() follows symlinks, so a dangling link raises OSError here.
    # ValueError covers malformed path values such as embedded NUL bytes.
    try:
        os.stat(path)
        return True
    except (OSError, ValueError):
        return False
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# This follows symbolic links, so both islink() and isfile() can be true
# for the same path on systems that support symlinks
def isfile(path):
    """Return True if *path* names an existing regular file."""
    try:
        mode = os.stat(path).st_mode
    except (OSError, ValueError):
        # Nonexistent path, permission failure, or malformed path value.
        return False
    return stat.S_ISREG(mode)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return True if the pathname *s* refers to an existing directory."""
    try:
        mode = os.stat(s).st_mode
    except (OSError, ValueError):
        # Nonexistent path, permission failure, or malformed path value.
        return False
    return stat.S_ISDIR(mode)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def getsize(filename):
    """Return the size of *filename* in bytes, as reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def getmtime(filename):
    """Return the last-modification time of *filename*, from os.stat()."""
    st = os.stat(filename)
    return st.st_mtime
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def getatime(filename):
    """Return the last-access time of *filename*, from os.stat()."""
    st = os.stat(filename)
    return st.st_atime
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def getctime(filename):
    """Return the metadata-change time of *filename*, from os.stat()."""
    st = os.stat(filename)
    return st.st_ctime
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# Return the longest prefix of all list elements.
def commonprefix(m):
    """Return the longest common leading component of the paths in *m*."""
    if not m:
        return ''
    # Lists/tuples of path parts are accepted as-is (OS-agnostic usage);
    # only flat path arguments are coerced through os.fspath().
    if not isinstance(m[0], (list, tuple)):
        m = tuple(map(os.fspath, m))
    # The lexicographic minimum and maximum differ first exactly where some
    # pair of elements first differs, so comparing just these two suffices.
    lo = min(m)
    hi = max(m)
    for i, ch in enumerate(lo):
        if ch != hi[i]:
            return lo[:i]
    return lo
|
| 84 |
+
|
| 85 |
+
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
    """Return True if two stat results describe the same file."""
    # A (device, inode) pair uniquely identifies a file.
    return s1.st_ino == s2.st_ino and s1.st_dev == s2.st_dev


# Are two filenames really pointing to the same file?
def samefile(f1, f2):
    """Return True if both pathnames refer to the same file or directory.

    Identity is determined by device number and i-node number; an
    os.stat() failure on either pathname propagates as an exception.
    """
    return samestat(os.stat(f1), os.stat(f2))


# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
    """Return True if two open file descriptors reference the same file."""
    return samestat(os.fstat(fp1), os.fstat(fp2))
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# Split a path in root and extension.
|
| 115 |
+
# The extension is everything starting at the last dot in the last
|
| 116 |
+
# pathname component; the root is everything before that.
|
| 117 |
+
# It is always true that root + ext == p.
|
| 118 |
+
|
| 119 |
+
# Generic implementation of splitext, to be parametrized with
|
| 120 |
+
# the separators
|
| 121 |
+
def _splitext(p, sep, altsep, extsep):
|
| 122 |
+
"""Split the extension from a pathname.
|
| 123 |
+
|
| 124 |
+
Extension is everything from the last dot to the end, ignoring
|
| 125 |
+
leading dots. Returns "(root, ext)"; ext may be empty."""
|
| 126 |
+
# NOTE: This code must work for text and bytes strings.
|
| 127 |
+
|
| 128 |
+
sepIndex = p.rfind(sep)
|
| 129 |
+
if altsep:
|
| 130 |
+
altsepIndex = p.rfind(altsep)
|
| 131 |
+
sepIndex = max(sepIndex, altsepIndex)
|
| 132 |
+
|
| 133 |
+
dotIndex = p.rfind(extsep)
|
| 134 |
+
if dotIndex > sepIndex:
|
| 135 |
+
# skip all leading dots
|
| 136 |
+
filenameIndex = sepIndex + 1
|
| 137 |
+
while filenameIndex < dotIndex:
|
| 138 |
+
if p[filenameIndex:filenameIndex+1] != extsep:
|
| 139 |
+
return p[:dotIndex], p[dotIndex:]
|
| 140 |
+
filenameIndex += 1
|
| 141 |
+
|
| 142 |
+
return p, p[:0]
|
| 143 |
+
|
| 144 |
+
def _check_arg_types(funcname, *args):
|
| 145 |
+
hasstr = hasbytes = False
|
| 146 |
+
for s in args:
|
| 147 |
+
if isinstance(s, str):
|
| 148 |
+
hasstr = True
|
| 149 |
+
elif isinstance(s, bytes):
|
| 150 |
+
hasbytes = True
|
| 151 |
+
else:
|
| 152 |
+
raise TypeError(f'{funcname}() argument must be str, bytes, or '
|
| 153 |
+
f'os.PathLike object, not {s.__class__.__name__!r}') from None
|
| 154 |
+
if hasstr and hasbytes:
|
| 155 |
+
raise TypeError("Can't mix strings and bytes in path components") from None
|
evalkit_cambrian/lib/python3.10/hashlib.py
ADDED
|
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#. Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org)
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
__doc__ = """hashlib module - A common interface to many hash functions.
|
| 6 |
+
|
| 7 |
+
new(name, data=b'', **kwargs) - returns a new hash object implementing the
|
| 8 |
+
given hash function; initializing the hash
|
| 9 |
+
using the given binary data.
|
| 10 |
+
|
| 11 |
+
Named constructor functions are also available, these are faster
|
| 12 |
+
than using new(name):
|
| 13 |
+
|
| 14 |
+
md5(), sha1(), sha224(), sha256(), sha384(), sha512(), blake2b(), blake2s(),
|
| 15 |
+
sha3_224, sha3_256, sha3_384, sha3_512, shake_128, and shake_256.
|
| 16 |
+
|
| 17 |
+
More algorithms may be available on your platform but the above are guaranteed
|
| 18 |
+
to exist. See the algorithms_guaranteed and algorithms_available attributes
|
| 19 |
+
to find out what algorithm names can be passed to new().
|
| 20 |
+
|
| 21 |
+
NOTE: If you want the adler32 or crc32 hash functions they are available in
|
| 22 |
+
the zlib module.
|
| 23 |
+
|
| 24 |
+
Choose your hash function wisely. Some have known collision weaknesses.
|
| 25 |
+
sha384 and sha512 will be slow on 32 bit platforms.
|
| 26 |
+
|
| 27 |
+
Hash objects have these methods:
|
| 28 |
+
- update(data): Update the hash object with the bytes in data. Repeated calls
|
| 29 |
+
are equivalent to a single call with the concatenation of all
|
| 30 |
+
the arguments.
|
| 31 |
+
- digest(): Return the digest of the bytes passed to the update() method
|
| 32 |
+
so far as a bytes object.
|
| 33 |
+
- hexdigest(): Like digest() except the digest is returned as a string
|
| 34 |
+
of double length, containing only hexadecimal digits.
|
| 35 |
+
- copy(): Return a copy (clone) of the hash object. This can be used to
|
| 36 |
+
efficiently compute the digests of datas that share a common
|
| 37 |
+
initial substring.
|
| 38 |
+
|
| 39 |
+
For example, to obtain the digest of the byte string 'Nobody inspects the
|
| 40 |
+
spammish repetition':
|
| 41 |
+
|
| 42 |
+
>>> import hashlib
|
| 43 |
+
>>> m = hashlib.md5()
|
| 44 |
+
>>> m.update(b"Nobody inspects")
|
| 45 |
+
>>> m.update(b" the spammish repetition")
|
| 46 |
+
>>> m.digest()
|
| 47 |
+
b'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9'
|
| 48 |
+
|
| 49 |
+
More condensed:
|
| 50 |
+
|
| 51 |
+
>>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest()
|
| 52 |
+
'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
|
| 53 |
+
|
| 54 |
+
"""
|
| 55 |
+
|
| 56 |
+
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
                      'blake2b', 'blake2s',
                      'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
                      'shake_128', 'shake_256')


# Algorithms guaranteed in every build; algorithms_available is extended
# below with whatever the linked OpenSSL additionally provides.
algorithms_guaranteed = set(__always_supported)
algorithms_available = set(__always_supported)

__all__ = __always_supported + ('new', 'algorithms_guaranteed',
                                'algorithms_available', 'pbkdf2_hmac')


# Memoizes constructors resolved by __get_builtin_constructor() below.
__builtin_constructor_cache = {}

# Prefer our blake2 implementation
# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL
# implementations neither support keyed blake2 (blake2 MAC) nor advanced
# features like salt, personalization, or tree hashing. OpenSSL hash-only
# variants are available as 'blake2b512' and 'blake2s256', though.
__block_openssl_constructor = {
    'blake2b', 'blake2s',
}
|
| 81 |
+
|
| 82 |
+
def __get_builtin_constructor(name):
    # Resolve *name* to one of CPython's built-in hash constructors,
    # importing the relevant C extension module lazily on first use.
    # Raises ValueError for unknown/unavailable algorithm names.
    cache = __builtin_constructor_cache
    constructor = cache.get(name)
    if constructor is not None:
        return constructor
    try:
        # Each branch populates the cache for all spellings it can answer,
        # so one import serves several related names.
        if name in {'SHA1', 'sha1'}:
            import _sha1
            cache['SHA1'] = cache['sha1'] = _sha1.sha1
        elif name in {'MD5', 'md5'}:
            import _md5
            cache['MD5'] = cache['md5'] = _md5.md5
        elif name in {'SHA256', 'sha256', 'SHA224', 'sha224'}:
            import _sha256
            cache['SHA224'] = cache['sha224'] = _sha256.sha224
            cache['SHA256'] = cache['sha256'] = _sha256.sha256
        elif name in {'SHA512', 'sha512', 'SHA384', 'sha384'}:
            import _sha512
            cache['SHA384'] = cache['sha384'] = _sha512.sha384
            cache['SHA512'] = cache['sha512'] = _sha512.sha512
        elif name in {'blake2b', 'blake2s'}:
            import _blake2
            cache['blake2b'] = _blake2.blake2b
            cache['blake2s'] = _blake2.blake2s
        elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512'}:
            import _sha3
            cache['sha3_224'] = _sha3.sha3_224
            cache['sha3_256'] = _sha3.sha3_256
            cache['sha3_384'] = _sha3.sha3_384
            cache['sha3_512'] = _sha3.sha3_512
        elif name in {'shake_128', 'shake_256'}:
            import _sha3
            cache['shake_128'] = _sha3.shake_128
            cache['shake_256'] = _sha3.shake_256
    except ImportError:
        pass  # no extension module, this hash is unsupported.

    # Re-check: the branch above may have just populated the cache.
    constructor = cache.get(name)
    if constructor is not None:
        return constructor

    raise ValueError('unsupported hash type ' + name)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def __get_openssl_constructor(name):
    # Return the preferred constructor for *name*: OpenSSL's when usable,
    # otherwise the built-in implementation.
    if name in __block_openssl_constructor:
        # Prefer our builtin blake2 implementation.
        return __get_builtin_constructor(name)
    try:
        # MD5, SHA1, and SHA2 are in all supported OpenSSL versions
        # SHA3/shake are available in OpenSSL 1.1.1+
        f = getattr(_hashlib, 'openssl_' + name)
        # Allow the C module to raise ValueError.  The function will be
        # defined but the hash not actually available.  Don't fall back to
        # builtin if the current security policy blocks a digest, bpo#40695.
        f(usedforsecurity=False)
        # Use the C function directly (very fast)
        return f
    except (AttributeError, ValueError):
        # _hashlib lacks this digest (or policy rejected the probe call).
        return __get_builtin_constructor(name)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def __py_new(name, data=b'', **kwargs):
    """new(name, data=b'', **kwargs) - Return a new hashing object using the
    named algorithm; initialized with the given binary data.
    """
    # Fallback for new() used when the _hashlib (OpenSSL) module is absent.
    return __get_builtin_constructor(name)(data, **kwargs)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def __hash_new(name, data=b'', **kwargs):
    """new(name, data=b'') - Return a new hashing object using the named algorithm;
    optionally initialized with data (which must be a bytes-like object).
    """
    if name in __block_openssl_constructor:
        # Prefer our builtin blake2 implementation.
        return __get_builtin_constructor(name)(data, **kwargs)
    try:
        return _hashlib.new(name, data, **kwargs)
    except ValueError:
        # If the _hashlib module (OpenSSL) doesn't support the named
        # hash, try using our builtin implementations.
        # This allows for SHA224/256 and SHA384/512 support even though
        # the OpenSSL library prior to 0.9.8 doesn't provide them.
        # NOTE(review): **kwargs are not forwarded on this fallback path;
        # this matches upstream CPython — confirm before relying on kwargs
        # for non-OpenSSL digests.
        return __get_builtin_constructor(name)(data)
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
# Bind the public new() and the internal constructor resolver according to
# whether the OpenSSL-backed _hashlib extension can be imported.
try:
    import _hashlib
    new = __hash_new
    __get_hash = __get_openssl_constructor
    # Advertise every digest OpenSSL knows about in addition to the built-ins.
    algorithms_available = algorithms_available.union(
        _hashlib.openssl_md_meth_names)
except ImportError:
    # No OpenSSL bindings: fall back entirely to the built-in constructors.
    _hashlib = None
    new = __py_new
    __get_hash = __get_builtin_constructor
|
| 179 |
+
|
| 180 |
+
try:
    # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA
    from _hashlib import pbkdf2_hmac
except ImportError:
    from warnings import warn as _warn
    # Translation tables XOR-ing every byte value with the HMAC opad (0x5C)
    # and ipad (0x36) constants, used to derive the inner/outer keys below.
    _trans_5C = bytes((x ^ 0x5C) for x in range(256))
    _trans_36 = bytes((x ^ 0x36) for x in range(256))

    def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None):
        """Password based key derivation function 2 (PKCS #5 v2.0)

        This Python implementations based on the hmac module about as fast
        as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster
        for long passwords.
        """
        _warn(
            "Python implementation of pbkdf2_hmac() is deprecated.",
            category=DeprecationWarning,
            stacklevel=2
        )
        if not isinstance(hash_name, str):
            raise TypeError(hash_name)

        # Accept any bytes-like object for password and salt.
        if not isinstance(password, (bytes, bytearray)):
            password = bytes(memoryview(password))
        if not isinstance(salt, (bytes, bytearray)):
            salt = bytes(memoryview(salt))

        # Fast inline HMAC implementation
        inner = new(hash_name)
        outer = new(hash_name)
        blocksize = getattr(inner, 'block_size', 64)
        if len(password) > blocksize:
            # Per HMAC, keys longer than the block size are hashed first.
            password = new(hash_name, password).digest()
        password = password + b'\x00' * (blocksize - len(password))
        inner.update(password.translate(_trans_36))
        outer.update(password.translate(_trans_5C))

        def prf(msg, inner=inner, outer=outer):
            # PBKDF2_HMAC uses the password as key. We can re-use the same
            # digest objects and just update copies to skip initialization.
            icpy = inner.copy()
            ocpy = outer.copy()
            icpy.update(msg)
            ocpy.update(icpy.digest())
            return ocpy.digest()

        if iterations < 1:
            raise ValueError(iterations)
        if dklen is None:
            # Default output length is the digest size of the chosen hash.
            dklen = outer.digest_size
        if dklen < 1:
            raise ValueError(dklen)

        dkey = b''
        loop = 1
        from_bytes = int.from_bytes  # local alias for the hot inner loop
        while len(dkey) < dklen:
            # Each block T_i starts from PRF(salt || INT_32_BE(i)).
            prev = prf(salt + loop.to_bytes(4, 'big'))
            # endianness doesn't matter here as long to / from use the same
            rkey = int.from_bytes(prev, 'big')
            for i in range(iterations - 1):
                prev = prf(prev)
                # rkey = rkey ^ prev
                rkey ^= from_bytes(prev, 'big')
            loop += 1
            dkey += rkey.to_bytes(inner.digest_size, 'big')

        # Truncate to the requested derived-key length.
        return dkey[:dklen]
|
| 249 |
+
|
| 250 |
+
try:
    # OpenSSL's scrypt requires OpenSSL 1.1+
    from _hashlib import scrypt
except ImportError:
    # There is no pure-Python fallback for scrypt; the name is simply
    # absent when the linked OpenSSL is too old.
    pass
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
# Export a module-level constructor (md5, sha1, ...) for each guaranteed
# algorithm, resolved through whichever __get_hash was bound above.
for __func_name in __always_supported:
    # try them all, some may not work due to the OpenSSL
    # version not supporting that algorithm.
    try:
        globals()[__func_name] = __get_hash(__func_name)
    except ValueError:
        import logging
        logging.exception('code for hash %s was not found.', __func_name)


# Cleanup locals()
del __always_supported, __func_name, __get_hash
del __py_new, __hash_new, __get_openssl_constructor
|
evalkit_cambrian/lib/python3.10/heapq.py
ADDED
|
@@ -0,0 +1,601 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Heap queue algorithm (a.k.a. priority queue).
|
| 2 |
+
|
| 3 |
+
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
|
| 4 |
+
all k, counting elements from 0. For the sake of comparison,
|
| 5 |
+
non-existing elements are considered to be infinite. The interesting
|
| 6 |
+
property of a heap is that a[0] is always its smallest element.
|
| 7 |
+
|
| 8 |
+
Usage:
|
| 9 |
+
|
| 10 |
+
heap = [] # creates an empty heap
|
| 11 |
+
heappush(heap, item) # pushes a new item on the heap
|
| 12 |
+
item = heappop(heap) # pops the smallest item from the heap
|
| 13 |
+
item = heap[0] # smallest item on the heap without popping it
|
| 14 |
+
heapify(x) # transforms list into a heap, in-place, in linear time
|
| 15 |
+
item = heapreplace(heap, item) # pops and returns smallest item, and adds
|
| 16 |
+
# new item; the heap size is unchanged
|
| 17 |
+
|
| 18 |
+
Our API differs from textbook heap algorithms as follows:
|
| 19 |
+
|
| 20 |
+
- We use 0-based indexing. This makes the relationship between the
|
| 21 |
+
index for a node and the indexes for its children slightly less
|
| 22 |
+
obvious, but is more suitable since Python uses 0-based indexing.
|
| 23 |
+
|
| 24 |
+
- Our heappop() method returns the smallest item, not the largest.
|
| 25 |
+
|
| 26 |
+
These two make it possible to view the heap as a regular Python list
|
| 27 |
+
without surprises: heap[0] is the smallest item, and heap.sort()
|
| 28 |
+
maintains the heap invariant!
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
|
| 32 |
+
|
| 33 |
+
__about__ = """Heap queues
|
| 34 |
+
|
| 35 |
+
[explanation by François Pinard]
|
| 36 |
+
|
| 37 |
+
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
|
| 38 |
+
all k, counting elements from 0. For the sake of comparison,
|
| 39 |
+
non-existing elements are considered to be infinite. The interesting
|
| 40 |
+
property of a heap is that a[0] is always its smallest element.
|
| 41 |
+
|
| 42 |
+
The strange invariant above is meant to be an efficient memory
|
| 43 |
+
representation for a tournament. The numbers below are `k', not a[k]:
|
| 44 |
+
|
| 45 |
+
0
|
| 46 |
+
|
| 47 |
+
1 2
|
| 48 |
+
|
| 49 |
+
3 4 5 6
|
| 50 |
+
|
| 51 |
+
7 8 9 10 11 12 13 14
|
| 52 |
+
|
| 53 |
+
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
|
| 57 |
+
a usual binary tournament we see in sports, each cell is the winner
|
| 58 |
+
over the two cells it tops, and we can trace the winner down the tree
|
| 59 |
+
to see all opponents s/he had. However, in many computer applications
|
| 60 |
+
of such tournaments, we do not need to trace the history of a winner.
|
| 61 |
+
To be more memory efficient, when a winner is promoted, we try to
|
| 62 |
+
replace it by something else at a lower level, and the rule becomes
|
| 63 |
+
that a cell and the two cells it tops contain three different items,
|
| 64 |
+
but the top cell "wins" over the two topped cells.
|
| 65 |
+
|
| 66 |
+
If this heap invariant is protected at all time, index 0 is clearly
|
| 67 |
+
the overall winner. The simplest algorithmic way to remove it and
|
| 68 |
+
find the "next" winner is to move some loser (let's say cell 30 in the
|
| 69 |
+
diagram above) into the 0 position, and then percolate this new 0 down
|
| 70 |
+
the tree, exchanging values, until the invariant is re-established.
|
| 71 |
+
This is clearly logarithmic on the total number of items in the tree.
|
| 72 |
+
By iterating over all items, you get an O(n ln n) sort.
|
| 73 |
+
|
| 74 |
+
A nice feature of this sort is that you can efficiently insert new
|
| 75 |
+
items while the sort is going on, provided that the inserted items are
|
| 76 |
+
not "better" than the last 0'th element you extracted. This is
|
| 77 |
+
especially useful in simulation contexts, where the tree holds all
|
| 78 |
+
incoming events, and the "win" condition means the smallest scheduled
|
| 79 |
+
time. When an event schedule other events for execution, they are
|
| 80 |
+
scheduled into the future, so they can easily go into the heap. So, a
|
| 81 |
+
heap is a good structure for implementing schedulers (this is what I
|
| 82 |
+
used for my MIDI sequencer :-).
|
| 83 |
+
|
| 84 |
+
Various structures for implementing schedulers have been extensively
|
| 85 |
+
studied, and heaps are good for this, as they are reasonably speedy,
|
| 86 |
+
the speed is almost constant, and the worst case is not much different
|
| 87 |
+
than the average case. However, there are other representations which
|
| 88 |
+
are more efficient overall, yet the worst cases might be terrible.
|
| 89 |
+
|
| 90 |
+
Heaps are also very useful in big disk sorts. You most probably all
|
| 91 |
+
know that a big sort implies producing "runs" (which are pre-sorted
|
| 92 |
+
sequences, which size is usually related to the amount of CPU memory),
|
| 93 |
+
followed by a merging passes for these runs, which merging is often
|
| 94 |
+
very cleverly organised[1]. It is very important that the initial
|
| 95 |
+
sort produces the longest runs possible. Tournaments are a good way
|
| 96 |
+
to that. If, using all the memory available to hold a tournament, you
|
| 97 |
+
replace and percolate items that happen to fit the current run, you'll
|
| 98 |
+
produce runs which are twice the size of the memory for random input,
|
| 99 |
+
and much better for input fuzzily ordered.
|
| 100 |
+
|
| 101 |
+
Moreover, if you output the 0'th item on disk and get an input which
|
| 102 |
+
may not fit in the current tournament (because the value "wins" over
|
| 103 |
+
the last output value), it cannot fit in the heap, so the size of the
|
| 104 |
+
heap decreases. The freed memory could be cleverly reused immediately
|
| 105 |
+
for progressively building a second heap, which grows at exactly the
|
| 106 |
+
same rate the first heap is melting. When the first heap completely
|
| 107 |
+
vanishes, you switch heaps and start a new run. Clever and quite
|
| 108 |
+
effective!
|
| 109 |
+
|
| 110 |
+
In a word, heaps are useful memory structures to know. I use them in
|
| 111 |
+
a few applications, and I think it is good to keep a `heap' module
|
| 112 |
+
around. :-)
|
| 113 |
+
|
| 114 |
+
--------------------
|
| 115 |
+
[1] The disk balancing algorithms which are current, nowadays, are
|
| 116 |
+
more annoying than clever, and this is a consequence of the seeking
|
| 117 |
+
capabilities of the disks. On devices which cannot seek, like big
|
| 118 |
+
tape drives, the story was quite different, and one had to be very
|
| 119 |
+
clever to ensure (far in advance) that each tape movement will be the
|
| 120 |
+
most effective possible (that is, will best participate at
|
| 121 |
+
"progressing" the merge). Some tapes were even able to read
|
| 122 |
+
backwards, and this was also used to avoid the rewinding time.
|
| 123 |
+
Believe me, real good tape sorts were quite spectacular to watch!
|
| 124 |
+
From all times, sorting has always been a Great Art! :-)
|
| 125 |
+
"""
|
| 126 |
+
|
| 127 |
+
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
|
| 128 |
+
'nlargest', 'nsmallest', 'heappushpop']
|
| 129 |
+
|
| 130 |
+
def heappush(heap, item):
    """Add *item* to *heap*, keeping the heap invariant intact."""
    # Append at the end, then let the new element bubble up toward the root.
    heap.append(item)
    _siftdown(heap, 0, len(heap) - 1)
|
| 134 |
+
|
| 135 |
+
def heappop(heap):
    """Remove and return the smallest item from *heap*.

    Raises IndexError if the heap is empty.
    """
    tail = heap.pop()    # IndexError on an empty heap, as documented
    if not heap:
        # The popped element was the only one, hence also the minimum.
        return tail
    smallest = heap[0]
    # Move the former last element to the root and sink it into place.
    heap[0] = tail
    _siftup(heap, 0)
    return smallest
|
| 144 |
+
|
| 145 |
+
def heapreplace(heap, item):
    """Pop and return the smallest value, then add *item*.

    More efficient than a heappop() followed by a heappush(), and useful
    for fixed-size heaps.  Caution: the returned value may be larger than
    the *item* that was added, so a conditional form is often wanted:

        if item > heap[0]:
            item = heapreplace(heap, item)
    """
    smallest = heap[0]    # IndexError on an empty heap, as documented
    heap[0] = item
    _siftup(heap, 0)
    return smallest
|
| 160 |
+
|
| 161 |
+
def heappushpop(heap, item):
    """Push *item* on *heap*, then pop and return the smallest value.

    Faster than heappush() followed by heappop(): when *item* is no
    larger than the current root, nothing needs to move at all.
    """
    if heap and heap[0] < item:
        # The current root is the answer; swap *item* into its place and
        # restore the invariant.
        item, heap[0] = heap[0], item
        _siftup(heap, 0)
    return item
|
| 167 |
+
|
| 168 |
+
def heapify(x):
    """Rearrange list *x* into heap order, in place, in O(len(x)) time."""
    # Work bottom-up: only indices that have at least one child need to be
    # sifted, and the last such index is len(x)//2 - 1 whether len(x) is
    # even or odd (the largest i with 2*i + 1 < len(x)).
    for i in range(len(x) // 2 - 1, -1, -1):
        _siftup(x, i)
|
| 178 |
+
|
| 179 |
+
def _heappop_max(heap):
    """Maxheap counterpart of heappop(): remove and return the largest item."""
    tail = heap.pop()    # IndexError on an empty heap, as documented
    if not heap:
        return tail
    largest = heap[0]
    heap[0] = tail
    _siftup_max(heap, 0)
    return largest
|
| 188 |
+
|
| 189 |
+
def _heapreplace_max(heap, item):
    """Maxheap counterpart of heapreplace(): pop the largest, push *item*."""
    largest = heap[0]    # IndexError on an empty heap, as documented
    heap[0] = item
    _siftup_max(heap, 0)
    return largest
|
| 195 |
+
|
| 196 |
+
def _heapify_max(x):
    """Rearrange list *x* into maxheap order, in place, in O(len(x)) time."""
    # Same bottom-up sweep as heapify(), using the maxheap sift.
    for i in range(len(x) // 2 - 1, -1, -1):
        _siftup_max(x, i)
|
| 201 |
+
|
| 202 |
+
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
|
| 203 |
+
# is the index of a leaf with a possibly out-of-order value. Restore the
|
| 204 |
+
# heap invariant.
|
| 205 |
+
def _siftdown(heap, startpos, pos):
|
| 206 |
+
newitem = heap[pos]
|
| 207 |
+
# Follow the path to the root, moving parents down until finding a place
|
| 208 |
+
# newitem fits.
|
| 209 |
+
while pos > startpos:
|
| 210 |
+
parentpos = (pos - 1) >> 1
|
| 211 |
+
parent = heap[parentpos]
|
| 212 |
+
if newitem < parent:
|
| 213 |
+
heap[pos] = parent
|
| 214 |
+
pos = parentpos
|
| 215 |
+
continue
|
| 216 |
+
break
|
| 217 |
+
heap[pos] = newitem
|
| 218 |
+
|
| 219 |
+
# The child indices of heap index pos are already heaps, and we want to make
|
| 220 |
+
# a heap at index pos too. We do this by bubbling the smaller child of
|
| 221 |
+
# pos up (and so on with that child's children, etc) until hitting a leaf,
|
| 222 |
+
# then using _siftdown to move the oddball originally at index pos into place.
|
| 223 |
+
#
|
| 224 |
+
# We *could* break out of the loop as soon as we find a pos where newitem <=
|
| 225 |
+
# both its children, but turns out that's not a good idea, and despite that
|
| 226 |
+
# many books write the algorithm that way. During a heap pop, the last array
|
| 227 |
+
# element is sifted in, and that tends to be large, so that comparing it
|
| 228 |
+
# against values starting from the root usually doesn't pay (= usually doesn't
|
| 229 |
+
# get us out of the loop early). See Knuth, Volume 3, where this is
|
| 230 |
+
# explained and quantified in an exercise.
|
| 231 |
+
#
|
| 232 |
+
# Cutting the # of comparisons is important, since these routines have no
|
| 233 |
+
# way to extract "the priority" from an array element, so that intelligence
|
| 234 |
+
# is likely to be hiding in custom comparison methods, or in array elements
|
| 235 |
+
# storing (priority, record) tuples. Comparisons are thus potentially
|
| 236 |
+
# expensive.
|
| 237 |
+
#
|
| 238 |
+
# On random arrays of length 1000, making this change cut the number of
|
| 239 |
+
# comparisons made by heapify() a little, and those made by exhaustive
|
| 240 |
+
# heappop() a lot, in accord with theory. Here are typical results from 3
|
| 241 |
+
# runs (3 just to demonstrate how small the variance is):
|
| 242 |
+
#
|
| 243 |
+
# Compares needed by heapify Compares needed by 1000 heappops
|
| 244 |
+
# -------------------------- --------------------------------
|
| 245 |
+
# 1837 cut to 1663 14996 cut to 8680
|
| 246 |
+
# 1855 cut to 1659 14966 cut to 8678
|
| 247 |
+
# 1847 cut to 1660 15024 cut to 8703
|
| 248 |
+
#
|
| 249 |
+
# Building the heap by using heappush() 1000 times instead required
|
| 250 |
+
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
|
| 251 |
+
# you can use it.
|
| 252 |
+
#
|
| 253 |
+
# The total compares needed by list.sort() on the same lists were 8627,
|
| 254 |
+
# 8627, and 8632 (this should be compared to the sum of heapify() and
|
| 255 |
+
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
|
| 256 |
+
# for sorting.
|
| 257 |
+
|
| 258 |
+
def _siftup(heap, pos):
    """Sink the (possibly out-of-place) item at *pos* into heap position.

    The subtrees rooted at the children of *pos* must already be heaps.
    The item is first sunk all the way to a leaf, promoting the smaller
    child at each level, then _siftdown() bubbles it back up as needed —
    see the comparison-count rationale in the comment block above.
    """
    endpos = len(heap)
    startpos = pos
    newitem = heap[pos]
    # Bubble up the smaller child until hitting a leaf.
    childpos = 2*pos + 1    # leftmost child position
    while childpos < endpos:
        # Set childpos to index of smaller child.
        # (`not <` rather than `>=` so only __lt__ is required of items.)
        rightpos = childpos + 1
        if rightpos < endpos and not heap[childpos] < heap[rightpos]:
            childpos = rightpos
        # Move the smaller child up.
        heap[pos] = heap[childpos]
        pos = childpos
        childpos = 2*pos + 1
    # The leaf at pos is empty now.  Put newitem there, and bubble it up
    # to its final resting place (by sifting its parents down).
    heap[pos] = newitem
    _siftdown(heap, startpos, pos)
|
| 277 |
+
|
| 278 |
+
def _siftdown_max(heap, startpos, pos):
|
| 279 |
+
'Maxheap variant of _siftdown'
|
| 280 |
+
newitem = heap[pos]
|
| 281 |
+
# Follow the path to the root, moving parents down until finding a place
|
| 282 |
+
# newitem fits.
|
| 283 |
+
while pos > startpos:
|
| 284 |
+
parentpos = (pos - 1) >> 1
|
| 285 |
+
parent = heap[parentpos]
|
| 286 |
+
if parent < newitem:
|
| 287 |
+
heap[pos] = parent
|
| 288 |
+
pos = parentpos
|
| 289 |
+
continue
|
| 290 |
+
break
|
| 291 |
+
heap[pos] = newitem
|
| 292 |
+
|
| 293 |
+
def _siftup_max(heap, pos):
    """Maxheap variant of _siftup().

    The subtrees rooted at the children of *pos* must already be maxheaps;
    the item at *pos* is sunk to a leaf (promoting the larger child each
    step) and then bubbled back up by _siftdown_max().
    """
    endpos = len(heap)
    startpos = pos
    newitem = heap[pos]
    # Bubble up the larger child until hitting a leaf.
    childpos = 2*pos + 1    # leftmost child position
    while childpos < endpos:
        # Set childpos to index of larger child.
        # (`not <` rather than `>=` so only __lt__ is required of items.)
        rightpos = childpos + 1
        if rightpos < endpos and not heap[rightpos] < heap[childpos]:
            childpos = rightpos
        # Move the larger child up.
        heap[pos] = heap[childpos]
        pos = childpos
        childpos = 2*pos + 1
    # The leaf at pos is empty now.  Put newitem there, and bubble it up
    # to its final resting place (by sifting its parents down).
    heap[pos] = newitem
    _siftdown_max(heap, startpos, pos)
|
| 313 |
+
|
| 314 |
+
def merge(*iterables, key=None, reverse=False):
    '''Merge multiple sorted inputs into a single sorted output.

    Similar to sorted(itertools.chain(*iterables)) but returns a generator,
    does not pull the data into memory all at once, and assumes that each of
    the input streams is already sorted (smallest to largest).

    >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
    [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]

    If *key* is not None, applies a key function to each element to determine
    its sort order.

    >>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len))
    ['dog', 'cat', 'fish', 'horse', 'kangaroo']

    '''

    # Heap entries are *lists* (not tuples) so _heapreplace() can update
    # them in place without allocating a new entry per yielded element.
    # Entry layouts:
    #   no key:    [value, order * direction, next]
    #   with key:  [key(value), order * direction, value, next]
    # `order` (the iterable's position) breaks ties so comparisons never
    # fall through to the bound __next__ methods; `direction` flips the
    # tie-break sign so earlier iterables still win ties when reversed.
    h = []
    h_append = h.append

    if reverse:
        # Descending output: drive the internal maxheap variants.
        _heapify = _heapify_max
        _heappop = _heappop_max
        _heapreplace = _heapreplace_max
        direction = -1
    else:
        _heapify = heapify
        _heappop = heappop
        _heapreplace = heapreplace
        direction = 1

    if key is None:
        for order, it in enumerate(map(iter, iterables)):
            try:
                next = it.__next__
                h_append([next(), order * direction, next])
            except StopIteration:
                # Empty input iterable: contributes nothing to the heap.
                pass
        _heapify(h)
        while len(h) > 1:
            try:
                while True:
                    value, order, next = s = h[0]
                    yield value
                    s[0] = next()           # raises StopIteration when exhausted
                    _heapreplace(h, s)      # restore heap condition
            except StopIteration:
                _heappop(h)                 # remove empty iterator
        if h:
            # fast case when only a single iterator remains
            value, order, next = h[0]
            yield value
            yield from next.__self__
        return

    # Keyed variant of the same algorithm: each entry carries the
    # precomputed key so the key function runs exactly once per element.
    for order, it in enumerate(map(iter, iterables)):
        try:
            next = it.__next__
            value = next()
            h_append([key(value), order * direction, value, next])
        except StopIteration:
            pass
    _heapify(h)
    while len(h) > 1:
        try:
            while True:
                key_value, order, value, next = s = h[0]
                yield value
                value = next()              # raises StopIteration when exhausted
                s[0] = key(value)
                s[2] = value
                _heapreplace(h, s)          # restore heap condition
        except StopIteration:
            _heappop(h)                     # remove empty iterator
    if h:
        # fast case when only a single iterator remains
        key_value, order, value, next = h[0]
        yield value
        yield from next.__self__
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
# Algorithm notes for nlargest() and nsmallest()
|
| 396 |
+
# ==============================================
|
| 397 |
+
#
|
| 398 |
+
# Make a single pass over the data while keeping the k most extreme values
|
| 399 |
+
# in a heap. Memory consumption is limited to keeping k values in a list.
|
| 400 |
+
#
|
| 401 |
+
# Measured performance for random inputs:
|
| 402 |
+
#
|
| 403 |
+
# number of comparisons
|
| 404 |
+
# n inputs k-extreme values (average of 5 trials) % more than min()
|
| 405 |
+
# ------------- ---------------- --------------------- -----------------
|
| 406 |
+
# 1,000 100 3,317 231.7%
|
| 407 |
+
# 10,000 100 14,046 40.5%
|
| 408 |
+
# 100,000 100 105,749 5.7%
|
| 409 |
+
# 1,000,000 100 1,007,751 0.8%
|
| 410 |
+
# 10,000,000 100 10,009,401 0.1%
|
| 411 |
+
#
|
| 412 |
+
# Theoretical number of comparisons for k smallest of n random inputs:
|
| 413 |
+
#
|
| 414 |
+
# Step Comparisons Action
|
| 415 |
+
# ---- -------------------------- ---------------------------
|
| 416 |
+
# 1 1.66 * k heapify the first k-inputs
|
| 417 |
+
# 2 n - k compare remaining elements to top of heap
|
| 418 |
+
# 3 k * (1 + lg2(k)) * ln(n/k) replace the topmost value on the heap
|
| 419 |
+
# 4 k * lg2(k) - (k/2) final sort of the k most extreme values
|
| 420 |
+
#
|
| 421 |
+
# Combining and simplifying for a rough estimate gives:
|
| 422 |
+
#
|
| 423 |
+
# comparisons = n + k * (log(k, 2) * log(n/k) + log(k, 2) + log(n/k))
|
| 424 |
+
#
|
| 425 |
+
# Computing the number of comparisons for step 3:
|
| 426 |
+
# -----------------------------------------------
|
| 427 |
+
# * For the i-th new value from the iterable, the probability of being in the
|
| 428 |
+
# k most extreme values is k/i. For example, the probability of the 101st
|
| 429 |
+
# value seen being in the 100 most extreme values is 100/101.
|
| 430 |
+
# * If the value is a new extreme value, the cost of inserting it into the
|
| 431 |
+
# heap is 1 + log(k, 2).
|
| 432 |
+
# * The probability times the cost gives:
|
| 433 |
+
# (k/i) * (1 + log(k, 2))
|
| 434 |
+
# * Summing across the remaining n-k elements gives:
|
| 435 |
+
# sum((k/i) * (1 + log(k, 2)) for i in range(k+1, n+1))
|
| 436 |
+
# * This reduces to:
|
| 437 |
+
# (H(n) - H(k)) * k * (1 + log(k, 2))
|
| 438 |
+
# * Where H(n) is the n-th harmonic number estimated by:
|
| 439 |
+
# gamma = 0.5772156649
|
| 440 |
+
# H(n) = log(n, e) + gamma + 1 / (2 * n)
|
| 441 |
+
# http://en.wikipedia.org/wiki/Harmonic_series_(mathematics)#Rate_of_divergence
|
| 442 |
+
# * Substituting the H(n) formula:
|
| 443 |
+
# comparisons = k * (1 + log(k, 2)) * (log(n/k, e) + (1/n - 1/k) / 2)
|
| 444 |
+
#
|
| 445 |
+
# Worst-case for step 3:
|
| 446 |
+
# ----------------------
|
| 447 |
+
# In the worst case, the input data is reversed sorted so that every new element
|
| 448 |
+
# must be inserted in the heap:
|
| 449 |
+
#
|
| 450 |
+
# comparisons = 1.66 * k + log(k, 2) * (n - k)
|
| 451 |
+
#
|
| 452 |
+
# Alternative Algorithms
|
| 453 |
+
# ----------------------
|
| 454 |
+
# Other algorithms were not used because they:
|
| 455 |
+
# 1) Took much more auxiliary memory,
|
| 456 |
+
# 2) Made multiple passes over the data.
|
| 457 |
+
# 3) Made more comparisons in common cases (small k, large n, semi-random input).
|
| 458 |
+
# See the more detailed comparison of approach at:
|
| 459 |
+
# http://code.activestate.com/recipes/577573-compare-algorithms-for-heapqsmallest
|
| 460 |
+
|
| 461 |
+
def nsmallest(n, iterable, key=None):
    """Find the n smallest elements in a dataset.

    Equivalent to: sorted(iterable, key=key)[:n]
    """

    # Short-cut for n==1 is to use min()
    if n == 1:
        it = iter(iterable)
        sentinel = object()    # distinguishes "empty input" from a None minimum
        result = min(it, default=sentinel, key=key)
        return [] if result is sentinel else [result]

    # When n>=size, it's faster to use sorted()
    try:
        size = len(iterable)
    except (TypeError, AttributeError):
        # No len() (e.g. a generator): fall through to the heap algorithm.
        pass
    else:
        if n >= size:
            return sorted(iterable, key=key)[:n]

    # When key is none, use simpler decoration
    if key is None:
        it = iter(iterable)
        # put the range(n) first so that zip() doesn't
        # consume one too many elements from the iterator
        # The unique, increasing index keeps tuple comparisons from ever
        # falling through to comparing equal elements.
        result = [(elem, i) for i, elem in zip(range(n), it)]
        if not result:
            return result
        # Maintain the n smallest seen so far in a maxheap: the root is
        # the *largest* candidate and the first to be evicted.
        _heapify_max(result)
        top = result[0][0]
        order = n
        _heapreplace = _heapreplace_max
        for elem in it:
            if elem < top:
                _heapreplace(result, (elem, order))
                top, _order = result[0]
                order += 1
        result.sort()
        return [elem for (elem, order) in result]

    # General case, slowest method
    it = iter(iterable)
    result = [(key(elem), i, elem) for i, elem in zip(range(n), it)]
    if not result:
        return result
    _heapify_max(result)
    top = result[0][0]
    order = n
    _heapreplace = _heapreplace_max
    for elem in it:
        k = key(elem)    # call the key function once per element
        if k < top:
            _heapreplace(result, (k, order, elem))
            top, _order, _elem = result[0]
            order += 1
    result.sort()
    return [elem for (k, order, elem) in result]
|
| 520 |
+
|
| 521 |
+
def nlargest(n, iterable, key=None):
    """Find the n largest elements in a dataset.

    Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
    """

    # Short-cut for n==1 is to use max()
    if n == 1:
        it = iter(iterable)
        sentinel = object()    # distinguishes "empty input" from a None maximum
        result = max(it, default=sentinel, key=key)
        return [] if result is sentinel else [result]

    # When n>=size, it's faster to use sorted()
    try:
        size = len(iterable)
    except (TypeError, AttributeError):
        # No len() (e.g. a generator): fall through to the heap algorithm.
        pass
    else:
        if n >= size:
            return sorted(iterable, key=key, reverse=True)[:n]

    # When key is none, use simpler decoration
    if key is None:
        it = iter(iterable)
        # The unique, *decreasing* index (0, -1, -2, ...) breaks ties
        # without comparing equal elements, ranking earlier-seen elements
        # higher.
        result = [(elem, i) for i, elem in zip(range(0, -n, -1), it)]
        if not result:
            return result
        # Maintain the n largest seen so far in a minheap: the root is
        # the *smallest* candidate and the first to be evicted.
        heapify(result)
        top = result[0][0]
        order = -n
        _heapreplace = heapreplace
        for elem in it:
            if top < elem:
                _heapreplace(result, (elem, order))
                top, _order = result[0]
                order -= 1
        result.sort(reverse=True)
        return [elem for (elem, order) in result]

    # General case, slowest method
    it = iter(iterable)
    result = [(key(elem), i, elem) for i, elem in zip(range(0, -n, -1), it)]
    if not result:
        return result
    heapify(result)
    top = result[0][0]
    order = -n
    _heapreplace = heapreplace
    for elem in it:
        k = key(elem)    # call the key function once per element
        if top < k:
            _heapreplace(result, (k, order, elem))
            top, _order, _elem = result[0]
            order -= 1
    result.sort(reverse=True)
    return [elem for (k, order, elem) in result]
|
| 578 |
+
|
| 579 |
+
# If available, use C implementation
|
| 580 |
+
try:
|
| 581 |
+
from _heapq import *
|
| 582 |
+
except ImportError:
|
| 583 |
+
pass
|
| 584 |
+
try:
|
| 585 |
+
from _heapq import _heapreplace_max
|
| 586 |
+
except ImportError:
|
| 587 |
+
pass
|
| 588 |
+
try:
|
| 589 |
+
from _heapq import _heapify_max
|
| 590 |
+
except ImportError:
|
| 591 |
+
pass
|
| 592 |
+
try:
|
| 593 |
+
from _heapq import _heappop_max
|
| 594 |
+
except ImportError:
|
| 595 |
+
pass
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
if __name__ == "__main__":
|
| 599 |
+
|
| 600 |
+
import doctest # pragma: no cover
|
| 601 |
+
print(doctest.testmod()) # pragma: no cover
|
evalkit_cambrian/lib/python3.10/imghdr.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Recognize image file formats based on their first few bytes."""
|
| 2 |
+
|
| 3 |
+
from os import PathLike
|
| 4 |
+
|
| 5 |
+
__all__ = ["what"]
|
| 6 |
+
|
| 7 |
+
#-------------------------#
|
| 8 |
+
# Recognize image headers #
|
| 9 |
+
#-------------------------#
|
| 10 |
+
|
| 11 |
+
def what(file, h=None):
    """Return the image format detected for *file*, or None.

    *file* may be a filename/path or a binary file object.  If *h* is
    supplied it is used as the header bytes and *file* is not read.
    """
    opened = None
    try:
        if h is None:
            if isinstance(file, (str, PathLike)):
                opened = open(file, 'rb')
                h = opened.read(32)
            else:
                # Already a file object: peek at the header, then restore
                # the stream position so the caller sees no side effect.
                pos = file.tell()
                h = file.read(32)
                file.seek(pos)
        for probe in tests:
            result = probe(h, opened)
            if result:
                return result
    finally:
        if opened:
            opened.close()
    return None
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
#---------------------------------#
|
| 32 |
+
# Subroutines per image file type #
|
| 33 |
+
#---------------------------------#
|
| 34 |
+
|
| 35 |
+
tests = []
|
| 36 |
+
|
| 37 |
+
def test_jpeg(h, f):
|
| 38 |
+
"""JPEG data in JFIF or Exif format"""
|
| 39 |
+
if h[6:10] in (b'JFIF', b'Exif'):
|
| 40 |
+
return 'jpeg'
|
| 41 |
+
|
| 42 |
+
tests.append(test_jpeg)
|
| 43 |
+
|
| 44 |
+
def test_png(h, f):
|
| 45 |
+
if h.startswith(b'\211PNG\r\n\032\n'):
|
| 46 |
+
return 'png'
|
| 47 |
+
|
| 48 |
+
tests.append(test_png)
|
| 49 |
+
|
| 50 |
+
def test_gif(h, f):
|
| 51 |
+
"""GIF ('87 and '89 variants)"""
|
| 52 |
+
if h[:6] in (b'GIF87a', b'GIF89a'):
|
| 53 |
+
return 'gif'
|
| 54 |
+
|
| 55 |
+
tests.append(test_gif)
|
| 56 |
+
|
| 57 |
+
def test_tiff(h, f):
|
| 58 |
+
"""TIFF (can be in Motorola or Intel byte order)"""
|
| 59 |
+
if h[:2] in (b'MM', b'II'):
|
| 60 |
+
return 'tiff'
|
| 61 |
+
|
| 62 |
+
tests.append(test_tiff)
|
| 63 |
+
|
| 64 |
+
def test_rgb(h, f):
|
| 65 |
+
"""SGI image library"""
|
| 66 |
+
if h.startswith(b'\001\332'):
|
| 67 |
+
return 'rgb'
|
| 68 |
+
|
| 69 |
+
tests.append(test_rgb)
|
| 70 |
+
|
| 71 |
+
def test_pbm(h, f):
|
| 72 |
+
"""PBM (portable bitmap)"""
|
| 73 |
+
if len(h) >= 3 and \
|
| 74 |
+
h[0] == ord(b'P') and h[1] in b'14' and h[2] in b' \t\n\r':
|
| 75 |
+
return 'pbm'
|
| 76 |
+
|
| 77 |
+
tests.append(test_pbm)
|
| 78 |
+
|
| 79 |
+
def test_pgm(h, f):
|
| 80 |
+
"""PGM (portable graymap)"""
|
| 81 |
+
if len(h) >= 3 and \
|
| 82 |
+
h[0] == ord(b'P') and h[1] in b'25' and h[2] in b' \t\n\r':
|
| 83 |
+
return 'pgm'
|
| 84 |
+
|
| 85 |
+
tests.append(test_pgm)
|
| 86 |
+
|
| 87 |
+
def test_ppm(h, f):
|
| 88 |
+
"""PPM (portable pixmap)"""
|
| 89 |
+
if len(h) >= 3 and \
|
| 90 |
+
h[0] == ord(b'P') and h[1] in b'36' and h[2] in b' \t\n\r':
|
| 91 |
+
return 'ppm'
|
| 92 |
+
|
| 93 |
+
tests.append(test_ppm)
|
| 94 |
+
|
| 95 |
+
def test_rast(h, f):
|
| 96 |
+
"""Sun raster file"""
|
| 97 |
+
if h.startswith(b'\x59\xA6\x6A\x95'):
|
| 98 |
+
return 'rast'
|
| 99 |
+
|
| 100 |
+
tests.append(test_rast)
|
| 101 |
+
|
| 102 |
+
def test_xbm(h, f):
|
| 103 |
+
"""X bitmap (X10 or X11)"""
|
| 104 |
+
if h.startswith(b'#define '):
|
| 105 |
+
return 'xbm'
|
| 106 |
+
|
| 107 |
+
tests.append(test_xbm)
|
| 108 |
+
|
| 109 |
+
def test_bmp(h, f):
|
| 110 |
+
if h.startswith(b'BM'):
|
| 111 |
+
return 'bmp'
|
| 112 |
+
|
| 113 |
+
tests.append(test_bmp)
|
| 114 |
+
|
| 115 |
+
def test_webp(h, f):
|
| 116 |
+
if h.startswith(b'RIFF') and h[8:12] == b'WEBP':
|
| 117 |
+
return 'webp'
|
| 118 |
+
|
| 119 |
+
tests.append(test_webp)
|
| 120 |
+
|
| 121 |
+
def test_exr(h, f):
|
| 122 |
+
if h.startswith(b'\x76\x2f\x31\x01'):
|
| 123 |
+
return 'exr'
|
| 124 |
+
|
| 125 |
+
tests.append(test_exr)
|
| 126 |
+
|
| 127 |
+
#--------------------#
|
| 128 |
+
# Small test program #
|
| 129 |
+
#--------------------#
|
| 130 |
+
|
| 131 |
+
def test():
    """Command-line driver: report the detected type of each argument."""
    import sys
    recursive = 0
    if sys.argv[1:] and sys.argv[1] == '-r':
        del sys.argv[1:2]
        recursive = 1
    try:
        # Default to the current directory when no paths were given.
        args = sys.argv[1:]
        if not args:
            args = ['.']
        testall(args, recursive, 1)
    except KeyboardInterrupt:
        sys.stderr.write('\n[Interrupted]\n')
        sys.exit(1)
|
| 145 |
+
|
| 146 |
+
def testall(list, recursive, toplevel):
    """Print the detected image type of every path in *list*.

    Directories are descended into when *recursive* is set, or once at
    the top level; otherwise a notice is printed instead.
    """
    import sys
    import os
    for filename in list:
        if not os.path.isdir(filename):
            # Plain file: run the detectors and print the verdict.
            print(filename + ':', end=' ')
            sys.stdout.flush()
            try:
                print(what(filename))
            except OSError:
                print('*** not found ***')
            continue
        print(filename + '/:', end=' ')
        if not (recursive or toplevel):
            print('*** directory (use -r) ***')
        else:
            print('recursing down:')
            import glob
            names = glob.glob(os.path.join(glob.escape(filename), '*'))
            testall(names, recursive, 0)
|
| 166 |
+
|
| 167 |
+
if __name__ == '__main__':
|
| 168 |
+
test()
|
evalkit_cambrian/lib/python3.10/lzma.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Interface to the liblzma compression library.
|
| 2 |
+
|
| 3 |
+
This module provides a class for reading and writing compressed files,
|
| 4 |
+
classes for incremental (de)compression, and convenience functions for
|
| 5 |
+
one-shot (de)compression.
|
| 6 |
+
|
| 7 |
+
These classes and functions support both the XZ and legacy LZMA
|
| 8 |
+
container formats, as well as raw compressed data streams.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__all__ = [
|
| 12 |
+
"CHECK_NONE", "CHECK_CRC32", "CHECK_CRC64", "CHECK_SHA256",
|
| 13 |
+
"CHECK_ID_MAX", "CHECK_UNKNOWN",
|
| 14 |
+
"FILTER_LZMA1", "FILTER_LZMA2", "FILTER_DELTA", "FILTER_X86", "FILTER_IA64",
|
| 15 |
+
"FILTER_ARM", "FILTER_ARMTHUMB", "FILTER_POWERPC", "FILTER_SPARC",
|
| 16 |
+
"FORMAT_AUTO", "FORMAT_XZ", "FORMAT_ALONE", "FORMAT_RAW",
|
| 17 |
+
"MF_HC3", "MF_HC4", "MF_BT2", "MF_BT3", "MF_BT4",
|
| 18 |
+
"MODE_FAST", "MODE_NORMAL", "PRESET_DEFAULT", "PRESET_EXTREME",
|
| 19 |
+
|
| 20 |
+
"LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError",
|
| 21 |
+
"open", "compress", "decompress", "is_check_supported",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
import builtins
|
| 25 |
+
import io
|
| 26 |
+
import os
|
| 27 |
+
from _lzma import *
|
| 28 |
+
from _lzma import _encode_filter_properties, _decode_filter_properties
|
| 29 |
+
import _compression
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# Internal LZMAFile states (stored in LZMAFile._mode).
_MODE_CLOSED = 0
_MODE_READ = 1
# Value 2 no longer used
_MODE_WRITE = 3
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class LZMAFile(_compression.BaseStream):

    """A file object providing transparent LZMA (de)compression.

    An LZMAFile can act as a wrapper for an existing file object, or
    refer directly to a named file on disk.

    Note that LZMAFile provides a *binary* file interface - data read
    is returned as bytes, and data to be written must be given as bytes.
    """

    def __init__(self, filename=None, mode="r", *,
                 format=None, check=-1, preset=None, filters=None):
        """Open an LZMA-compressed file in binary mode.

        filename can be either an actual file name (given as a str,
        bytes, or PathLike object), in which case the named file is
        opened, or it can be an existing file object to read from or
        write to.

        mode can be "r" for reading (default), "w" for (over)writing,
        "x" for creating exclusively, or "a" for appending. These can
        equivalently be given as "rb", "wb", "xb" and "ab" respectively.

        format specifies the container format to use for the file.
        If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the
        default is FORMAT_XZ.

        check specifies the integrity check to use. This argument can
        only be used when opening a file for writing. For FORMAT_XZ,
        the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not
        support integrity checks - for these formats, check must be
        omitted, or be CHECK_NONE.

        When opening a file for reading, the *preset* argument is not
        meaningful, and should be omitted. The *filters* argument should
        also be omitted, except when format is FORMAT_RAW (in which case
        it is required).

        When opening a file for writing, the settings used by the
        compressor can be specified either as a preset compression
        level (with the *preset* argument), or in detail as a custom
        filter chain (with the *filters* argument). For FORMAT_XZ and
        FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset
        level. For FORMAT_RAW, the caller must always specify a filter
        chain; the raw compressor does not support preset compression
        levels.

        preset (if provided) should be an integer in the range 0-9,
        optionally OR-ed with the constant PRESET_EXTREME.

        filters (if provided) should be a sequence of dicts. Each dict
        should have an entry for "id" indicating ID of the filter, plus
        additional entries for options to the filter.
        """
        # Initialize to a safe closed state first so close() works even
        # if validation below raises.
        self._fp = None
        self._closefp = False
        self._mode = _MODE_CLOSED

        if mode in ("r", "rb"):
            if check != -1:
                raise ValueError("Cannot specify an integrity check "
                                 "when opening a file for reading")
            if preset is not None:
                raise ValueError("Cannot specify a preset compression "
                                 "level when opening a file for reading")
            if format is None:
                format = FORMAT_AUTO
            mode_code = _MODE_READ
        elif mode in ("w", "wb", "a", "ab", "x", "xb"):
            if format is None:
                format = FORMAT_XZ
            mode_code = _MODE_WRITE
            self._compressor = LZMACompressor(format=format, check=check,
                                              preset=preset, filters=filters)
            # Count of uncompressed bytes written; reported by tell().
            self._pos = 0
        else:
            raise ValueError("Invalid mode: {!r}".format(mode))

        if isinstance(filename, (str, bytes, os.PathLike)):
            if "b" not in mode:
                mode += "b"
            self._fp = builtins.open(filename, mode)
            # We opened the underlying file, so close() must close it too.
            self._closefp = True
            self._mode = mode_code
        elif hasattr(filename, "read") or hasattr(filename, "write"):
            # Caller-supplied file object: we do not take ownership of it.
            self._fp = filename
            self._mode = mode_code
        else:
            raise TypeError("filename must be a str, bytes, file or PathLike object")

        if self._mode == _MODE_READ:
            raw = _compression.DecompressReader(self._fp, LZMADecompressor,
                trailing_error=LZMAError, format=format, filters=filters)
            self._buffer = io.BufferedReader(raw)

    def close(self):
        """Flush and close the file.

        May be called more than once without error. Once the file is
        closed, any other operation on it will raise a ValueError.
        """
        if self._mode == _MODE_CLOSED:
            return
        # Nested try/finally so the underlying file is closed (when owned)
        # and the state is reset even if flushing the compressor fails.
        try:
            if self._mode == _MODE_READ:
                self._buffer.close()
                self._buffer = None
            elif self._mode == _MODE_WRITE:
                self._fp.write(self._compressor.flush())
                self._compressor = None
        finally:
            try:
                if self._closefp:
                    self._fp.close()
            finally:
                self._fp = None
                self._closefp = False
                self._mode = _MODE_CLOSED

    @property
    def closed(self):
        """True if this file is closed."""
        return self._mode == _MODE_CLOSED

    def fileno(self):
        """Return the file descriptor for the underlying file."""
        self._check_not_closed()
        return self._fp.fileno()

    def seekable(self):
        """Return whether the file supports seeking."""
        return self.readable() and self._buffer.seekable()

    def readable(self):
        """Return whether the file was opened for reading."""
        self._check_not_closed()
        return self._mode == _MODE_READ

    def writable(self):
        """Return whether the file was opened for writing."""
        self._check_not_closed()
        return self._mode == _MODE_WRITE

    def peek(self, size=-1):
        """Return buffered data without advancing the file position.

        Always returns at least one byte of data, unless at EOF.
        The exact number of bytes returned is unspecified.
        """
        self._check_can_read()
        # Relies on the undocumented fact that BufferedReader.peek() always
        # returns at least one byte (except at EOF)
        return self._buffer.peek(size)

    def read(self, size=-1):
        """Read up to size uncompressed bytes from the file.

        If size is negative or omitted, read until EOF is reached.
        Returns b"" if the file is already at EOF.
        """
        self._check_can_read()
        return self._buffer.read(size)

    def read1(self, size=-1):
        """Read up to size uncompressed bytes, while trying to avoid
        making multiple reads from the underlying stream. Reads up to a
        buffer's worth of data if size is negative.

        Returns b"" if the file is at EOF.
        """
        self._check_can_read()
        if size < 0:
            size = io.DEFAULT_BUFFER_SIZE
        return self._buffer.read1(size)

    def readline(self, size=-1):
        """Read a line of uncompressed bytes from the file.

        The terminating newline (if present) is retained. If size is
        non-negative, no more than size bytes will be read (in which
        case the line may be incomplete). Returns b'' if already at EOF.
        """
        self._check_can_read()
        return self._buffer.readline(size)

    def write(self, data):
        """Write a bytes object to the file.

        Returns the number of uncompressed bytes written, which is
        always the length of data in bytes. Note that due to buffering,
        the file on disk may not reflect the data written until close()
        is called.
        """
        self._check_can_write()
        if isinstance(data, (bytes, bytearray)):
            length = len(data)
        else:
            # accept any data that supports the buffer protocol
            data = memoryview(data)
            length = data.nbytes

        compressed = self._compressor.compress(data)
        self._fp.write(compressed)
        self._pos += length
        return length

    def seek(self, offset, whence=io.SEEK_SET):
        """Change the file position.

        The new position is specified by offset, relative to the
        position indicated by whence. Possible values for whence are:

            0: start of stream (default): offset must not be negative
            1: current stream position
            2: end of stream; offset must not be positive

        Returns the new file position.

        Note that seeking is emulated, so depending on the parameters,
        this operation may be extremely slow.
        """
        self._check_can_seek()
        return self._buffer.seek(offset, whence)

    def tell(self):
        """Return the current file position."""
        self._check_not_closed()
        if self._mode == _MODE_READ:
            return self._buffer.tell()
        # Write mode: position equals total uncompressed bytes written.
        return self._pos
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def open(filename, mode="rb", *,
         format=None, check=-1, preset=None, filters=None,
         encoding=None, errors=None, newline=None):
    """Open an LZMA-compressed file in binary or text mode.

    filename can be either an actual file name (given as a str, bytes,
    or PathLike object), in which case the named file is opened, or it
    can be an existing file object to read from or write to.

    The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb",
    "a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text
    mode.

    The format, check, preset and filters arguments specify the
    compression settings, as for LZMACompressor, LZMADecompressor and
    LZMAFile.

    In binary mode this is equivalent to the LZMAFile constructor and
    the encoding, errors and newline arguments must not be provided.
    In text mode the LZMAFile is wrapped in an io.TextIOWrapper with
    the given encoding, error handling behavior and line ending(s).
    """
    text_mode = "t" in mode
    if text_mode:
        if "b" in mode:
            raise ValueError("Invalid mode: %r" % (mode,))
    else:
        # Text-only arguments are rejected up front in binary mode.
        for arg_name, arg_value in (("encoding", encoding),
                                    ("errors", errors),
                                    ("newline", newline)):
            if arg_value is not None:
                raise ValueError("Argument %r not supported in binary mode"
                                 % arg_name)

    binary_file = LZMAFile(filename, mode.replace("t", ""), format=format,
                           check=check, preset=preset, filters=filters)

    if not text_mode:
        return binary_file
    encoding = io.text_encoding(encoding)
    return io.TextIOWrapper(binary_file, encoding, errors, newline)
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
    """One-shot compression: return *data* compressed as a single stream.

    Refer to LZMACompressor's docstring for a description of the
    optional arguments *format*, *check*, *preset* and *filters*.
    For incremental compression, use an LZMACompressor instead.
    """
    compressor = LZMACompressor(format, check, preset, filters)
    parts = [compressor.compress(data), compressor.flush()]
    return b"".join(parts)
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
    """One-shot decompression, concatenating every stream found in *data*.

    Refer to LZMADecompressor's docstring for a description of the
    optional arguments *format*, *memlimit* and *filters*.
    For incremental decompression, use an LZMADecompressor instead.
    """
    chunks = []
    while True:
        decomp = LZMADecompressor(format, memlimit, filters)
        try:
            chunk = decomp.decompress(data)
        except LZMAError:
            if not chunks:
                raise  # Error on the first iteration; bail out.
            break      # Leftover data is not a valid LZMA/XZ stream; ignore it.
        chunks.append(chunk)
        if not decomp.eof:
            raise LZMAError("Compressed data ended before the "
                            "end-of-stream marker was reached")
        # Any bytes past this stream may start another concatenated stream.
        data = decomp.unused_data
        if not data:
            break
    return b"".join(chunks)
|
evalkit_cambrian/lib/python3.10/mailcap.py
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Mailcap file handling. See RFC 1524."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import warnings
|
| 5 |
+
import re
|
| 6 |
+
|
| 7 |
+
__all__ = ["getcaps","findmatch"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def lineno_sort_key(entry):
    """Sort key ordering mailcap entries by source line number.

    Entries without a recorded 'lineno' sort after all numbered ones.
    """
    has_number = 'lineno' in entry
    return (0, entry['lineno']) if has_number else (1, 0)
|
| 16 |
+
|
| 17 |
+
# Matches any character outside the shell-safe set; strings containing
# such characters are refused before being substituted into a command.
_find_unsafe = re.compile(r'[^\xa1-\U0010FFFF\w@+=:,./-]').search

class UnsafeMailcapInput(Warning):
    """Warning raised when refusing unsafe input"""
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
# Part 1: top-level interface.
|
| 24 |
+
|
| 25 |
+
def getcaps():
    """Return a dictionary containing the mailcap database.

    The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain')
    to a list of dictionaries corresponding to mailcap entries.  The list
    collects all the entries for that MIME type from all available mailcap
    files.  Each dictionary contains key-value pairs for that MIME type,
    where the viewing command is stored with the key "view".

    """
    caps = {}
    # A single counter runs across all files so entries keep their
    # overall precedence order (earlier files win in lookup()).
    lineno = 0
    for mailcap in listmailcapfiles():
        try:
            fp = open(mailcap, 'r')
        except OSError:
            # Missing or unreadable mailcap files are silently skipped.
            continue
        with fp:
            morecaps, lineno = _readmailcapfile(fp, lineno)
        for key, value in morecaps.items():
            if not key in caps:
                caps[key] = value
            else:
                caps[key] = caps[key] + value
    return caps
|
| 50 |
+
|
| 51 |
+
def listmailcapfiles():
    """Return the list of mailcap file paths to consult, in order."""
    # The MAILCAPS environment variable overrides the built-in search path.
    pathstr = os.environ.get('MAILCAPS')
    if pathstr is not None:
        return pathstr.split(os.pathsep)
    # Don't bother with getpwuid(); fall back to '.' when HOME is unset.
    home = os.environ.get('HOME', '.')
    return [home + '/.mailcap', '/etc/mailcap',
            '/usr/etc/mailcap', '/usr/local/etc/mailcap']
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# Part 2: the parser.
|
| 69 |
+
def readmailcapfile(fp):
    """Read a mailcap file and return a dictionary keyed by MIME type.

    Deprecated: use getcaps() instead.
    """
    warnings.warn('readmailcapfile is deprecated, use getcaps instead',
                  DeprecationWarning, 2)
    return _readmailcapfile(fp, None)[0]
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _readmailcapfile(fp, lineno):
    """Read a mailcap file and return a dictionary keyed by MIME type.

    Each MIME type is mapped to an entry consisting of a list of
    dictionaries; the list will contain more than one such dictionary
    if a given MIME type appears more than once in the mailcap file.
    Each dictionary contains key-value pairs for that MIME type, where
    the viewing command is stored with the key "view".

    *lineno* is the running entry counter (or None to disable line
    numbering); the possibly-advanced counter is returned alongside
    the parsed dictionary.
    """
    caps = {}
    while 1:
        line = fp.readline()
        if not line: break
        # Ignore comments and blank lines
        if line[0] == '#' or line.strip() == '':
            continue
        nextline = line
        # Join continuation lines (a trailing backslash-newline splices
        # the next physical line onto this logical entry).
        while nextline[-2:] == '\\\n':
            nextline = fp.readline()
            if not nextline: nextline = '\n'
            line = line[:-2] + nextline
        # Parse the line
        key, fields = parseline(line)
        if not (key and fields):
            continue
        if lineno is not None:
            # Record the entry's position so lookup() can preserve
            # file order when sorting.
            fields['lineno'] = lineno
            lineno += 1
        # Normalize the key
        types = key.split('/')
        for j in range(len(types)):
            types[j] = types[j].strip()
        key = '/'.join(types).lower()
        # Update the database
        if key in caps:
            caps[key].append(fields)
        else:
            caps[key] = [fields]
    return caps, lineno
|
| 117 |
+
|
| 118 |
+
def parseline(line):
    """Parse one entry in a mailcap file and return a dictionary.

    The viewing command is stored as the value with the key "view",
    and the rest of the fields produce key-value pairs in the dict.
    Returns (None, None) if the line has fewer than two fields.
    """
    fields = []
    i, n = 0, len(line)
    while i < n:
        field, i = parsefield(line, i, n)
        fields.append(field)
        i = i+1 # Skip semicolon
    if len(fields) < 2:
        return None, None
    # First field is the MIME type, second the view command; any
    # remaining fields are key[=value] options.
    key, view, rest = fields[0], fields[1], fields[2:]
    fields = {'view': view}
    for field in rest:
        i = field.find('=')
        if i < 0:
            # Flag-style field with no value, e.g. "needsterminal".
            fkey = field
            fvalue = ""
        else:
            fkey = field[:i].strip()
            fvalue = field[i+1:].strip()
        if fkey in fields:
            # Ignore it
            pass
        else:
            fields[fkey] = fvalue
    return key, fields
|
| 148 |
+
|
| 149 |
+
def parsefield(line, i, n):
    """Scan one ';'-delimited field of *line* starting at index *i*.

    A backslash escapes the following character (including ';').
    Returns the stripped field text and the index of the terminating
    semicolon (or *n* when the line ends first).
    """
    start = i
    while i < n:
        ch = line[i]
        if ch == ';':
            break
        # A backslash consumes the next character as well.
        i += 2 if ch == '\\' else 1
    return line[start:i].strip(), i
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
# Part 3: using the database.
|
| 164 |
+
|
| 165 |
+
def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=[]):
    """Find a match for a mailcap entry.

    Return a tuple containing the command line, and the mailcap entry
    used; (None, None) if no match is found.  This may invoke the
    'test' command of several matching entries before deciding which
    entry to use.

    *plist* is a (read-only) list of "name=value" parameter strings
    used for %{name} substitutions.
    """
    if _find_unsafe(filename):
        msg = "Refusing to use mailcap with filename %r. Use a safe temporary filename." % (filename,)
        warnings.warn(msg, UnsafeMailcapInput)
        return None, None
    entries = lookup(caps, MIMEtype, key)
    # XXX This code should somehow check for the needsterminal flag.
    for e in entries:
        if 'test' in e:
            # Bug fix: subst() takes (field, MIMEtype, filename, plist).
            # The MIMEtype argument was previously omitted here, which
            # shifted filename into the MIMEtype slot and the plist list
            # into the filename slot, so a test field using %s raised
            # TypeError and %t substituted the filename.
            test = subst(e['test'], MIMEtype, filename, plist)
            if test is None:
                continue
            if test and os.system(test) != 0:
                continue
        command = subst(e[key], MIMEtype, filename, plist)
        if command is not None:
            return command, e
    return None, None
|
| 191 |
+
|
| 192 |
+
def lookup(caps, MIMEtype, key=None):
    """Return mailcap entries for *MIMEtype*, including 'type/*' wildcards.

    When *key* is given, only entries that define that field are kept.
    Results are ordered by their original mailcap line number.
    """
    entries = list(caps.get(MIMEtype, []))
    wildcard = MIMEtype.split('/')[0] + '/*'
    entries += caps.get(wildcard, [])
    if key is not None:
        entries = [e for e in entries if key in e]
    return sorted(entries, key=lineno_sort_key)
|
| 204 |
+
|
| 205 |
+
def subst(field, MIMEtype, filename, plist=[]):
    """Expand RFC 1524 %-escapes in *field* and return the command string.

    %s -> filename, %t -> MIMEtype, %{name} -> parameter from *plist*,
    %% -> literal '%'.  Returns None (after warning) if an unsafe
    MIMEtype or parameter value would be substituted into the command.
    """
    # XXX Actually, this is Unix-specific
    res = ''
    i, n = 0, len(field)
    while i < n:
        c = field[i]; i = i+1
        if c != '%':
            # A backslash escapes the next literal character.
            if c == '\\':
                c = field[i:i+1]; i = i+1
            res = res + c
        else:
            c = field[i]; i = i+1
            if c == '%':
                res = res + c
            elif c == 's':
                res = res + filename
            elif c == 't':
                if _find_unsafe(MIMEtype):
                    msg = "Refusing to substitute MIME type %r into a shell command." % (MIMEtype,)
                    warnings.warn(msg, UnsafeMailcapInput)
                    return None
                res = res + MIMEtype
            elif c == '{':
                # %{name}: look up the named parameter in plist.
                start = i
                while i < n and field[i] != '}':
                    i = i+1
                name = field[start:i]
                i = i+1
                param = findparam(name, plist)
                if _find_unsafe(param):
                    msg = "Refusing to substitute parameter %r (%s) into a shell command" % (param, name)
                    warnings.warn(msg, UnsafeMailcapInput)
                    return None
                res = res + param
            # XXX To do:
            # %n == number of parts if type is multipart/*
            # %F == list of alternating type and filename for parts
            else:
                # Unknown escape: keep it verbatim.
                res = res + '%' + c
    return res
|
| 245 |
+
|
| 246 |
+
def findparam(name, plist):
    """Return the value of parameter *name* from a list of 'key=value'
    strings, matching the key case-insensitively; '' if absent."""
    prefix = name.lower() + '='
    plen = len(prefix)
    for item in plist:
        if item[:plen].lower() == prefix:
            return item[plen:]
    return ''
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
# Part 4: test program.
|
| 256 |
+
|
| 257 |
+
def test():
    """Command-line driver: with no arguments dump the database,
    otherwise view each (MIMEtype, file) argument pair."""
    import sys
    caps = getcaps()
    if not sys.argv[1:]:
        show(caps)
        return
    for i in range(1, len(sys.argv), 2):
        args = sys.argv[i:i+2]
        if len(args) < 2:
            print("usage: mailcap [MIMEtype file] ...")
            return
        MIMEtype = args[0]
        file = args[1]
        command, e = findmatch(caps, MIMEtype, 'view', file)
        if not command:
            # Bug fix: this previously printed the builtin `type` class
            # object instead of the MIME type that was looked up.
            print("No viewer found for", MIMEtype)
        else:
            print("Executing:", command)
            sts = os.system(command)
            sts = os.waitstatus_to_exitcode(sts)
            if sts:
                print("Exit status:", sts)
|
| 279 |
+
|
| 280 |
+
def show(caps):
    """Print the mailcap search path and a dump of every entry."""
    print("Mailcap files:")
    for fn in listmailcapfiles():
        print("\t" + fn)
    print()
    if not caps:
        caps = getcaps()
    print("Mailcap entries:")
    print()
    for mime_type in sorted(caps):
        print(mime_type)
        for entry in caps[mime_type]:
            for field in sorted(entry):
                print(" %-15s" % field, entry[field])
            print()
|
| 296 |
+
|
| 297 |
+
# Run the command-line driver when this module is executed as a script.
if __name__ == '__main__':
    test()
|
evalkit_cambrian/lib/python3.10/opcode.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
"""
|
| 3 |
+
opcode module - potentially shared between dis and other modules which
|
| 4 |
+
operate on bytecodes (e.g. peephole optimizers).
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
|
| 8 |
+
"haslocal", "hascompare", "hasfree", "opname", "opmap",
|
| 9 |
+
"HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"]
|
| 10 |
+
|
| 11 |
+
# It's a chicken-and-egg I'm afraid:
|
| 12 |
+
# We're imported before _opcode's made.
|
| 13 |
+
# With exception unheeded
|
| 14 |
+
# (stack_effect is not needed)
|
| 15 |
+
# Both our chickens and eggs are allayed.
|
| 16 |
+
# --Larry Hastings, 2013/11/23
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
from _opcode import stack_effect
|
| 20 |
+
__all__.append('stack_effect')
|
| 21 |
+
except ImportError:
|
| 22 |
+
pass
|
| 23 |
+
|
| 24 |
+
cmp_op = ('<', '<=', '==', '!=', '>', '>=')
|
| 25 |
+
|
| 26 |
+
hasconst = []
|
| 27 |
+
hasname = []
|
| 28 |
+
hasjrel = []
|
| 29 |
+
hasjabs = []
|
| 30 |
+
haslocal = []
|
| 31 |
+
hascompare = []
|
| 32 |
+
hasfree = []
|
| 33 |
+
hasnargs = [] # unused
|
| 34 |
+
|
| 35 |
+
opmap = {}
|
| 36 |
+
opname = ['<%r>' % (op,) for op in range(256)]
|
| 37 |
+
|
| 38 |
+
def def_op(name, op):
|
| 39 |
+
opname[op] = name
|
| 40 |
+
opmap[name] = op
|
| 41 |
+
|
| 42 |
+
def name_op(name, op):
|
| 43 |
+
def_op(name, op)
|
| 44 |
+
hasname.append(op)
|
| 45 |
+
|
| 46 |
+
def jrel_op(name, op):
|
| 47 |
+
def_op(name, op)
|
| 48 |
+
hasjrel.append(op)
|
| 49 |
+
|
| 50 |
+
def jabs_op(name, op):
|
| 51 |
+
def_op(name, op)
|
| 52 |
+
hasjabs.append(op)
|
| 53 |
+
|
| 54 |
+
# Instruction opcodes for compiled code
|
| 55 |
+
# Blank lines correspond to available opcodes
|
| 56 |
+
|
| 57 |
+
def_op('POP_TOP', 1)
|
| 58 |
+
def_op('ROT_TWO', 2)
|
| 59 |
+
def_op('ROT_THREE', 3)
|
| 60 |
+
def_op('DUP_TOP', 4)
|
| 61 |
+
def_op('DUP_TOP_TWO', 5)
|
| 62 |
+
def_op('ROT_FOUR', 6)
|
| 63 |
+
|
| 64 |
+
def_op('NOP', 9)
|
| 65 |
+
def_op('UNARY_POSITIVE', 10)
|
| 66 |
+
def_op('UNARY_NEGATIVE', 11)
|
| 67 |
+
def_op('UNARY_NOT', 12)
|
| 68 |
+
|
| 69 |
+
def_op('UNARY_INVERT', 15)
|
| 70 |
+
def_op('BINARY_MATRIX_MULTIPLY', 16)
|
| 71 |
+
def_op('INPLACE_MATRIX_MULTIPLY', 17)
|
| 72 |
+
|
| 73 |
+
def_op('BINARY_POWER', 19)
|
| 74 |
+
def_op('BINARY_MULTIPLY', 20)
|
| 75 |
+
|
| 76 |
+
def_op('BINARY_MODULO', 22)
|
| 77 |
+
def_op('BINARY_ADD', 23)
|
| 78 |
+
def_op('BINARY_SUBTRACT', 24)
|
| 79 |
+
def_op('BINARY_SUBSCR', 25)
|
| 80 |
+
def_op('BINARY_FLOOR_DIVIDE', 26)
|
| 81 |
+
def_op('BINARY_TRUE_DIVIDE', 27)
|
| 82 |
+
def_op('INPLACE_FLOOR_DIVIDE', 28)
|
| 83 |
+
def_op('INPLACE_TRUE_DIVIDE', 29)
|
| 84 |
+
def_op('GET_LEN', 30)
|
| 85 |
+
def_op('MATCH_MAPPING', 31)
|
| 86 |
+
def_op('MATCH_SEQUENCE', 32)
|
| 87 |
+
def_op('MATCH_KEYS', 33)
|
| 88 |
+
def_op('COPY_DICT_WITHOUT_KEYS', 34)
|
| 89 |
+
|
| 90 |
+
def_op('WITH_EXCEPT_START', 49)
|
| 91 |
+
def_op('GET_AITER', 50)
|
| 92 |
+
def_op('GET_ANEXT', 51)
|
| 93 |
+
def_op('BEFORE_ASYNC_WITH', 52)
|
| 94 |
+
|
| 95 |
+
def_op('END_ASYNC_FOR', 54)
|
| 96 |
+
def_op('INPLACE_ADD', 55)
|
| 97 |
+
def_op('INPLACE_SUBTRACT', 56)
|
| 98 |
+
def_op('INPLACE_MULTIPLY', 57)
|
| 99 |
+
|
| 100 |
+
def_op('INPLACE_MODULO', 59)
|
| 101 |
+
def_op('STORE_SUBSCR', 60)
|
| 102 |
+
def_op('DELETE_SUBSCR', 61)
|
| 103 |
+
def_op('BINARY_LSHIFT', 62)
|
| 104 |
+
def_op('BINARY_RSHIFT', 63)
|
| 105 |
+
def_op('BINARY_AND', 64)
|
| 106 |
+
def_op('BINARY_XOR', 65)
|
| 107 |
+
def_op('BINARY_OR', 66)
|
| 108 |
+
def_op('INPLACE_POWER', 67)
|
| 109 |
+
def_op('GET_ITER', 68)
|
| 110 |
+
def_op('GET_YIELD_FROM_ITER', 69)
|
| 111 |
+
def_op('PRINT_EXPR', 70)
|
| 112 |
+
def_op('LOAD_BUILD_CLASS', 71)
|
| 113 |
+
def_op('YIELD_FROM', 72)
|
| 114 |
+
def_op('GET_AWAITABLE', 73)
|
| 115 |
+
def_op('LOAD_ASSERTION_ERROR', 74)
|
| 116 |
+
def_op('INPLACE_LSHIFT', 75)
|
| 117 |
+
def_op('INPLACE_RSHIFT', 76)
|
| 118 |
+
def_op('INPLACE_AND', 77)
|
| 119 |
+
def_op('INPLACE_XOR', 78)
|
| 120 |
+
def_op('INPLACE_OR', 79)
|
| 121 |
+
|
| 122 |
+
def_op('LIST_TO_TUPLE', 82)
|
| 123 |
+
def_op('RETURN_VALUE', 83)
|
| 124 |
+
def_op('IMPORT_STAR', 84)
|
| 125 |
+
def_op('SETUP_ANNOTATIONS', 85)
|
| 126 |
+
def_op('YIELD_VALUE', 86)
|
| 127 |
+
def_op('POP_BLOCK', 87)
|
| 128 |
+
|
| 129 |
+
def_op('POP_EXCEPT', 89)
|
| 130 |
+
|
| 131 |
+
HAVE_ARGUMENT = 90 # Opcodes from here have an argument:
|
| 132 |
+
|
| 133 |
+
name_op('STORE_NAME', 90) # Index in name list
|
| 134 |
+
name_op('DELETE_NAME', 91) # ""
|
| 135 |
+
def_op('UNPACK_SEQUENCE', 92) # Number of tuple items
|
| 136 |
+
jrel_op('FOR_ITER', 93)
|
| 137 |
+
def_op('UNPACK_EX', 94)
|
| 138 |
+
name_op('STORE_ATTR', 95) # Index in name list
|
| 139 |
+
name_op('DELETE_ATTR', 96) # ""
|
| 140 |
+
name_op('STORE_GLOBAL', 97) # ""
|
| 141 |
+
name_op('DELETE_GLOBAL', 98) # ""
|
| 142 |
+
def_op('ROT_N', 99)
|
| 143 |
+
def_op('LOAD_CONST', 100) # Index in const list
|
| 144 |
+
hasconst.append(100)
|
| 145 |
+
name_op('LOAD_NAME', 101) # Index in name list
|
| 146 |
+
def_op('BUILD_TUPLE', 102) # Number of tuple items
|
| 147 |
+
def_op('BUILD_LIST', 103) # Number of list items
|
| 148 |
+
def_op('BUILD_SET', 104) # Number of set items
|
| 149 |
+
def_op('BUILD_MAP', 105) # Number of dict entries
|
| 150 |
+
name_op('LOAD_ATTR', 106) # Index in name list
|
| 151 |
+
def_op('COMPARE_OP', 107) # Comparison operator
|
| 152 |
+
hascompare.append(107)
|
| 153 |
+
name_op('IMPORT_NAME', 108) # Index in name list
|
| 154 |
+
name_op('IMPORT_FROM', 109) # Index in name list
|
| 155 |
+
jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip
|
| 156 |
+
jabs_op('JUMP_IF_FALSE_OR_POP', 111) # Target byte offset from beginning of code
|
| 157 |
+
jabs_op('JUMP_IF_TRUE_OR_POP', 112) # ""
|
| 158 |
+
jabs_op('JUMP_ABSOLUTE', 113) # ""
|
| 159 |
+
jabs_op('POP_JUMP_IF_FALSE', 114) # ""
|
| 160 |
+
jabs_op('POP_JUMP_IF_TRUE', 115) # ""
|
| 161 |
+
name_op('LOAD_GLOBAL', 116) # Index in name list
|
| 162 |
+
def_op('IS_OP', 117)
|
| 163 |
+
def_op('CONTAINS_OP', 118)
|
| 164 |
+
def_op('RERAISE', 119)
|
| 165 |
+
|
| 166 |
+
jabs_op('JUMP_IF_NOT_EXC_MATCH', 121)
|
| 167 |
+
jrel_op('SETUP_FINALLY', 122) # Distance to target address
|
| 168 |
+
|
| 169 |
+
def_op('LOAD_FAST', 124) # Local variable number
|
| 170 |
+
haslocal.append(124)
|
| 171 |
+
def_op('STORE_FAST', 125) # Local variable number
|
| 172 |
+
haslocal.append(125)
|
| 173 |
+
def_op('DELETE_FAST', 126) # Local variable number
|
| 174 |
+
haslocal.append(126)
|
| 175 |
+
|
| 176 |
+
def_op('GEN_START', 129) # Kind of generator/coroutine
|
| 177 |
+
def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3)
|
| 178 |
+
def_op('CALL_FUNCTION', 131) # #args
|
| 179 |
+
def_op('MAKE_FUNCTION', 132) # Flags
|
| 180 |
+
def_op('BUILD_SLICE', 133) # Number of items
|
| 181 |
+
|
| 182 |
+
def_op('LOAD_CLOSURE', 135)
|
| 183 |
+
hasfree.append(135)
|
| 184 |
+
def_op('LOAD_DEREF', 136)
|
| 185 |
+
hasfree.append(136)
|
| 186 |
+
def_op('STORE_DEREF', 137)
|
| 187 |
+
hasfree.append(137)
|
| 188 |
+
def_op('DELETE_DEREF', 138)
|
| 189 |
+
hasfree.append(138)
|
| 190 |
+
|
| 191 |
+
def_op('CALL_FUNCTION_KW', 141) # #args + #kwargs
|
| 192 |
+
def_op('CALL_FUNCTION_EX', 142) # Flags
|
| 193 |
+
jrel_op('SETUP_WITH', 143)
|
| 194 |
+
def_op('EXTENDED_ARG', 144)
|
| 195 |
+
EXTENDED_ARG = 144
|
| 196 |
+
def_op('LIST_APPEND', 145)
|
| 197 |
+
def_op('SET_ADD', 146)
|
| 198 |
+
def_op('MAP_ADD', 147)
|
| 199 |
+
def_op('LOAD_CLASSDEREF', 148)
|
| 200 |
+
hasfree.append(148)
|
| 201 |
+
|
| 202 |
+
def_op('MATCH_CLASS', 152)
|
| 203 |
+
|
| 204 |
+
jrel_op('SETUP_ASYNC_WITH', 154)
|
| 205 |
+
def_op('FORMAT_VALUE', 155)
|
| 206 |
+
def_op('BUILD_CONST_KEY_MAP', 156)
|
| 207 |
+
def_op('BUILD_STRING', 157)
|
| 208 |
+
|
| 209 |
+
name_op('LOAD_METHOD', 160)
|
| 210 |
+
def_op('CALL_METHOD', 161)
|
| 211 |
+
def_op('LIST_EXTEND', 162)
|
| 212 |
+
def_op('SET_UPDATE', 163)
|
| 213 |
+
def_op('DICT_MERGE', 164)
|
| 214 |
+
def_op('DICT_UPDATE', 165)
|
| 215 |
+
|
| 216 |
+
del def_op, name_op, jrel_op, jabs_op
|
evalkit_cambrian/lib/python3.10/pdb.py
ADDED
|
@@ -0,0 +1,1750 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/bin/env python3
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
The Python Debugger Pdb
|
| 5 |
+
=======================
|
| 6 |
+
|
| 7 |
+
To use the debugger in its simplest form:
|
| 8 |
+
|
| 9 |
+
>>> import pdb
|
| 10 |
+
>>> pdb.run('<a statement>')
|
| 11 |
+
|
| 12 |
+
The debugger's prompt is '(Pdb) '. This will stop in the first
|
| 13 |
+
function call in <a statement>.
|
| 14 |
+
|
| 15 |
+
Alternatively, if a statement terminated with an unhandled exception,
|
| 16 |
+
you can use pdb's post-mortem facility to inspect the contents of the
|
| 17 |
+
traceback:
|
| 18 |
+
|
| 19 |
+
>>> <a statement>
|
| 20 |
+
<exception traceback>
|
| 21 |
+
>>> import pdb
|
| 22 |
+
>>> pdb.pm()
|
| 23 |
+
|
| 24 |
+
The commands recognized by the debugger are listed in the next
|
| 25 |
+
section. Most can be abbreviated as indicated; e.g., h(elp) means
|
| 26 |
+
that 'help' can be typed as 'h' or 'help' (but not as 'he' or 'hel',
|
| 27 |
+
nor as 'H' or 'Help' or 'HELP'). Optional arguments are enclosed in
|
| 28 |
+
square brackets. Alternatives in the command syntax are separated
|
| 29 |
+
by a vertical bar (|).
|
| 30 |
+
|
| 31 |
+
A blank line repeats the previous command literally, except for
|
| 32 |
+
'list', where it lists the next 11 lines.
|
| 33 |
+
|
| 34 |
+
Commands that the debugger doesn't recognize are assumed to be Python
|
| 35 |
+
statements and are executed in the context of the program being
|
| 36 |
+
debugged. Python statements can also be prefixed with an exclamation
|
| 37 |
+
point ('!'). This is a powerful way to inspect the program being
|
| 38 |
+
debugged; it is even possible to change variables or call functions.
|
| 39 |
+
When an exception occurs in such a statement, the exception name is
|
| 40 |
+
printed but the debugger's state is not changed.
|
| 41 |
+
|
| 42 |
+
The debugger supports aliases, which can save typing. And aliases can
|
| 43 |
+
have parameters (see the alias help entry) which allows one a certain
|
| 44 |
+
level of adaptability to the context under examination.
|
| 45 |
+
|
| 46 |
+
Multiple commands may be entered on a single line, separated by the
|
| 47 |
+
pair ';;'. No intelligence is applied to separating the commands; the
|
| 48 |
+
input is split at the first ';;', even if it is in the middle of a
|
| 49 |
+
quoted string.
|
| 50 |
+
|
| 51 |
+
If a file ".pdbrc" exists in your home directory or in the current
|
| 52 |
+
directory, it is read in and executed as if it had been typed at the
|
| 53 |
+
debugger prompt. This is particularly useful for aliases. If both
|
| 54 |
+
files exist, the one in the home directory is read first and aliases
|
| 55 |
+
defined there can be overridden by the local file. This behavior can be
|
| 56 |
+
disabled by passing the "readrc=False" argument to the Pdb constructor.
|
| 57 |
+
|
| 58 |
+
Aside from aliases, the debugger is not directly programmable; but it
|
| 59 |
+
is implemented as a class from which you can derive your own debugger
|
| 60 |
+
class, which you can make as fancy as you like.
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
Debugger commands
|
| 64 |
+
=================
|
| 65 |
+
|
| 66 |
+
"""
|
| 67 |
+
# NOTE: the actual command documentation is collected from docstrings of the
|
| 68 |
+
# commands and is appended to __doc__ after the class has been defined.
|
| 69 |
+
|
| 70 |
+
import os
|
| 71 |
+
import io
|
| 72 |
+
import re
|
| 73 |
+
import sys
|
| 74 |
+
import cmd
|
| 75 |
+
import bdb
|
| 76 |
+
import dis
|
| 77 |
+
import code
|
| 78 |
+
import glob
|
| 79 |
+
import pprint
|
| 80 |
+
import signal
|
| 81 |
+
import inspect
|
| 82 |
+
import tokenize
|
| 83 |
+
import traceback
|
| 84 |
+
import linecache
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class Restart(Exception):
|
| 88 |
+
"""Causes a debugger to be restarted for the debugged python program."""
|
| 89 |
+
pass
|
| 90 |
+
|
| 91 |
+
__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace",
|
| 92 |
+
"post_mortem", "help"]
|
| 93 |
+
|
| 94 |
+
def find_function(funcname, filename):
|
| 95 |
+
cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname))
|
| 96 |
+
try:
|
| 97 |
+
fp = tokenize.open(filename)
|
| 98 |
+
except OSError:
|
| 99 |
+
return None
|
| 100 |
+
# consumer of this info expects the first line to be 1
|
| 101 |
+
with fp:
|
| 102 |
+
for lineno, line in enumerate(fp, start=1):
|
| 103 |
+
if cre.match(line):
|
| 104 |
+
return funcname, filename, lineno
|
| 105 |
+
return None
|
| 106 |
+
|
| 107 |
+
def lasti2lineno(code, lasti):
|
| 108 |
+
linestarts = list(dis.findlinestarts(code))
|
| 109 |
+
linestarts.reverse()
|
| 110 |
+
for i, lineno in linestarts:
|
| 111 |
+
if lasti >= i:
|
| 112 |
+
return lineno
|
| 113 |
+
return 0
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class _rstr(str):
|
| 117 |
+
"""String that doesn't quote its repr."""
|
| 118 |
+
def __repr__(self):
|
| 119 |
+
return self
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
# Interaction prompt line will separate file and call info from code
|
| 123 |
+
# text using value of line_prefix string. A newline and arrow may
|
| 124 |
+
# be to your liking. You can set it once pdb is imported using the
|
| 125 |
+
# command "pdb.line_prefix = '\n% '".
|
| 126 |
+
# line_prefix = ': ' # Use this to get the old situation back
|
| 127 |
+
line_prefix = '\n-> ' # Probably a better default
|
| 128 |
+
|
| 129 |
+
class Pdb(bdb.Bdb, cmd.Cmd):
|
| 130 |
+
|
| 131 |
+
_previous_sigint_handler = None
|
| 132 |
+
|
| 133 |
+
def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None,
|
| 134 |
+
nosigint=False, readrc=True):
|
| 135 |
+
bdb.Bdb.__init__(self, skip=skip)
|
| 136 |
+
cmd.Cmd.__init__(self, completekey, stdin, stdout)
|
| 137 |
+
sys.audit("pdb.Pdb")
|
| 138 |
+
if stdout:
|
| 139 |
+
self.use_rawinput = 0
|
| 140 |
+
self.prompt = '(Pdb) '
|
| 141 |
+
self.aliases = {}
|
| 142 |
+
self.displaying = {}
|
| 143 |
+
self.mainpyfile = ''
|
| 144 |
+
self._wait_for_mainpyfile = False
|
| 145 |
+
self.tb_lineno = {}
|
| 146 |
+
# Try to load readline if it exists
|
| 147 |
+
try:
|
| 148 |
+
import readline
|
| 149 |
+
# remove some common file name delimiters
|
| 150 |
+
readline.set_completer_delims(' \t\n`@#$%^&*()=+[{]}\\|;:\'",<>?')
|
| 151 |
+
except ImportError:
|
| 152 |
+
pass
|
| 153 |
+
self.allow_kbdint = False
|
| 154 |
+
self.nosigint = nosigint
|
| 155 |
+
|
| 156 |
+
# Read ~/.pdbrc and ./.pdbrc
|
| 157 |
+
self.rcLines = []
|
| 158 |
+
if readrc:
|
| 159 |
+
try:
|
| 160 |
+
with open(os.path.expanduser('~/.pdbrc')) as rcFile:
|
| 161 |
+
self.rcLines.extend(rcFile)
|
| 162 |
+
except OSError:
|
| 163 |
+
pass
|
| 164 |
+
try:
|
| 165 |
+
with open(".pdbrc") as rcFile:
|
| 166 |
+
self.rcLines.extend(rcFile)
|
| 167 |
+
except OSError:
|
| 168 |
+
pass
|
| 169 |
+
|
| 170 |
+
self.commands = {} # associates a command list to breakpoint numbers
|
| 171 |
+
self.commands_doprompt = {} # for each bp num, tells if the prompt
|
| 172 |
+
# must be disp. after execing the cmd list
|
| 173 |
+
self.commands_silent = {} # for each bp num, tells if the stack trace
|
| 174 |
+
# must be disp. after execing the cmd list
|
| 175 |
+
self.commands_defining = False # True while in the process of defining
|
| 176 |
+
# a command list
|
| 177 |
+
self.commands_bnum = None # The breakpoint number for which we are
|
| 178 |
+
# defining a list
|
| 179 |
+
|
| 180 |
+
def sigint_handler(self, signum, frame):
|
| 181 |
+
if self.allow_kbdint:
|
| 182 |
+
raise KeyboardInterrupt
|
| 183 |
+
self.message("\nProgram interrupted. (Use 'cont' to resume).")
|
| 184 |
+
self.set_step()
|
| 185 |
+
self.set_trace(frame)
|
| 186 |
+
|
| 187 |
+
def reset(self):
|
| 188 |
+
bdb.Bdb.reset(self)
|
| 189 |
+
self.forget()
|
| 190 |
+
|
| 191 |
+
def forget(self):
|
| 192 |
+
self.lineno = None
|
| 193 |
+
self.stack = []
|
| 194 |
+
self.curindex = 0
|
| 195 |
+
self.curframe = None
|
| 196 |
+
self.tb_lineno.clear()
|
| 197 |
+
|
| 198 |
+
def setup(self, f, tb):
|
| 199 |
+
self.forget()
|
| 200 |
+
self.stack, self.curindex = self.get_stack(f, tb)
|
| 201 |
+
while tb:
|
| 202 |
+
# when setting up post-mortem debugging with a traceback, save all
|
| 203 |
+
# the original line numbers to be displayed along the current line
|
| 204 |
+
# numbers (which can be different, e.g. due to finally clauses)
|
| 205 |
+
lineno = lasti2lineno(tb.tb_frame.f_code, tb.tb_lasti)
|
| 206 |
+
self.tb_lineno[tb.tb_frame] = lineno
|
| 207 |
+
tb = tb.tb_next
|
| 208 |
+
self.curframe = self.stack[self.curindex][0]
|
| 209 |
+
# The f_locals dictionary is updated from the actual frame
|
| 210 |
+
# locals whenever the .f_locals accessor is called, so we
|
| 211 |
+
# cache it here to ensure that modifications are not overwritten.
|
| 212 |
+
self.curframe_locals = self.curframe.f_locals
|
| 213 |
+
return self.execRcLines()
|
| 214 |
+
|
| 215 |
+
# Can be executed earlier than 'setup' if desired
|
| 216 |
+
def execRcLines(self):
|
| 217 |
+
if not self.rcLines:
|
| 218 |
+
return
|
| 219 |
+
# local copy because of recursion
|
| 220 |
+
rcLines = self.rcLines
|
| 221 |
+
rcLines.reverse()
|
| 222 |
+
# execute every line only once
|
| 223 |
+
self.rcLines = []
|
| 224 |
+
while rcLines:
|
| 225 |
+
line = rcLines.pop().strip()
|
| 226 |
+
if line and line[0] != '#':
|
| 227 |
+
if self.onecmd(line):
|
| 228 |
+
# if onecmd returns True, the command wants to exit
|
| 229 |
+
# from the interaction, save leftover rc lines
|
| 230 |
+
# to execute before next interaction
|
| 231 |
+
self.rcLines += reversed(rcLines)
|
| 232 |
+
return True
|
| 233 |
+
|
| 234 |
+
# Override Bdb methods
|
| 235 |
+
|
| 236 |
+
def user_call(self, frame, argument_list):
|
| 237 |
+
"""This method is called when there is the remote possibility
|
| 238 |
+
that we ever need to stop in this function."""
|
| 239 |
+
if self._wait_for_mainpyfile:
|
| 240 |
+
return
|
| 241 |
+
if self.stop_here(frame):
|
| 242 |
+
self.message('--Call--')
|
| 243 |
+
self.interaction(frame, None)
|
| 244 |
+
|
| 245 |
+
def user_line(self, frame):
|
| 246 |
+
"""This function is called when we stop or break at this line."""
|
| 247 |
+
if self._wait_for_mainpyfile:
|
| 248 |
+
if (self.mainpyfile != self.canonic(frame.f_code.co_filename)
|
| 249 |
+
or frame.f_lineno <= 0):
|
| 250 |
+
return
|
| 251 |
+
self._wait_for_mainpyfile = False
|
| 252 |
+
if self.bp_commands(frame):
|
| 253 |
+
self.interaction(frame, None)
|
| 254 |
+
|
| 255 |
+
def bp_commands(self, frame):
|
| 256 |
+
"""Call every command that was set for the current active breakpoint
|
| 257 |
+
(if there is one).
|
| 258 |
+
|
| 259 |
+
Returns True if the normal interaction function must be called,
|
| 260 |
+
False otherwise."""
|
| 261 |
+
# self.currentbp is set in bdb in Bdb.break_here if a breakpoint was hit
|
| 262 |
+
if getattr(self, "currentbp", False) and \
|
| 263 |
+
self.currentbp in self.commands:
|
| 264 |
+
currentbp = self.currentbp
|
| 265 |
+
self.currentbp = 0
|
| 266 |
+
lastcmd_back = self.lastcmd
|
| 267 |
+
self.setup(frame, None)
|
| 268 |
+
for line in self.commands[currentbp]:
|
| 269 |
+
self.onecmd(line)
|
| 270 |
+
self.lastcmd = lastcmd_back
|
| 271 |
+
if not self.commands_silent[currentbp]:
|
| 272 |
+
self.print_stack_entry(self.stack[self.curindex])
|
| 273 |
+
if self.commands_doprompt[currentbp]:
|
| 274 |
+
self._cmdloop()
|
| 275 |
+
self.forget()
|
| 276 |
+
return
|
| 277 |
+
return 1
|
| 278 |
+
|
| 279 |
+
def user_return(self, frame, return_value):
|
| 280 |
+
"""This function is called when a return trap is set here."""
|
| 281 |
+
if self._wait_for_mainpyfile:
|
| 282 |
+
return
|
| 283 |
+
frame.f_locals['__return__'] = return_value
|
| 284 |
+
self.message('--Return--')
|
| 285 |
+
self.interaction(frame, None)
|
| 286 |
+
|
| 287 |
+
def user_exception(self, frame, exc_info):
|
| 288 |
+
"""This function is called if an exception occurs,
|
| 289 |
+
but only if we are to stop at or just below this level."""
|
| 290 |
+
if self._wait_for_mainpyfile:
|
| 291 |
+
return
|
| 292 |
+
exc_type, exc_value, exc_traceback = exc_info
|
| 293 |
+
frame.f_locals['__exception__'] = exc_type, exc_value
|
| 294 |
+
|
| 295 |
+
# An 'Internal StopIteration' exception is an exception debug event
|
| 296 |
+
# issued by the interpreter when handling a subgenerator run with
|
| 297 |
+
# 'yield from' or a generator controlled by a for loop. No exception has
|
| 298 |
+
# actually occurred in this case. The debugger uses this debug event to
|
| 299 |
+
# stop when the debuggee is returning from such generators.
|
| 300 |
+
prefix = 'Internal ' if (not exc_traceback
|
| 301 |
+
and exc_type is StopIteration) else ''
|
| 302 |
+
self.message('%s%s' % (prefix,
|
| 303 |
+
traceback.format_exception_only(exc_type, exc_value)[-1].strip()))
|
| 304 |
+
self.interaction(frame, exc_traceback)
|
| 305 |
+
|
| 306 |
+
# General interaction function
|
| 307 |
+
def _cmdloop(self):
|
| 308 |
+
while True:
|
| 309 |
+
try:
|
| 310 |
+
# keyboard interrupts allow for an easy way to cancel
|
| 311 |
+
# the current command, so allow them during interactive input
|
| 312 |
+
self.allow_kbdint = True
|
| 313 |
+
self.cmdloop()
|
| 314 |
+
self.allow_kbdint = False
|
| 315 |
+
break
|
| 316 |
+
except KeyboardInterrupt:
|
| 317 |
+
self.message('--KeyboardInterrupt--')
|
| 318 |
+
|
| 319 |
+
# Called before loop, handles display expressions
|
| 320 |
+
def preloop(self):
|
| 321 |
+
displaying = self.displaying.get(self.curframe)
|
| 322 |
+
if displaying:
|
| 323 |
+
for expr, oldvalue in displaying.items():
|
| 324 |
+
newvalue = self._getval_except(expr)
|
| 325 |
+
# check for identity first; this prevents custom __eq__ to
|
| 326 |
+
# be called at every loop, and also prevents instances whose
|
| 327 |
+
# fields are changed to be displayed
|
| 328 |
+
if newvalue is not oldvalue and newvalue != oldvalue:
|
| 329 |
+
displaying[expr] = newvalue
|
| 330 |
+
self.message('display %s: %r [old: %r]' %
|
| 331 |
+
(expr, newvalue, oldvalue))
|
| 332 |
+
|
| 333 |
+
def interaction(self, frame, traceback):
|
| 334 |
+
# Restore the previous signal handler at the Pdb prompt.
|
| 335 |
+
if Pdb._previous_sigint_handler:
|
| 336 |
+
try:
|
| 337 |
+
signal.signal(signal.SIGINT, Pdb._previous_sigint_handler)
|
| 338 |
+
except ValueError: # ValueError: signal only works in main thread
|
| 339 |
+
pass
|
| 340 |
+
else:
|
| 341 |
+
Pdb._previous_sigint_handler = None
|
| 342 |
+
if self.setup(frame, traceback):
|
| 343 |
+
# no interaction desired at this time (happens if .pdbrc contains
|
| 344 |
+
# a command like "continue")
|
| 345 |
+
self.forget()
|
| 346 |
+
return
|
| 347 |
+
self.print_stack_entry(self.stack[self.curindex])
|
| 348 |
+
self._cmdloop()
|
| 349 |
+
self.forget()
|
| 350 |
+
|
| 351 |
+
def displayhook(self, obj):
|
| 352 |
+
"""Custom displayhook for the exec in default(), which prevents
|
| 353 |
+
assignment of the _ variable in the builtins.
|
| 354 |
+
"""
|
| 355 |
+
# reproduce the behavior of the standard displayhook, not printing None
|
| 356 |
+
if obj is not None:
|
| 357 |
+
self.message(repr(obj))
|
| 358 |
+
|
| 359 |
+
    def default(self, line):
        """Execute *line* as a Python statement in the current frame.

        Called by cmd.Cmd for any input that is not a recognized debugger
        command; a leading '!' is stripped first.
        """
        if line[:1] == '!': line = line[1:]
        locals = self.curframe_locals
        globals = self.curframe.f_globals
        try:
            code = compile(line + '\n', '<stdin>', 'single')
            save_stdout = sys.stdout
            save_stdin = sys.stdin
            save_displayhook = sys.displayhook
            try:
                # Redirect the standard streams to the debugger's own, so the
                # statement talks to the user rather than the program's I/O.
                sys.stdin = self.stdin
                sys.stdout = self.stdout
                sys.displayhook = self.displayhook
                exec(code, globals, locals)
            finally:
                sys.stdout = save_stdout
                sys.stdin = save_stdin
                sys.displayhook = save_displayhook
        except:
            self._error_exc()
|
| 379 |
+
|
| 380 |
+
    def precmd(self, line):
        """Handle alias expansion and ';;' separator."""
        if not line.strip():
            return line
        args = line.split()
        # Expand aliases repeatedly, so an alias may refer to another alias.
        while args[0] in self.aliases:
            line = self.aliases[args[0]]
            ii = 1
            # %1, %2, ... substitute positional arguments; %* all of them.
            for tmpArg in args[1:]:
                line = line.replace("%" + str(ii),
                                    tmpArg)
                ii += 1
            line = line.replace("%*", ' '.join(args[1:]))
            args = line.split()
        # split into ';;' separated commands
        # unless it's an alias command
        if args[0] != 'alias':
            marker = line.find(';;')
            if marker >= 0:
                # queue up everything after marker
                next = line[marker+2:].lstrip()
                self.cmdqueue.append(next)
                line = line[:marker].rstrip()
        return line
|
| 404 |
+
|
| 405 |
+
def onecmd(self, line):
|
| 406 |
+
"""Interpret the argument as though it had been typed in response
|
| 407 |
+
to the prompt.
|
| 408 |
+
|
| 409 |
+
Checks whether this line is typed at the normal prompt or in
|
| 410 |
+
a breakpoint command list definition.
|
| 411 |
+
"""
|
| 412 |
+
if not self.commands_defining:
|
| 413 |
+
return cmd.Cmd.onecmd(self, line)
|
| 414 |
+
else:
|
| 415 |
+
return self.handle_command_def(line)
|
| 416 |
+
|
| 417 |
+
    def handle_command_def(self, line):
        """Handles one command line during command list definition."""
        cmd, arg, line = self.parseline(line)
        if not cmd:
            return
        if cmd == 'silent':
            self.commands_silent[self.commands_bnum] = True
            return  # continue to handle other cmd def in the cmd list
        elif cmd == 'end':
            self.cmdqueue = []
            return 1  # end of cmd list
        cmdlist = self.commands[self.commands_bnum]
        if arg:
            cmdlist.append(cmd+' '+arg)
        else:
            cmdlist.append(cmd)
        # Determine if we must stop
        try:
            func = getattr(self, 'do_' + cmd)
        except AttributeError:
            func = self.default
        # one of the resuming commands
        if func.__name__ in self.commands_resuming:
            # A resuming command implicitly terminates the definition list.
            self.commands_doprompt[self.commands_bnum] = False
            self.cmdqueue = []
            return 1
        return
|
| 444 |
+
|
| 445 |
+
# interface abstraction functions
|
| 446 |
+
|
| 447 |
+
def message(self, msg):
|
| 448 |
+
print(msg, file=self.stdout)
|
| 449 |
+
|
| 450 |
+
def error(self, msg):
|
| 451 |
+
print('***', msg, file=self.stdout)
|
| 452 |
+
|
| 453 |
+
    # Generic completion functions. Individual complete_foo methods can be
    # assigned below to one of these functions.

    def _complete_location(self, text, line, begidx, endidx):
        # Complete a file/module/function location for break/tbreak/clear.
        if line.strip().endswith((':', ',')):
            # Here comes a line number or a condition which we can't complete.
            return []
        # First, try to find matching functions (i.e. expressions).
        try:
            ret = self._complete_expression(text, line, begidx, endidx)
        except Exception:
            ret = []
        # Then, try to complete file names as well.
        globs = glob.glob(glob.escape(text) + '*')
        for fn in globs:
            if os.path.isdir(fn):
                ret.append(fn + '/')
            elif os.path.isfile(fn) and fn.lower().endswith(('.py', '.pyw')):
                # A trailing ':' invites the user to add a line number.
                ret.append(fn + ':')
        return ret
|
| 474 |
+
|
| 475 |
+
def _complete_bpnumber(self, text, line, begidx, endidx):
|
| 476 |
+
# Complete a breakpoint number. (This would be more helpful if we could
|
| 477 |
+
# display additional info along with the completions, such as file/line
|
| 478 |
+
# of the breakpoint.)
|
| 479 |
+
return [str(i) for i, bp in enumerate(bdb.Breakpoint.bpbynumber)
|
| 480 |
+
if bp is not None and str(i).startswith(text)]
|
| 481 |
+
|
| 482 |
+
def _complete_expression(self, text, line, begidx, endidx):
|
| 483 |
+
# Complete an arbitrary expression.
|
| 484 |
+
if not self.curframe:
|
| 485 |
+
return []
|
| 486 |
+
# Collect globals and locals. It is usually not really sensible to also
|
| 487 |
+
# complete builtins, and they clutter the namespace quite heavily, so we
|
| 488 |
+
# leave them out.
|
| 489 |
+
ns = {**self.curframe.f_globals, **self.curframe_locals}
|
| 490 |
+
if '.' in text:
|
| 491 |
+
# Walk an attribute chain up to the last part, similar to what
|
| 492 |
+
# rlcompleter does. This will bail if any of the parts are not
|
| 493 |
+
# simple attribute access, which is what we want.
|
| 494 |
+
dotted = text.split('.')
|
| 495 |
+
try:
|
| 496 |
+
obj = ns[dotted[0]]
|
| 497 |
+
for part in dotted[1:-1]:
|
| 498 |
+
obj = getattr(obj, part)
|
| 499 |
+
except (KeyError, AttributeError):
|
| 500 |
+
return []
|
| 501 |
+
prefix = '.'.join(dotted[:-1]) + '.'
|
| 502 |
+
return [prefix + n for n in dir(obj) if n.startswith(dotted[-1])]
|
| 503 |
+
else:
|
| 504 |
+
# Complete a simple name.
|
| 505 |
+
return [n for n in ns.keys() if n.startswith(text)]
|
| 506 |
+
|
| 507 |
+
# Command definitions, called by cmdloop()
|
| 508 |
+
# The argument is the remaining string on the command line
|
| 509 |
+
# Return true to exit from the command loop
|
| 510 |
+
|
| 511 |
+
def do_commands(self, arg):
|
| 512 |
+
"""commands [bpnumber]
|
| 513 |
+
(com) ...
|
| 514 |
+
(com) end
|
| 515 |
+
(Pdb)
|
| 516 |
+
|
| 517 |
+
Specify a list of commands for breakpoint number bpnumber.
|
| 518 |
+
The commands themselves are entered on the following lines.
|
| 519 |
+
Type a line containing just 'end' to terminate the commands.
|
| 520 |
+
The commands are executed when the breakpoint is hit.
|
| 521 |
+
|
| 522 |
+
To remove all commands from a breakpoint, type commands and
|
| 523 |
+
follow it immediately with end; that is, give no commands.
|
| 524 |
+
|
| 525 |
+
With no bpnumber argument, commands refers to the last
|
| 526 |
+
breakpoint set.
|
| 527 |
+
|
| 528 |
+
You can use breakpoint commands to start your program up
|
| 529 |
+
again. Simply use the continue command, or step, or any other
|
| 530 |
+
command that resumes execution.
|
| 531 |
+
|
| 532 |
+
Specifying any command resuming execution (currently continue,
|
| 533 |
+
step, next, return, jump, quit and their abbreviations)
|
| 534 |
+
terminates the command list (as if that command was
|
| 535 |
+
immediately followed by end). This is because any time you
|
| 536 |
+
resume execution (even with a simple next or step), you may
|
| 537 |
+
encounter another breakpoint -- which could have its own
|
| 538 |
+
command list, leading to ambiguities about which list to
|
| 539 |
+
execute.
|
| 540 |
+
|
| 541 |
+
If you use the 'silent' command in the command list, the usual
|
| 542 |
+
message about stopping at a breakpoint is not printed. This
|
| 543 |
+
may be desirable for breakpoints that are to print a specific
|
| 544 |
+
message and then continue. If none of the other commands
|
| 545 |
+
print anything, you will see no sign that the breakpoint was
|
| 546 |
+
reached.
|
| 547 |
+
"""
|
| 548 |
+
if not arg:
|
| 549 |
+
bnum = len(bdb.Breakpoint.bpbynumber) - 1
|
| 550 |
+
else:
|
| 551 |
+
try:
|
| 552 |
+
bnum = int(arg)
|
| 553 |
+
except:
|
| 554 |
+
self.error("Usage: commands [bnum]\n ...\n end")
|
| 555 |
+
return
|
| 556 |
+
self.commands_bnum = bnum
|
| 557 |
+
# Save old definitions for the case of a keyboard interrupt.
|
| 558 |
+
if bnum in self.commands:
|
| 559 |
+
old_command_defs = (self.commands[bnum],
|
| 560 |
+
self.commands_doprompt[bnum],
|
| 561 |
+
self.commands_silent[bnum])
|
| 562 |
+
else:
|
| 563 |
+
old_command_defs = None
|
| 564 |
+
self.commands[bnum] = []
|
| 565 |
+
self.commands_doprompt[bnum] = True
|
| 566 |
+
self.commands_silent[bnum] = False
|
| 567 |
+
|
| 568 |
+
prompt_back = self.prompt
|
| 569 |
+
self.prompt = '(com) '
|
| 570 |
+
self.commands_defining = True
|
| 571 |
+
try:
|
| 572 |
+
self.cmdloop()
|
| 573 |
+
except KeyboardInterrupt:
|
| 574 |
+
# Restore old definitions.
|
| 575 |
+
if old_command_defs:
|
| 576 |
+
self.commands[bnum] = old_command_defs[0]
|
| 577 |
+
self.commands_doprompt[bnum] = old_command_defs[1]
|
| 578 |
+
self.commands_silent[bnum] = old_command_defs[2]
|
| 579 |
+
else:
|
| 580 |
+
del self.commands[bnum]
|
| 581 |
+
del self.commands_doprompt[bnum]
|
| 582 |
+
del self.commands_silent[bnum]
|
| 583 |
+
self.error('command definition aborted, old commands restored')
|
| 584 |
+
finally:
|
| 585 |
+
self.commands_defining = False
|
| 586 |
+
self.prompt = prompt_back
|
| 587 |
+
|
| 588 |
+
complete_commands = _complete_bpnumber
|
| 589 |
+
|
| 590 |
+
    def do_break(self, arg, temporary = 0):
        """b(reak) [ ([filename:]lineno | function) [, condition] ]
        Without argument, list all breaks.

        With a line number argument, set a break at this line in the
        current file. With a function name, set a break at the first
        executable line of that function. If a second argument is
        present, it is a string specifying an expression which must
        evaluate to true before the breakpoint is honored.

        The line number may be prefixed with a filename and a colon,
        to specify a breakpoint in another file (probably one that
        hasn't been loaded yet). The file is searched for on
        sys.path; the .py suffix may be omitted.
        """
        if not arg:
            if self.breaks:  # There's at least one
                self.message("Num Type Disp Enb Where")
                for bp in bdb.Breakpoint.bpbynumber:
                    if bp:
                        self.message(bp.bpformat())
            return
        # parse arguments; comma has lowest precedence
        # and cannot occur in filename
        filename = None
        lineno = None
        cond = None
        comma = arg.find(',')
        if comma > 0:
            # parse stuff after comma: "condition"
            cond = arg[comma+1:].lstrip()
            arg = arg[:comma].rstrip()
        # parse stuff before comma: [filename:]lineno | function
        colon = arg.rfind(':')
        funcname = None
        if colon >= 0:
            # rfind, so a Windows drive letter colon is not mistaken for
            # the filename:lineno separator.
            filename = arg[:colon].rstrip()
            f = self.lookupmodule(filename)
            if not f:
                self.error('%r not found from sys.path' % filename)
                return
            else:
                filename = f
            arg = arg[colon+1:].lstrip()
            try:
                lineno = int(arg)
            except ValueError:
                self.error('Bad lineno: %s' % arg)
                return
        else:
            # no colon; can be lineno or function
            try:
                lineno = int(arg)
            except ValueError:
                try:
                    func = eval(arg,
                                self.curframe.f_globals,
                                self.curframe_locals)
                except:
                    func = arg
                try:
                    if hasattr(func, '__func__'):
                        func = func.__func__
                    code = func.__code__
                    #use co_name to identify the bkpt (function names
                    #could be aliased, but co_name is invariant)
                    funcname = code.co_name
                    lineno = code.co_firstlineno
                    filename = code.co_filename
                except:
                    # last thing to try
                    (ok, filename, ln) = self.lineinfo(arg)
                    if not ok:
                        self.error('The specified object %r is not a function '
                                   'or was not found along sys.path.' % arg)
                        return
                    funcname = ok  # ok contains a function name
                    lineno = int(ln)
        if not filename:
            filename = self.defaultFile()
        # Check for reasonable breakpoint
        line = self.checkline(filename, lineno)
        if line:
            # now set the break point
            err = self.set_break(filename, line, temporary, cond, funcname)
            if err:
                self.error(err)
            else:
                bp = self.get_breaks(filename, line)[-1]
                self.message("Breakpoint %d at %s:%d" %
                             (bp.number, bp.file, bp.line))
|
| 681 |
+
|
| 682 |
+
# To be overridden in derived debuggers
|
| 683 |
+
def defaultFile(self):
|
| 684 |
+
"""Produce a reasonable default."""
|
| 685 |
+
filename = self.curframe.f_code.co_filename
|
| 686 |
+
if filename == '<string>' and self.mainpyfile:
|
| 687 |
+
filename = self.mainpyfile
|
| 688 |
+
return filename
|
| 689 |
+
|
| 690 |
+
    # Abbreviation and tab-completion hooks for the break command.
    do_b = do_break

    complete_break = _complete_location
    complete_b = _complete_location
|
| 694 |
+
|
| 695 |
+
def do_tbreak(self, arg):
|
| 696 |
+
"""tbreak [ ([filename:]lineno | function) [, condition] ]
|
| 697 |
+
Same arguments as break, but sets a temporary breakpoint: it
|
| 698 |
+
is automatically deleted when first hit.
|
| 699 |
+
"""
|
| 700 |
+
self.do_break(arg, 1)
|
| 701 |
+
|
| 702 |
+
complete_tbreak = _complete_location
|
| 703 |
+
|
| 704 |
+
def lineinfo(self, identifier):
|
| 705 |
+
failed = (None, None, None)
|
| 706 |
+
# Input is identifier, may be in single quotes
|
| 707 |
+
idstring = identifier.split("'")
|
| 708 |
+
if len(idstring) == 1:
|
| 709 |
+
# not in single quotes
|
| 710 |
+
id = idstring[0].strip()
|
| 711 |
+
elif len(idstring) == 3:
|
| 712 |
+
# quoted
|
| 713 |
+
id = idstring[1].strip()
|
| 714 |
+
else:
|
| 715 |
+
return failed
|
| 716 |
+
if id == '': return failed
|
| 717 |
+
parts = id.split('.')
|
| 718 |
+
# Protection for derived debuggers
|
| 719 |
+
if parts[0] == 'self':
|
| 720 |
+
del parts[0]
|
| 721 |
+
if len(parts) == 0:
|
| 722 |
+
return failed
|
| 723 |
+
# Best first guess at file to look at
|
| 724 |
+
fname = self.defaultFile()
|
| 725 |
+
if len(parts) == 1:
|
| 726 |
+
item = parts[0]
|
| 727 |
+
else:
|
| 728 |
+
# More than one part.
|
| 729 |
+
# First is module, second is method/class
|
| 730 |
+
f = self.lookupmodule(parts[0])
|
| 731 |
+
if f:
|
| 732 |
+
fname = f
|
| 733 |
+
item = parts[1]
|
| 734 |
+
answer = find_function(item, fname)
|
| 735 |
+
return answer or failed
|
| 736 |
+
|
| 737 |
+
def checkline(self, filename, lineno):
|
| 738 |
+
"""Check whether specified line seems to be executable.
|
| 739 |
+
|
| 740 |
+
Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank
|
| 741 |
+
line or EOF). Warning: testing is not comprehensive.
|
| 742 |
+
"""
|
| 743 |
+
# this method should be callable before starting debugging, so default
|
| 744 |
+
# to "no globals" if there is no current frame
|
| 745 |
+
frame = getattr(self, 'curframe', None)
|
| 746 |
+
globs = frame.f_globals if frame else None
|
| 747 |
+
line = linecache.getline(filename, lineno, globs)
|
| 748 |
+
if not line:
|
| 749 |
+
self.message('End of file')
|
| 750 |
+
return 0
|
| 751 |
+
line = line.strip()
|
| 752 |
+
# Don't allow setting breakpoint at a blank line
|
| 753 |
+
if (not line or (line[0] == '#') or
|
| 754 |
+
(line[:3] == '"""') or line[:3] == "'''"):
|
| 755 |
+
self.error('Blank or comment')
|
| 756 |
+
return 0
|
| 757 |
+
return lineno
|
| 758 |
+
|
| 759 |
+
def do_enable(self, arg):
|
| 760 |
+
"""enable bpnumber [bpnumber ...]
|
| 761 |
+
Enables the breakpoints given as a space separated list of
|
| 762 |
+
breakpoint numbers.
|
| 763 |
+
"""
|
| 764 |
+
args = arg.split()
|
| 765 |
+
for i in args:
|
| 766 |
+
try:
|
| 767 |
+
bp = self.get_bpbynumber(i)
|
| 768 |
+
except ValueError as err:
|
| 769 |
+
self.error(err)
|
| 770 |
+
else:
|
| 771 |
+
bp.enable()
|
| 772 |
+
self.message('Enabled %s' % bp)
|
| 773 |
+
|
| 774 |
+
    complete_enable = _complete_bpnumber

    def do_disable(self, arg):
        """disable bpnumber [bpnumber ...]
        Disables the breakpoints given as a space separated list of
        breakpoint numbers. Disabling a breakpoint means it cannot
        cause the program to stop execution, but unlike clearing a
        breakpoint, it remains in the list of breakpoints and can be
        (re-)enabled.
        """
        args = arg.split()
        for i in args:
            try:
                bp = self.get_bpbynumber(i)
            except ValueError as err:
                # get_bpbynumber() rejects unknown or malformed numbers.
                self.error(err)
            else:
                bp.disable()
                self.message('Disabled %s' % bp)

    complete_disable = _complete_bpnumber
|
| 795 |
+
|
| 796 |
+
    def do_condition(self, arg):
        """condition bpnumber [condition]
        Set a new condition for the breakpoint, an expression which
        must evaluate to true before the breakpoint is honored. If
        condition is absent, any existing condition is removed; i.e.,
        the breakpoint is made unconditional.
        """
        args = arg.split(' ', 1)
        try:
            cond = args[1]
        except IndexError:
            # No second word: clear any existing condition.
            cond = None
        try:
            bp = self.get_bpbynumber(args[0].strip())
        except IndexError:
            self.error('Breakpoint number expected')
        except ValueError as err:
            self.error(err)
        else:
            bp.cond = cond
            if not cond:
                self.message('Breakpoint %d is now unconditional.' % bp.number)
            else:
                self.message('New condition set for breakpoint %d.' % bp.number)

    complete_condition = _complete_bpnumber
|
| 822 |
+
|
| 823 |
+
def do_ignore(self, arg):
|
| 824 |
+
"""ignore bpnumber [count]
|
| 825 |
+
Set the ignore count for the given breakpoint number. If
|
| 826 |
+
count is omitted, the ignore count is set to 0. A breakpoint
|
| 827 |
+
becomes active when the ignore count is zero. When non-zero,
|
| 828 |
+
the count is decremented each time the breakpoint is reached
|
| 829 |
+
and the breakpoint is not disabled and any associated
|
| 830 |
+
condition evaluates to true.
|
| 831 |
+
"""
|
| 832 |
+
args = arg.split()
|
| 833 |
+
try:
|
| 834 |
+
count = int(args[1].strip())
|
| 835 |
+
except:
|
| 836 |
+
count = 0
|
| 837 |
+
try:
|
| 838 |
+
bp = self.get_bpbynumber(args[0].strip())
|
| 839 |
+
except IndexError:
|
| 840 |
+
self.error('Breakpoint number expected')
|
| 841 |
+
except ValueError as err:
|
| 842 |
+
self.error(err)
|
| 843 |
+
else:
|
| 844 |
+
bp.ignore = count
|
| 845 |
+
if count > 0:
|
| 846 |
+
if count > 1:
|
| 847 |
+
countstr = '%d crossings' % count
|
| 848 |
+
else:
|
| 849 |
+
countstr = '1 crossing'
|
| 850 |
+
self.message('Will ignore next %s of breakpoint %d.' %
|
| 851 |
+
(countstr, bp.number))
|
| 852 |
+
else:
|
| 853 |
+
self.message('Will stop next time breakpoint %d is reached.'
|
| 854 |
+
% bp.number)
|
| 855 |
+
|
| 856 |
+
complete_ignore = _complete_bpnumber
|
| 857 |
+
|
| 858 |
+
    def do_clear(self, arg):
        """cl(ear) filename:lineno\ncl(ear) [bpnumber [bpnumber...]]
        With a space separated list of breakpoint numbers, clear
        those breakpoints. Without argument, clear all breaks (but
        first ask confirmation). With a filename:lineno argument,
        clear all breaks at that line in that file.
        """
        if not arg:
            try:
                reply = input('Clear all breaks? ')
            except EOFError:
                reply = 'no'
            reply = reply.strip().lower()
            if reply in ('y', 'yes'):
                # Snapshot the breakpoints first so they can be reported
                # after clearing.
                bplist = [bp for bp in bdb.Breakpoint.bpbynumber if bp]
                self.clear_all_breaks()
                for bp in bplist:
                    self.message('Deleted %s' % bp)
            return
        if ':' in arg:
            # Make sure it works for "clear C:\foo\bar.py:12"
            i = arg.rfind(':')
            filename = arg[:i]
            arg = arg[i+1:]
            try:
                lineno = int(arg)
            except ValueError:
                err = "Invalid line number (%s)" % arg
            else:
                bplist = self.get_breaks(filename, lineno)[:]
                err = self.clear_break(filename, lineno)
            if err:
                self.error(err)
            else:
                for bp in bplist:
                    self.message('Deleted %s' % bp)
            return
        numberlist = arg.split()
        for i in numberlist:
            try:
                bp = self.get_bpbynumber(i)
            except ValueError as err:
                self.error(err)
            else:
                self.clear_bpbynumber(i)
                self.message('Deleted %s' % bp)
    do_cl = do_clear  # 'c' is already an abbreviation for 'continue'

    complete_clear = _complete_location
    complete_cl = _complete_location
|
| 908 |
+
|
| 909 |
+
def do_where(self, arg):
|
| 910 |
+
"""w(here)
|
| 911 |
+
Print a stack trace, with the most recent frame at the bottom.
|
| 912 |
+
An arrow indicates the "current frame", which determines the
|
| 913 |
+
context of most commands. 'bt' is an alias for this command.
|
| 914 |
+
"""
|
| 915 |
+
self.print_stack_trace()
|
| 916 |
+
do_w = do_where
|
| 917 |
+
do_bt = do_where
|
| 918 |
+
|
| 919 |
+
    def _select_frame(self, number):
        """Make stack entry *number* the current frame and display it."""
        assert 0 <= number < len(self.stack)
        self.curindex = number
        self.curframe = self.stack[self.curindex][0]
        # Cache the locals mapping for use by the other commands.
        self.curframe_locals = self.curframe.f_locals
        self.print_stack_entry(self.stack[self.curindex])
        # Reset the position remembered by the "list" command.
        self.lineno = None
|
| 926 |
+
|
| 927 |
+
def do_up(self, arg):
|
| 928 |
+
"""u(p) [count]
|
| 929 |
+
Move the current frame count (default one) levels up in the
|
| 930 |
+
stack trace (to an older frame).
|
| 931 |
+
"""
|
| 932 |
+
if self.curindex == 0:
|
| 933 |
+
self.error('Oldest frame')
|
| 934 |
+
return
|
| 935 |
+
try:
|
| 936 |
+
count = int(arg or 1)
|
| 937 |
+
except ValueError:
|
| 938 |
+
self.error('Invalid frame count (%s)' % arg)
|
| 939 |
+
return
|
| 940 |
+
if count < 0:
|
| 941 |
+
newframe = 0
|
| 942 |
+
else:
|
| 943 |
+
newframe = max(0, self.curindex - count)
|
| 944 |
+
self._select_frame(newframe)
|
| 945 |
+
do_u = do_up
|
| 946 |
+
|
| 947 |
+
def do_down(self, arg):
|
| 948 |
+
"""d(own) [count]
|
| 949 |
+
Move the current frame count (default one) levels down in the
|
| 950 |
+
stack trace (to a newer frame).
|
| 951 |
+
"""
|
| 952 |
+
if self.curindex + 1 == len(self.stack):
|
| 953 |
+
self.error('Newest frame')
|
| 954 |
+
return
|
| 955 |
+
try:
|
| 956 |
+
count = int(arg or 1)
|
| 957 |
+
except ValueError:
|
| 958 |
+
self.error('Invalid frame count (%s)' % arg)
|
| 959 |
+
return
|
| 960 |
+
if count < 0:
|
| 961 |
+
newframe = len(self.stack) - 1
|
| 962 |
+
else:
|
| 963 |
+
newframe = min(len(self.stack) - 1, self.curindex + count)
|
| 964 |
+
self._select_frame(newframe)
|
| 965 |
+
do_d = do_down
|
| 966 |
+
|
| 967 |
+
    def do_until(self, arg):
        """unt(il) [lineno]
        Without argument, continue execution until the line with a
        number greater than the current one is reached. With a line
        number, continue execution until a line with a number greater
        or equal to that is reached. In both cases, also stop when
        the current frame returns.
        """
        if arg:
            try:
                lineno = int(arg)
            except ValueError:
                self.error('Error in argument: %r' % arg)
                return
            if lineno <= self.curframe.f_lineno:
                # "until" only runs forward; a backwards target would
                # never be reached.
                self.error('"until" line number is smaller than current '
                           'line number')
                return
        else:
            lineno = None
        self.set_until(self.curframe, lineno)
        # Non-zero return value exits the command loop and resumes execution.
        return 1
    do_unt = do_until
|
| 990 |
+
|
| 991 |
+
def do_step(self, arg):
|
| 992 |
+
"""s(tep)
|
| 993 |
+
Execute the current line, stop at the first possible occasion
|
| 994 |
+
(either in a function that is called or in the current
|
| 995 |
+
function).
|
| 996 |
+
"""
|
| 997 |
+
self.set_step()
|
| 998 |
+
return 1
|
| 999 |
+
do_s = do_step
|
| 1000 |
+
|
| 1001 |
+
def do_next(self, arg):
|
| 1002 |
+
"""n(ext)
|
| 1003 |
+
Continue execution until the next line in the current function
|
| 1004 |
+
is reached or it returns.
|
| 1005 |
+
"""
|
| 1006 |
+
self.set_next(self.curframe)
|
| 1007 |
+
return 1
|
| 1008 |
+
do_n = do_next
|
| 1009 |
+
|
| 1010 |
+
    def do_run(self, arg):
        """run [args...]
        Restart the debugged python program. If a string is supplied
        it is split with "shlex", and the result is used as the new
        sys.argv. History, breakpoints, actions and debugger options
        are preserved. "restart" is an alias for "run".
        """
        if arg:
            import shlex
            argv0 = sys.argv[0:1]
            try:
                sys.argv = shlex.split(arg)
            except ValueError as e:
                # shlex rejects e.g. unbalanced quotes; keep the old argv.
                self.error('Cannot run %s: %s' % (arg, e))
                return
            # Keep the original script name as argv[0].
            sys.argv[:0] = argv0
        # this is caught in the main debugger loop
        raise Restart

    do_restart = do_run
|
| 1030 |
+
|
| 1031 |
+
def do_return(self, arg):
|
| 1032 |
+
"""r(eturn)
|
| 1033 |
+
Continue execution until the current function returns.
|
| 1034 |
+
"""
|
| 1035 |
+
self.set_return(self.curframe)
|
| 1036 |
+
return 1
|
| 1037 |
+
do_r = do_return
|
| 1038 |
+
|
| 1039 |
+
    def do_continue(self, arg):
        """c(ont(inue))
        Continue execution, only stop when a breakpoint is encountered.
        """
        if not self.nosigint:
            try:
                # Install our SIGINT handler so Ctrl-C re-enters the debugger;
                # keep the old handler so interaction() can restore it.
                Pdb._previous_sigint_handler = \
                    signal.signal(signal.SIGINT, self.sigint_handler)
            except ValueError:
                # ValueError happens when do_continue() is invoked from
                # a non-main thread in which case we just continue without
                # SIGINT set. Would printing a message here (once) make
                # sense?
                pass
        self.set_continue()
        # Non-zero return value exits the command loop and resumes execution.
        return 1
    do_c = do_cont = do_continue
|
| 1056 |
+
|
| 1057 |
+
    def do_jump(self, arg):
        """j(ump) lineno
        Set the next line that will be executed. Only available in
        the bottom-most frame. This lets you jump back and execute
        code again, or jump forward to skip code that you don't want
        to run.

        It should be noted that not all jumps are allowed -- for
        instance it is not possible to jump into the middle of a
        for loop or out of a finally clause.
        """
        if self.curindex + 1 != len(self.stack):
            self.error('You can only jump within the bottom frame')
            return
        try:
            arg = int(arg)
        except ValueError:
            self.error("The 'jump' command requires a line number")
        else:
            try:
                # Do the jump, fix up our copy of the stack, and display the
                # new position
                self.curframe.f_lineno = arg
                self.stack[self.curindex] = self.stack[self.curindex][0], arg
                self.print_stack_entry(self.stack[self.curindex])
            except ValueError as e:
                # Assigning f_lineno raises ValueError for illegal jumps.
                self.error('Jump failed: %s' % e)
    do_j = do_jump
|
| 1085 |
+
|
| 1086 |
+
    def do_debug(self, arg):
        """debug code
        Enter a recursive debugger that steps through the code
        argument (which is an arbitrary expression or statement to be
        executed in the current environment).
        """
        # Suspend tracing for the outer debugger while the inner one runs.
        sys.settrace(None)
        globals = self.curframe.f_globals
        locals = self.curframe_locals
        p = Pdb(self.completekey, self.stdin, self.stdout)
        # Nest the prompt so the user can tell the recursion depth.
        p.prompt = "(%s) " % self.prompt.strip()
        self.message("ENTERING RECURSIVE DEBUGGER")
        try:
            sys.call_tracing(p.run, (arg, globals, locals))
        except Exception:
            self._error_exc()
        self.message("LEAVING RECURSIVE DEBUGGER")
        # Re-arm tracing for the outer debugger.
        sys.settrace(self.trace_dispatch)
        self.lastcmd = p.lastcmd

    complete_debug = _complete_expression
|
| 1107 |
+
|
| 1108 |
+
    def do_quit(self, arg):
        """q(uit)\nexit
        Quit from the debugger. The program being executed is aborted.
        """
        # Remember the quit was explicit so the outer loop does not restart.
        self._user_requested_quit = True
        self.set_quit()
        return 1

    do_q = do_quit
    do_exit = do_quit
|
| 1118 |
+
|
| 1119 |
+
    def do_EOF(self, arg):
        """EOF
        Handles the receipt of EOF as a command.
        """
        # Emit a newline so the shell prompt starts on a fresh line.
        self.message('')
        self._user_requested_quit = True
        self.set_quit()
        return 1
|
| 1127 |
+
|
| 1128 |
+
    def do_args(self, arg):
        """a(rgs)
        Print the argument list of the current function.
        """
        co = self.curframe.f_code
        dict = self.curframe_locals
        n = co.co_argcount + co.co_kwonlyargcount
        # *args and **kwargs, when present, follow the named arguments.
        if co.co_flags & inspect.CO_VARARGS: n = n+1
        if co.co_flags & inspect.CO_VARKEYWORDS: n = n+1
        for i in range(n):
            name = co.co_varnames[i]
            if name in dict:
                self.message('%s = %r' % (name, dict[name]))
            else:
                # The name was deleted or is not bound in this frame.
                self.message('%s = *** undefined ***' % (name,))
    do_a = do_args
|
| 1144 |
+
|
| 1145 |
+
def do_retval(self, arg):
|
| 1146 |
+
"""retval
|
| 1147 |
+
Print the return value for the last return of a function.
|
| 1148 |
+
"""
|
| 1149 |
+
if '__return__' in self.curframe_locals:
|
| 1150 |
+
self.message(repr(self.curframe_locals['__return__']))
|
| 1151 |
+
else:
|
| 1152 |
+
self.error('Not yet returned!')
|
| 1153 |
+
do_rv = do_retval
|
| 1154 |
+
|
| 1155 |
+
    def _getval(self, arg):
        """Evaluate *arg* in the current frame; report and re-raise on error."""
        try:
            return eval(arg, self.curframe.f_globals, self.curframe_locals)
        except:
            # Show the error to the user, then let the caller see it too.
            self._error_exc()
            raise
|
| 1161 |
+
|
| 1162 |
+
    def _getval_except(self, arg, frame=None):
        """Evaluate *arg*, returning a printable error marker on failure."""
        try:
            if frame is None:
                return eval(arg, self.curframe.f_globals, self.curframe_locals)
            else:
                return eval(arg, frame.f_globals, frame.f_locals)
        except:
            # Never propagate: callers (e.g. display expressions) only need
            # something printable.
            exc_info = sys.exc_info()[:2]
            err = traceback.format_exception_only(*exc_info)[-1].strip()
            return _rstr('** raised %s **' % err)
|
| 1172 |
+
|
| 1173 |
+
def _error_exc(self):
|
| 1174 |
+
exc_info = sys.exc_info()[:2]
|
| 1175 |
+
self.error(traceback.format_exception_only(*exc_info)[-1].strip())
|
| 1176 |
+
|
| 1177 |
+
def _msg_val_func(self, arg, func):
|
| 1178 |
+
try:
|
| 1179 |
+
val = self._getval(arg)
|
| 1180 |
+
except:
|
| 1181 |
+
return # _getval() has displayed the error
|
| 1182 |
+
try:
|
| 1183 |
+
self.message(func(val))
|
| 1184 |
+
except:
|
| 1185 |
+
self._error_exc()
|
| 1186 |
+
|
| 1187 |
+
def do_p(self, arg):
    """p expression
    Print the value of the expression.
    """
    # repr() gives the standard single-line rendering.
    self._msg_val_func(arg, repr)
def do_pp(self, arg):
    """pp expression
    Pretty-print the value of the expression.
    """
    # pprint.pformat wraps/structures large containers for readability.
    self._msg_val_func(arg, pprint.pformat)

# Expression-printing commands share the generic expression completer.
complete_print = _complete_expression
complete_p = _complete_expression
complete_pp = _complete_expression
def do_list(self, arg):
    """l(ist) [first [,last] | .]

    List source code for the current file. Without arguments,
    list 11 lines around the current line or continue the previous
    listing. With . as argument, list 11 lines around the current
    line. With one argument, list 11 lines starting at that line.
    With two arguments, list the given range; if the second
    argument is less than the first, it is a count.

    The current line in the current frame is indicated by "->".
    If an exception is being debugged, the line where the
    exception was originally raised or propagated is indicated by
    ">>", if it differs from the current line.
    """
    self.lastcmd = 'list'
    last = None
    if arg and arg != '.':
        try:
            if ',' in arg:
                # Two-argument form: explicit first,last range.
                first, last = arg.split(',')
                first = int(first.strip())
                last = int(last.strip())
                if last < first:
                    # assume it's a count
                    last = first + last
            else:
                # One-argument form: center an 11-line window there.
                first = int(arg.strip())
                first = max(1, first - 5)
        except ValueError:
            self.error('Error in argument: %r' % arg)
            return
    elif self.lineno is None or arg == '.':
        # No previous listing (or explicit '.'): center on the current line.
        first = max(1, self.curframe.f_lineno - 5)
    else:
        # Bare 'list' after a previous one: continue where we stopped.
        first = self.lineno + 1
    if last is None:
        last = first + 10
    filename = self.curframe.f_code.co_filename
    # gh-93696: stdlib frozen modules provide a useful __file__
    # this workaround can be removed with the closure of gh-89815
    if filename.startswith("<frozen"):
        tmp = self.curframe.f_globals.get("__file__")
        if isinstance(tmp, str):
            filename = tmp
    breaklist = self.get_file_breaks(filename)
    try:
        lines = linecache.getlines(filename, self.curframe.f_globals)
        self._print_lines(lines[first-1:last], first, breaklist,
                          self.curframe)
        # Remember where we stopped so the next bare 'list' continues.
        self.lineno = min(last, len(lines))
        if len(lines) < last:
            self.message('[EOF]')
    except KeyboardInterrupt:
        pass
do_l = do_list
def do_longlist(self, arg):
    """longlist | ll
    List the whole source code for the current function or frame.
    """
    frame = self.curframe
    breaklist = self.get_file_breaks(frame.f_code.co_filename)
    try:
        lines, first = inspect.getsourcelines(frame)
    except OSError as err:
        # No source available for this frame.
        self.error(err)
        return
    self._print_lines(lines, first, breaklist, frame)
do_ll = do_longlist
def do_source(self, arg):
    """source expression
    Try to get source code for the given object and display it.
    """
    try:
        obj = self._getval(arg)
    except:
        # _getval() already reported the evaluation failure.
        return
    try:
        lines, first = inspect.getsourcelines(obj)
    except (OSError, TypeError) as err:
        self.error(err)
    else:
        self._print_lines(lines, first)

complete_source = _complete_expression
def _print_lines(self, lines, start, breaks=(), frame=None):
|
| 1292 |
+
"""Print a range of lines."""
|
| 1293 |
+
if frame:
|
| 1294 |
+
current_lineno = frame.f_lineno
|
| 1295 |
+
exc_lineno = self.tb_lineno.get(frame, -1)
|
| 1296 |
+
else:
|
| 1297 |
+
current_lineno = exc_lineno = -1
|
| 1298 |
+
for lineno, line in enumerate(lines, start):
|
| 1299 |
+
s = str(lineno).rjust(3)
|
| 1300 |
+
if len(s) < 4:
|
| 1301 |
+
s += ' '
|
| 1302 |
+
if lineno in breaks:
|
| 1303 |
+
s += 'B'
|
| 1304 |
+
else:
|
| 1305 |
+
s += ' '
|
| 1306 |
+
if lineno == current_lineno:
|
| 1307 |
+
s += '->'
|
| 1308 |
+
elif lineno == exc_lineno:
|
| 1309 |
+
s += '>>'
|
| 1310 |
+
self.message(s + '\t' + line.rstrip())
|
| 1311 |
+
|
| 1312 |
+
def do_whatis(self, arg):
    """whatis arg
    Print the type of the argument.
    """
    try:
        value = self._getval(arg)
    except:
        # _getval() already printed the error
        return
    code = None
    # Is it an instance method?  Broad Exception catch is deliberate:
    # attribute access on arbitrary objects may raise anything.
    try:
        code = value.__func__.__code__
    except Exception:
        pass
    if code:
        self.message('Method %s' % code.co_name)
        return
    # Is it a function?
    try:
        code = value.__code__
    except Exception:
        pass
    if code:
        self.message('Function %s' % code.co_name)
        return
    # Is it a class?
    if value.__class__ is type:
        self.message('Class %s.%s' % (value.__module__, value.__qualname__))
        return
    # None of the above...
    self.message(type(value))

complete_whatis = _complete_expression
def do_display(self, arg):
    """display [expression]

    Display the value of the expression if it changed, each time execution
    stops in the current frame.

    Without expression, list all display expressions for the current frame.
    """
    if arg:
        # Record the expression and its current value for this frame.
        val = self._getval_except(arg)
        self.displaying.setdefault(self.curframe, {})[arg] = val
        self.message('display %s: %r' % (arg, val))
    else:
        self.message('Currently displaying:')
        for item in self.displaying.get(self.curframe, {}).items():
            self.message('%s: %r' % item)

complete_display = _complete_expression
def do_undisplay(self, arg):
    """undisplay [expression]

    Do not display the expression any more in the current frame.

    Without expression, clear all display expressions for the current frame.
    """
    if not arg:
        # Drop the whole display table for this frame, if any.
        self.displaying.pop(self.curframe, None)
        return
    try:
        del self.displaying.get(self.curframe, {})[arg]
    except KeyError:
        self.error('not displaying %s' % arg)
def complete_undisplay(self, text, line, begidx, endidx):
    # Offer the expressions currently being displayed in this frame.
    exprs = self.displaying.get(self.curframe, {})
    return [expr for expr in exprs if expr.startswith(text)]
def do_interact(self, arg):
    """interact

    Start an interactive interpreter whose global namespace
    contains all the (global and local) names found in the current scope.
    """
    # Merge the namespaces; locals win on a key collision, matching
    # ordinary name resolution.
    namespace = {**self.curframe.f_globals, **self.curframe_locals}
    code.interact("*interactive*", local=namespace)
def do_alias(self, arg):
    """alias [name [command [parameter parameter ...] ]]
    Create an alias called 'name' that executes 'command'. The
    command must *not* be enclosed in quotes. Replaceable
    parameters can be indicated by %1, %2, and so on, while %* is
    replaced by all the parameters. If no command is given, the
    current alias for name is shown. If no name is given, all
    aliases are listed.

    Aliases may be nested and can contain anything that can be
    legally typed at the pdb prompt. Note! You *can* override
    internal pdb commands with aliases! Those internal commands
    are then hidden until the alias is removed. Aliasing is
    recursively applied to the first word of the command line; all
    other words in the line are left alone.

    As an example, here are two useful aliases (especially when
    placed in the .pdbrc file):

    # Print instance variables (usage "pi classInst")
    alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k])
    # Print instance variables in self
    alias ps pi self
    """
    words = arg.split()
    if not words:
        # No arguments: list every alias, sorted by name.
        for name in sorted(self.aliases):
            self.message("%s = %s" % (name, self.aliases[name]))
        return
    name = words[0]
    if name in self.aliases and len(words) == 1:
        # Just a known name: show its expansion.
        self.message("%s = %s" % (name, self.aliases[name]))
    else:
        # Define (or redefine) the alias as the rest of the line.
        self.aliases[name] = ' '.join(words[1:])
def do_unalias(self, arg):
    """unalias name
    Delete the specified alias.
    """
    words = arg.split()
    if not words:
        return
    # Unknown names are silently ignored, matching historical behaviour.
    self.aliases.pop(words[0], None)
def complete_unalias(self, text, line, begidx, endidx):
    # Complete against the currently defined alias names.
    return [name for name in self.aliases if name.startswith(text)]
# List of all the commands making the program resume execution.
|
| 1442 |
+
commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return',
|
| 1443 |
+
'do_quit', 'do_jump']
|
| 1444 |
+
|
| 1445 |
+
# Print a traceback starting at the top stack frame.
|
| 1446 |
+
# The most recently entered frame is printed last;
|
| 1447 |
+
# this is different from dbx and gdb, but consistent with
|
| 1448 |
+
# the Python interpreter's stack trace.
|
| 1449 |
+
# It is also consistent with the up/down commands (which are
|
| 1450 |
+
# compatible with dbx and gdb: up moves towards 'main()'
|
| 1451 |
+
# and down moves towards the most recent stack frame).
|
| 1452 |
+
|
| 1453 |
+
def print_stack_trace(self):
    # Oldest frame first, matching the interpreter's traceback order;
    # a Ctrl-C simply truncates the listing.
    try:
        for entry in self.stack:
            self.print_stack_entry(entry)
    except KeyboardInterrupt:
        pass
def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix):
    frame, lineno = frame_lineno
    # '>' marks the frame the debugger is currently focused on.
    marker = '> ' if frame is self.curframe else '  '
    self.message(marker +
                 self.format_stack_entry(frame_lineno, prompt_prefix))
# Provide help
|
| 1470 |
+
|
| 1471 |
+
def do_help(self, arg):
    """h(elp)
    Without argument, print the list of available commands.
    With a command name as argument, print help about that command.
    "help pdb" shows the full pdb documentation.
    "help exec" gives help on the ! command.
    """
    if not arg:
        return cmd.Cmd.do_help(self, arg)
    try:
        try:
            # A dedicated help_<arg> method wins over the docstring.
            topic = getattr(self, 'help_' + arg)
            return topic()
        except AttributeError:
            command = getattr(self, 'do_' + arg)
    except AttributeError:
        self.error('No help for %r' % arg)
    else:
        if sys.flags.optimize >= 2:
            # -OO strips docstrings, so there is nothing to show.
            self.error('No help for %r; please do not run Python with -OO '
                       'if you need command help' % arg)
        elif command.__doc__ is None:
            self.error('No help for %r; __doc__ string missing' % arg)
        else:
            self.message(command.__doc__.rstrip())

do_h = do_help
def help_exec(self):
    """(!) statement
    Execute the (one-line) statement in the context of the current
    stack frame. The exclamation point can be omitted unless the
    first word of the statement resembles a debugger command. To
    assign to a global variable you must always prefix the command
    with a 'global' command, e.g.:
    (Pdb) global list_options; list_options = ['-l']
    (Pdb)
    """
    # The docstring above doubles as the help text itself.
    doc = self.help_exec.__doc__ or ''
    self.message(doc.strip())
def help_pdb(self):
    # Full manual: page the module docstring via the module-level help().
    help()
# other helper functions
|
| 1516 |
+
|
| 1517 |
+
def lookupmodule(self, filename):
    """Helper function for break/clear parsing -- may be overridden.

    lookupmodule() translates (possibly incomplete) file or module name
    into an absolute file name.
    """
    # Absolute existing path: nothing to resolve.
    if os.path.isabs(filename) and os.path.exists(filename):
        return filename
    # Perhaps it names the main script, relative to sys.path[0].
    candidate = os.path.join(sys.path[0], filename)
    if os.path.exists(candidate) and self.canonic(candidate) == self.mainpyfile:
        return candidate
    # Bare module name: append the .py suffix.
    root, ext = os.path.splitext(filename)
    if ext == '':
        filename = filename + '.py'
    if os.path.isabs(filename):
        return filename
    for dirname in sys.path:
        # Resolve symlinked path entries before joining.
        while os.path.islink(dirname):
            dirname = os.readlink(dirname)
        fullname = os.path.join(dirname, filename)
        if os.path.exists(fullname):
            return fullname
    return None
def _runmodule(self, module_name):
    # Arrange to stop only once the module's own code is reached
    # (see user_line/user_call elsewhere in this class).
    self._wait_for_mainpyfile = True
    self._user_requested_quit = False
    import runpy
    mod_name, mod_spec, code = runpy._get_module_details(module_name)
    self.mainpyfile = self.canonic(code.co_filename)
    # Run in a pristine __main__ namespace, as "python -m" would;
    # clearing also removes leftovers from a previous (re)run.
    import __main__
    __main__.__dict__.clear()
    __main__.__dict__.update({
        "__name__": "__main__",
        "__file__": self.mainpyfile,
        "__package__": mod_spec.parent,
        "__loader__": mod_spec.loader,
        "__spec__": mod_spec,
        "__builtins__": __builtins__,
    })
    self.run(code)
def _runscript(self, filename):
    # The script has to run in __main__ namespace (or imports from
    # __main__ will break).
    #
    # So we clear up the __main__ and set several special variables
    # (this gets rid of pdb's globals and cleans old variables on restarts).
    import __main__
    __main__.__dict__.clear()
    __main__.__dict__.update({"__name__" : "__main__",
                              "__file__" : filename,
                              "__builtins__": __builtins__,
                             })

    # When bdb sets tracing, a number of call and line events happens
    # BEFORE debugger even reaches user's code (and the exact sequence of
    # events depends on python version). So we take special measures to
    # avoid stopping before we reach the main script (see user_line and
    # user_call for details).
    self._wait_for_mainpyfile = True
    self.mainpyfile = self.canonic(filename)
    self._user_requested_quit = False
    # io.open_code() honours the interpreter's code-opening hooks/audits.
    with io.open_code(filename) as fp:
        statement = "exec(compile(%r, %r, 'exec'))" % \
                    (fp.read(), self.mainpyfile)
    self.run(statement)
if __doc__ is not None:
    # unfortunately we can't guess this order from the class definition
    _help_order = [
        'help', 'where', 'down', 'up', 'break', 'tbreak', 'clear', 'disable',
        'enable', 'ignore', 'condition', 'commands', 'step', 'next', 'until',
        'jump', 'return', 'retval', 'run', 'continue', 'list', 'longlist',
        'args', 'p', 'pp', 'whatis', 'source', 'display', 'undisplay',
        'interact', 'alias', 'unalias', 'debug', 'quit',
    ]

    # Append each command's docstring to the module documentation so that
    # help(pdb) (and "help pdb" inside the debugger) shows all commands.
    for _command in _help_order:
        __doc__ += getattr(Pdb, 'do_' + _command).__doc__.strip() + '\n\n'
    __doc__ += Pdb.help_exec.__doc__

    # Don't leak the loop temporaries as module attributes.
    del _help_order, _command
# Simplified interface
|
| 1605 |
+
|
| 1606 |
+
def run(statement, globals=None, locals=None):
    """Execute *statement* under debugger control (see Pdb.run)."""
    Pdb().run(statement, globals, locals)
def runeval(expression, globals=None, locals=None):
    """Evaluate *expression* under debugger control and return its value."""
    return Pdb().runeval(expression, globals, locals)
def runctx(statement, globals, locals):
    """Deprecated spelling of run() kept for backward compatibility."""
    # B/W compatibility
    run(statement, globals, locals)
def runcall(*args, **kwds):
    """Call args[0] with the remaining arguments under debugger control."""
    return Pdb().runcall(*args, **kwds)
def set_trace(*, header=None):
    """Enter the debugger at the caller's frame.

    If *header* is given, print it to the console just before debugging
    begins.
    """
    debugger = Pdb()
    if header is not None:
        debugger.message(header)
    # f_back is the frame that called set_trace().
    debugger.set_trace(sys._getframe().f_back)
# Post-Mortem interface
|
| 1626 |
+
|
| 1627 |
+
def post_mortem(t=None):
    """Enter post-mortem debugging of the given traceback object.

    Without an argument, use the traceback of the exception currently
    being handled (raises ValueError if there is none).
    """
    if t is None:
        # sys.exc_info() returns (type, value, traceback) if an exception is
        # being handled, otherwise it returns None
        t = sys.exc_info()[2]
        if t is None:
            raise ValueError("A valid traceback must be passed if no "
                             "exception is being handled")

    debugger = Pdb()
    debugger.reset()
    debugger.interaction(None, t)
def pm():
    """Post-mortem debug the traceback stored in sys.last_traceback."""
    post_mortem(sys.last_traceback)
# Main program for testing
|
| 1646 |
+
|
| 1647 |
+
TESTCMD = 'import x; x.main()'
|
| 1648 |
+
|
| 1649 |
+
def test():
    # Debug the canned TESTCMD expression (manual smoke test).
    run(TESTCMD)
# print help
|
| 1653 |
+
def help():
    """Page the full module documentation to the terminal."""
    import pydoc
    pydoc.pager(__doc__)
_usage = """\
|
| 1658 |
+
usage: pdb.py [-c command] ... [-m module | pyfile] [arg] ...
|
| 1659 |
+
|
| 1660 |
+
Debug the Python program given by pyfile. Alternatively,
|
| 1661 |
+
an executable module or package to debug can be specified using
|
| 1662 |
+
the -m switch.
|
| 1663 |
+
|
| 1664 |
+
Initial commands are read from .pdbrc files in your home directory
|
| 1665 |
+
and in the current directory, if they exist. Commands supplied with
|
| 1666 |
+
-c are executed after commands from .pdbrc files.
|
| 1667 |
+
|
| 1668 |
+
To let the script run until an exception occurs, use "-c continue".
|
| 1669 |
+
To let the script run up to a given line X in the debugged file, use
|
| 1670 |
+
"-c 'until X'"."""
|
| 1671 |
+
|
| 1672 |
+
def main():
    """Command-line entry point: parse options and debug a script or module."""
    import getopt

    opts, args = getopt.getopt(sys.argv[1:], 'mhc:', ['help', 'command='])

    if not args:
        print(_usage)
        sys.exit(2)

    commands = []
    run_as_module = False
    for opt, optarg in opts:
        if opt in ['-h', '--help']:
            print(_usage)
            sys.exit()
        elif opt in ['-c', '--command']:
            # -c commands are executed after the .pdbrc commands.
            commands.append(optarg)
        elif opt in ['-m']:
            run_as_module = True

    mainpyfile = args[0]     # Get script filename
    if not run_as_module and not os.path.exists(mainpyfile):
        print('Error:', mainpyfile, 'does not exist')
        sys.exit(1)

    if run_as_module:
        import runpy
        try:
            # Validate the -m target up front so a bad module fails fast.
            runpy._get_module_details(mainpyfile)
        except Exception:
            traceback.print_exc()
            sys.exit(1)

    sys.argv[:] = args      # Hide "pdb.py" and pdb options from argument list

    if not run_as_module:
        mainpyfile = os.path.realpath(mainpyfile)
        # Replace pdb's dir with script's dir in front of module search path.
        sys.path[0] = os.path.dirname(mainpyfile)

    # Note on saving/restoring sys.argv: it's a good idea when sys.argv was
    # modified by the script being debugged. It's a bad idea when it was
    # changed by the user from the command line. There is a "restart" command
    # which allows explicit specification of command line arguments.
    pdb = Pdb()
    pdb.rcLines.extend(commands)
    # Restart loop: keep re-running the target until the user quits.
    while True:
        try:
            if run_as_module:
                pdb._runmodule(mainpyfile)
            else:
                pdb._runscript(mainpyfile)
            if pdb._user_requested_quit:
                break
            print("The program finished and will be restarted")
        except Restart:
            print("Restarting", mainpyfile, "with arguments:")
            print("\t" + " ".join(sys.argv[1:]))
        except SystemExit:
            # In most cases SystemExit does not warrant a post-mortem session.
            print("The program exited via sys.exit(). Exit status:", end=' ')
            print(sys.exc_info()[1])
        except SyntaxError:
            traceback.print_exc()
            sys.exit(1)
        except:
            traceback.print_exc()
            print("Uncaught exception. Entering post mortem debugging")
            print("Running 'cont' or 'step' will restart the program")
            t = sys.exc_info()[2]
            pdb.interaction(None, t)
            print("Post mortem debugger finished. The " + mainpyfile +
                  " will be restarted")
+
if __name__ == '__main__':
    # Re-import so main() runs against the installed pdb module (keeps
    # the restart machinery working when this file is executed directly).
    import pdb
    pdb.main()
evalkit_cambrian/lib/python3.10/poplib.py
ADDED
|
@@ -0,0 +1,483 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A POP3 client class.
|
| 2 |
+
|
| 3 |
+
Based on the J. Myers POP3 draft, Jan. 96
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# Author: David Ascher <david_ascher@brown.edu>
|
| 7 |
+
# [heavily stealing from nntplib.py]
|
| 8 |
+
# Updated: Piers Lauder <piers@cs.su.oz.au> [Jul '97]
|
| 9 |
+
# String method conversion and test jig improvements by ESR, February 2001.
|
| 10 |
+
# Added the POP3_SSL class. Methods loosely based on IMAP_SSL. Hector Urtubia <urtubia@mrbook.org> Aug 2003
|
| 11 |
+
|
| 12 |
+
# Example (see the test function at the end of this file)
|
| 13 |
+
|
| 14 |
+
# Imports
|
| 15 |
+
|
| 16 |
+
import errno
|
| 17 |
+
import re
|
| 18 |
+
import socket
|
| 19 |
+
import sys
|
| 20 |
+
|
| 21 |
+
try:
|
| 22 |
+
import ssl
|
| 23 |
+
HAVE_SSL = True
|
| 24 |
+
except ImportError:
|
| 25 |
+
HAVE_SSL = False
|
| 26 |
+
|
| 27 |
+
__all__ = ["POP3","error_proto"]
|
| 28 |
+
|
| 29 |
+
# Exception raised when an error or invalid response is received:
|
| 30 |
+
|
| 31 |
+
class error_proto(Exception): pass
|
| 32 |
+
|
| 33 |
+
# Standard Port
|
| 34 |
+
POP3_PORT = 110
|
| 35 |
+
|
| 36 |
+
# POP SSL PORT
|
| 37 |
+
POP3_SSL_PORT = 995
|
| 38 |
+
|
| 39 |
+
# Line terminators (we always output CRLF, but accept any of CRLF, LFCR, LF)
|
| 40 |
+
CR = b'\r'
|
| 41 |
+
LF = b'\n'
|
| 42 |
+
CRLF = CR+LF
|
| 43 |
+
|
| 44 |
+
# maximal line length when calling readline(). This is to prevent
|
| 45 |
+
# reading arbitrary length lines. RFC 1939 limits POP3 line length to
|
| 46 |
+
# 512 characters, including CRLF. We have selected 2048 just to be on
|
| 47 |
+
# the safe side.
|
| 48 |
+
_MAXLINE = 2048
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class POP3:
|
| 52 |
+
|
| 53 |
+
"""This class supports both the minimal and optional command sets.
|
| 54 |
+
Arguments can be strings or integers (where appropriate)
|
| 55 |
+
(e.g.: retr(1) and retr('1') both work equally well.
|
| 56 |
+
|
| 57 |
+
Minimal Command Set:
|
| 58 |
+
USER name user(name)
|
| 59 |
+
PASS string pass_(string)
|
| 60 |
+
STAT stat()
|
| 61 |
+
LIST [msg] list(msg = None)
|
| 62 |
+
RETR msg retr(msg)
|
| 63 |
+
DELE msg dele(msg)
|
| 64 |
+
NOOP noop()
|
| 65 |
+
RSET rset()
|
| 66 |
+
QUIT quit()
|
| 67 |
+
|
| 68 |
+
Optional Commands (some servers support these):
|
| 69 |
+
RPOP name rpop(name)
|
| 70 |
+
APOP name digest apop(name, digest)
|
| 71 |
+
TOP msg n top(msg, n)
|
| 72 |
+
UIDL [msg] uidl(msg = None)
|
| 73 |
+
CAPA capa()
|
| 74 |
+
STLS stls()
|
| 75 |
+
UTF8 utf8()
|
| 76 |
+
|
| 77 |
+
Raises one exception: 'error_proto'.
|
| 78 |
+
|
| 79 |
+
Instantiate with:
|
| 80 |
+
POP3(hostname, port=110)
|
| 81 |
+
|
| 82 |
+
NB: the POP protocol locks the mailbox from user
|
| 83 |
+
authorization until QUIT, so be sure to get in, suck
|
| 84 |
+
the messages, and quit, each time you access the
|
| 85 |
+
mailbox.
|
| 86 |
+
|
| 87 |
+
POP is a line-based protocol, which means large mail
|
| 88 |
+
messages consume lots of python cycles reading them
|
| 89 |
+
line-by-line.
|
| 90 |
+
|
| 91 |
+
If it's available on your mail server, use IMAP4
|
| 92 |
+
instead, it doesn't suffer from the two problems
|
| 93 |
+
above.
|
| 94 |
+
"""
|
| 95 |
+
|
| 96 |
+
encoding = 'UTF-8'
|
| 97 |
+
|
| 98 |
+
def __init__(self, host, port=POP3_PORT,
             timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    # Connect immediately and read the server's +OK greeting.
    self.host = host
    self.port = port
    self._tls_established = False
    sys.audit("poplib.connect", self, host, port)
    self.sock = self._create_socket(timeout)
    # Buffered binary reader over the same socket, used by _getline().
    self.file = self.sock.makefile('rb')
    self._debugging = 0
    self.welcome = self._getresp()
def _create_socket(self, timeout):
|
| 110 |
+
if timeout is not None and not timeout:
|
| 111 |
+
raise ValueError('Non-blocking socket (timeout=0) is not supported')
|
| 112 |
+
return socket.create_connection((self.host, self.port), timeout)
|
| 113 |
+
|
| 114 |
+
def _putline(self, line):
    # Internal: send `line` (bytes, without terminator) followed by CRLF.
    if self._debugging > 1:
        print('*put*', repr(line))
    sys.audit("poplib.putline", self, line)
    self.sock.sendall(line + CRLF)
def _putcmd(self, line):
|
| 123 |
+
if self._debugging: print('*cmd*', repr(line))
|
| 124 |
+
line = bytes(line, self.encoding)
|
| 125 |
+
self._putline(line)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
# Internal: return one line from the server, stripping CRLF.
|
| 129 |
+
# This is where all the CPU time of this module is consumed.
|
| 130 |
+
# Raise error_proto('-ERR EOF') if the connection is closed.
|
| 131 |
+
|
| 132 |
+
def _getline(self):
    # Internal: return one line from the server, stripping CRLF.
    # This is where all the CPU time of this module is consumed.
    # Raises error_proto('-ERR EOF') if the connection is closed.
    # Read one byte past _MAXLINE so an over-long line is detectable.
    line = self.file.readline(_MAXLINE + 1)
    if len(line) > _MAXLINE:
        raise error_proto('line too long')

    if self._debugging > 1: print('*get*', repr(line))
    if not line: raise error_proto('-ERR EOF')
    octets = len(line)
    # server can send any combination of CR & LF
    # however, 'readline()' returns lines ending in LF
    # so only possibilities are ...LF, ...CRLF, CR...LF
    if line[-2:] == CRLF:
        return line[:-2], octets
    if line[:1] == CR:
        return line[1:-1], octets
    return line[:-1], octets
def _getresp(self):
|
| 154 |
+
resp, o = self._getline()
|
| 155 |
+
if self._debugging > 1: print('*resp*', repr(resp))
|
| 156 |
+
if not resp.startswith(b'+'):
|
| 157 |
+
raise error_proto(resp)
|
| 158 |
+
return resp
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
# Internal: get a response plus following text from the server.
|
| 162 |
+
|
| 163 |
+
def _getlongresp(self):
|
| 164 |
+
resp = self._getresp()
|
| 165 |
+
list = []; octets = 0
|
| 166 |
+
line, o = self._getline()
|
| 167 |
+
while line != b'.':
|
| 168 |
+
if line.startswith(b'..'):
|
| 169 |
+
o = o-1
|
| 170 |
+
line = line[1:]
|
| 171 |
+
octets = octets + o
|
| 172 |
+
list.append(line)
|
| 173 |
+
line, o = self._getline()
|
| 174 |
+
return resp, list, octets
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
# Internal: send a command and get the response
|
| 178 |
+
|
| 179 |
+
def _shortcmd(self, line):
|
| 180 |
+
self._putcmd(line)
|
| 181 |
+
return self._getresp()
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
# Internal: send a command and get the response plus following text
|
| 185 |
+
|
| 186 |
+
def _longcmd(self, line):
|
| 187 |
+
self._putcmd(line)
|
| 188 |
+
return self._getlongresp()
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
# These can be useful:
|
| 192 |
+
|
| 193 |
+
def getwelcome(self):
|
| 194 |
+
return self.welcome
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def set_debuglevel(self, level):
|
| 198 |
+
self._debugging = level
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
# Here are all the POP commands:
|
| 202 |
+
|
| 203 |
+
def user(self, user):
|
| 204 |
+
"""Send user name, return response
|
| 205 |
+
|
| 206 |
+
(should indicate password required).
|
| 207 |
+
"""
|
| 208 |
+
return self._shortcmd('USER %s' % user)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def pass_(self, pswd):
|
| 212 |
+
"""Send password, return response
|
| 213 |
+
|
| 214 |
+
(response includes message count, mailbox size).
|
| 215 |
+
|
| 216 |
+
NB: mailbox is locked by server from here to 'quit()'
|
| 217 |
+
"""
|
| 218 |
+
return self._shortcmd('PASS %s' % pswd)
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def stat(self):
|
| 222 |
+
"""Get mailbox status.
|
| 223 |
+
|
| 224 |
+
Result is tuple of 2 ints (message count, mailbox size)
|
| 225 |
+
"""
|
| 226 |
+
retval = self._shortcmd('STAT')
|
| 227 |
+
rets = retval.split()
|
| 228 |
+
if self._debugging: print('*stat*', repr(rets))
|
| 229 |
+
numMessages = int(rets[1])
|
| 230 |
+
sizeMessages = int(rets[2])
|
| 231 |
+
return (numMessages, sizeMessages)
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
def list(self, which=None):
|
| 235 |
+
"""Request listing, return result.
|
| 236 |
+
|
| 237 |
+
Result without a message number argument is in form
|
| 238 |
+
['response', ['mesg_num octets', ...], octets].
|
| 239 |
+
|
| 240 |
+
Result when a message number argument is given is a
|
| 241 |
+
single response: the "scan listing" for that message.
|
| 242 |
+
"""
|
| 243 |
+
if which is not None:
|
| 244 |
+
return self._shortcmd('LIST %s' % which)
|
| 245 |
+
return self._longcmd('LIST')
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def retr(self, which):
|
| 249 |
+
"""Retrieve whole message number 'which'.
|
| 250 |
+
|
| 251 |
+
Result is in form ['response', ['line', ...], octets].
|
| 252 |
+
"""
|
| 253 |
+
return self._longcmd('RETR %s' % which)
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def dele(self, which):
|
| 257 |
+
"""Delete message number 'which'.
|
| 258 |
+
|
| 259 |
+
Result is 'response'.
|
| 260 |
+
"""
|
| 261 |
+
return self._shortcmd('DELE %s' % which)
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def noop(self):
|
| 265 |
+
"""Does nothing.
|
| 266 |
+
|
| 267 |
+
One supposes the response indicates the server is alive.
|
| 268 |
+
"""
|
| 269 |
+
return self._shortcmd('NOOP')
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def rset(self):
|
| 273 |
+
"""Unmark all messages marked for deletion."""
|
| 274 |
+
return self._shortcmd('RSET')
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
def quit(self):
|
| 278 |
+
"""Signoff: commit changes on server, unlock mailbox, close connection."""
|
| 279 |
+
resp = self._shortcmd('QUIT')
|
| 280 |
+
self.close()
|
| 281 |
+
return resp
|
| 282 |
+
|
| 283 |
+
    def close(self):
        """Close the connection without assuming anything about it."""
        try:
            # Drop our references before closing so a second close() is a
            # no-op even if file.close() raises below.
            file = self.file
            self.file = None
            if file is not None:
                file.close()
        finally:
            # The socket is torn down even when closing the file failed.
            sock = self.sock
            self.sock = None
            if sock is not None:
                try:
                    sock.shutdown(socket.SHUT_RDWR)
                except OSError as exc:
                    # The server might already have closed the connection.
                    # On Windows, this may result in WSAEINVAL (error 10022):
                    # An invalid operation was attempted.
                    if (exc.errno != errno.ENOTCONN
                            and getattr(exc, 'winerror', 0) != 10022):
                        raise
                finally:
                    # Always release the file descriptor.
                    sock.close()
|
| 305 |
+
|
| 306 |
+
#__del__ = quit
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
# optional commands:
|
| 310 |
+
|
| 311 |
+
def rpop(self, user):
|
| 312 |
+
"""Not sure what this does."""
|
| 313 |
+
return self._shortcmd('RPOP %s' % user)
|
| 314 |
+
|
| 315 |
+
|
| 316 |
+
timestamp = re.compile(br'\+OK.[^<]*(<.*>)')
|
| 317 |
+
|
| 318 |
+
    def apop(self, user, password):
        """Authorisation

        - only possible if server has supplied a timestamp in initial greeting.

        Args:
                user     - mailbox user;
                password - mailbox password.

        NB: mailbox is locked by server from here to 'quit()'
        """
        secret = bytes(password, self.encoding)
        m = self.timestamp.match(self.welcome)
        if not m:
            raise error_proto('-ERR APOP not supported by server')
        import hashlib
        # RFC 1939 APOP digest: MD5 over the greeting timestamp followed by
        # the shared secret.  NOTE(review): MD5 is mandated by the protocol
        # here; do not treat this as a general-purpose secure hash, and
        # prefer stls()/POP3_SSL where available.
        digest = m.group(1)+secret
        digest = hashlib.md5(digest).hexdigest()
        return self._shortcmd('APOP %s %s' % (user, digest))
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
def top(self, which, howmuch):
|
| 340 |
+
"""Retrieve message header of message number 'which'
|
| 341 |
+
and first 'howmuch' lines of message body.
|
| 342 |
+
|
| 343 |
+
Result is in form ['response', ['line', ...], octets].
|
| 344 |
+
"""
|
| 345 |
+
return self._longcmd('TOP %s %s' % (which, howmuch))
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def uidl(self, which=None):
|
| 349 |
+
"""Return message digest (unique id) list.
|
| 350 |
+
|
| 351 |
+
If 'which', result contains unique id for that message
|
| 352 |
+
in the form 'response mesgnum uid', otherwise result is
|
| 353 |
+
the list ['response', ['mesgnum uid', ...], octets]
|
| 354 |
+
"""
|
| 355 |
+
if which is not None:
|
| 356 |
+
return self._shortcmd('UIDL %s' % which)
|
| 357 |
+
return self._longcmd('UIDL')
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def utf8(self):
|
| 361 |
+
"""Try to enter UTF-8 mode (see RFC 6856). Returns server response.
|
| 362 |
+
"""
|
| 363 |
+
return self._shortcmd('UTF8')
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
def capa(self):
|
| 367 |
+
"""Return server capabilities (RFC 2449) as a dictionary
|
| 368 |
+
>>> c=poplib.POP3('localhost')
|
| 369 |
+
>>> c.capa()
|
| 370 |
+
{'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'],
|
| 371 |
+
'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [],
|
| 372 |
+
'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [],
|
| 373 |
+
'UIDL': [], 'RESP-CODES': []}
|
| 374 |
+
>>>
|
| 375 |
+
|
| 376 |
+
Really, according to RFC 2449, the cyrus folks should avoid
|
| 377 |
+
having the implementation split into multiple arguments...
|
| 378 |
+
"""
|
| 379 |
+
def _parsecap(line):
|
| 380 |
+
lst = line.decode('ascii').split()
|
| 381 |
+
return lst[0], lst[1:]
|
| 382 |
+
|
| 383 |
+
caps = {}
|
| 384 |
+
try:
|
| 385 |
+
resp = self._longcmd('CAPA')
|
| 386 |
+
rawcaps = resp[1]
|
| 387 |
+
for capline in rawcaps:
|
| 388 |
+
capnm, capargs = _parsecap(capline)
|
| 389 |
+
caps[capnm] = capargs
|
| 390 |
+
except error_proto:
|
| 391 |
+
raise error_proto('-ERR CAPA not supported by server')
|
| 392 |
+
return caps
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
    def stls(self, context=None):
        """Start a TLS session on the active connection as specified in RFC 2595.

        context - a ssl.SSLContext
        Raises error_proto if SSL support is missing, TLS is already
        active, or the server does not advertise STLS.
        """
        if not HAVE_SSL:
            raise error_proto('-ERR TLS support missing')
        if self._tls_established:
            raise error_proto('-ERR TLS session already established')
        # STLS must appear in the server's capability list.
        caps = self.capa()
        if not 'STLS' in caps:
            raise error_proto('-ERR STLS not supported by server')
        if context is None:
            context = ssl._create_stdlib_context()
        resp = self._shortcmd('STLS')
        # Wrap the live socket and rebuild the buffered reader on top of
        # the TLS channel; subsequent commands go through TLS.
        self.sock = context.wrap_socket(self.sock,
                                        server_hostname=self.host)
        self.file = self.sock.makefile('rb')
        self._tls_established = True
        return resp
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
if HAVE_SSL:

    class POP3_SSL(POP3):
        """POP3 client class over SSL connection

        Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None,
                                   context=None)

               hostname - the hostname of the pop3 over ssl server
               port - port number
               keyfile - PEM formatted file that contains your private key
               certfile - PEM formatted certificate chain file
               context - a ssl.SSLContext

        See the methods of the parent class POP3 for more documentation.
        """

        def __init__(self, host, port=POP3_SSL_PORT, keyfile=None, certfile=None,
                     timeout=socket._GLOBAL_DEFAULT_TIMEOUT, context=None):
            # keyfile/certfile are a deprecated alternative to passing a
            # pre-configured SSLContext; the two styles cannot be mixed.
            if context is not None and keyfile is not None:
                raise ValueError("context and keyfile arguments are mutually "
                                 "exclusive")
            if context is not None and certfile is not None:
                raise ValueError("context and certfile arguments are mutually "
                                 "exclusive")
            if keyfile is not None or certfile is not None:
                import warnings
                warnings.warn("keyfile and certfile are deprecated, use a "
                              "custom context instead", DeprecationWarning, 2)
            self.keyfile = keyfile
            self.certfile = certfile
            if context is None:
                context = ssl._create_stdlib_context(certfile=certfile,
                                                     keyfile=keyfile)
            self.context = context
            # The parent constructor connects, so the context must be set
            # before it runs (it calls our _create_socket below).
            POP3.__init__(self, host, port, timeout)

        def _create_socket(self, timeout):
            # Wrap the plain TCP socket from POP3 in TLS immediately.
            sock = POP3._create_socket(self, timeout)
            sock = self.context.wrap_socket(sock,
                                            server_hostname=self.host)
            return sock

        def stls(self, keyfile=None, certfile=None, context=None):
            """The method unconditionally raises an exception since the
            STLS command doesn't make any sense on an already established
            SSL/TLS session.
            """
            raise error_proto('-ERR TLS session already established')

    __all__.append("POP3_SSL")
|
| 468 |
+
|
| 469 |
+
if __name__ == "__main__":
    # Minimal interactive demo: python poplib.py <host> <user> <password>
    # logs in and dumps every message in the mailbox to stdout.
    import sys
    a = POP3(sys.argv[1])
    print(a.getwelcome())
    a.user(sys.argv[2])
    a.pass_(sys.argv[3])
    a.list()
    (numMsgs, totalSize) = a.stat()
    for i in range(1, numMsgs + 1):
        (header, msg, octets) = a.retr(i)
        print("Message %d:" % i)
        for line in msg:
            print(' ' + line)
        print('-----------------------')
    a.quit()
|
evalkit_cambrian/lib/python3.10/pprint.py
ADDED
|
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Author: Fred L. Drake, Jr.
|
| 2 |
+
# fdrake@acm.org
|
| 3 |
+
#
|
| 4 |
+
# This is a simple little module I wrote to make life easier. I didn't
|
| 5 |
+
# see anything quite like it in the library, though I may have overlooked
|
| 6 |
+
# something. I wrote this when I was trying to read some heavily nested
|
| 7 |
+
# tuples with fairly non-descriptive content. This is modeled very much
|
| 8 |
+
# after Lisp/Scheme - style pretty-printing of lists. If you find it
|
| 9 |
+
# useful, thank small children who sleep at night.
|
| 10 |
+
|
| 11 |
+
"""Support to pretty-print lists, tuples, & dictionaries recursively.
|
| 12 |
+
|
| 13 |
+
Very simple, but useful, especially in debugging data structures.
|
| 14 |
+
|
| 15 |
+
Classes
|
| 16 |
+
-------
|
| 17 |
+
|
| 18 |
+
PrettyPrinter()
|
| 19 |
+
Handle pretty-printing operations onto a stream using a configured
|
| 20 |
+
set of formatting parameters.
|
| 21 |
+
|
| 22 |
+
Functions
|
| 23 |
+
---------
|
| 24 |
+
|
| 25 |
+
pformat()
|
| 26 |
+
Format a Python object into a pretty-printed representation.
|
| 27 |
+
|
| 28 |
+
pprint()
|
| 29 |
+
Pretty-print a Python object to a stream [default is sys.stdout].
|
| 30 |
+
|
| 31 |
+
saferepr()
|
| 32 |
+
Generate a 'standard' repr()-like value, but protect against recursive
|
| 33 |
+
data structures.
|
| 34 |
+
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
import collections as _collections
|
| 38 |
+
import dataclasses as _dataclasses
|
| 39 |
+
import re
|
| 40 |
+
import sys as _sys
|
| 41 |
+
import types as _types
|
| 42 |
+
from io import StringIO as _StringIO
|
| 43 |
+
|
| 44 |
+
__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
|
| 45 |
+
"PrettyPrinter", "pp"]
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def pprint(object, stream=None, indent=1, width=80, depth=None, *,
           compact=False, sort_dicts=True, underscore_numbers=False):
    """Pretty-print a Python object to a stream [default is sys.stdout]."""
    PrettyPrinter(stream=stream, indent=indent, width=width, depth=depth,
                  compact=compact, sort_dicts=sort_dicts,
                  underscore_numbers=underscore_numbers).pprint(object)
|
| 56 |
+
|
| 57 |
+
def pformat(object, indent=1, width=80, depth=None, *,
            compact=False, sort_dicts=True, underscore_numbers=False):
    """Format a Python object into a pretty-printed representation."""
    printer = PrettyPrinter(indent=indent, width=width, depth=depth,
                            compact=compact, sort_dicts=sort_dicts,
                            underscore_numbers=underscore_numbers)
    return printer.pformat(object)
|
| 63 |
+
|
| 64 |
+
def pp(object, *args, sort_dicts=False, **kwargs):
    """Pretty-print a Python object (like pprint(), but sort_dicts
    defaults to False so insertion order is kept)."""
    pprint(object, *args, sort_dicts=sort_dicts, **kwargs)
|
| 67 |
+
|
| 68 |
+
def saferepr(object):
    """Version of repr() which can handle recursive data structures."""
    rep, _, _ = PrettyPrinter()._safe_repr(object, {}, None, 0)
    return rep
|
| 71 |
+
|
| 72 |
+
def isreadable(object):
    """Determine if saferepr(object) is readable by eval()."""
    _, readable, _ = PrettyPrinter()._safe_repr(object, {}, None, 0)
    return readable
|
| 75 |
+
|
| 76 |
+
def isrecursive(object):
    """Determine if object requires a recursive representation."""
    _, _, recursive = PrettyPrinter()._safe_repr(object, {}, None, 0)
    return recursive
|
| 79 |
+
|
| 80 |
+
class _safe_key:
    """Sort-key wrapper that makes unorderable objects comparable.

    When the wrapped objects do not support '<' between themselves,
    fall back to a Py2.x-style ordering on (type name, object id).
    Not recursive, so dict.items() needs _safe_key applied to the key
    and the value separately.
    """

    __slots__ = ['obj']

    def __init__(self, obj):
        self.obj = obj

    def __lt__(self, other):
        try:
            return self.obj < other.obj
        except TypeError:
            lhs = (str(type(self.obj)), id(self.obj))
            rhs = (str(type(other.obj)), id(other.obj))
            return lhs < rhs
|
| 101 |
+
|
| 102 |
+
def _safe_tuple(t):
    """Wrap both elements of a (key, value) pair in _safe_key for sorting."""
    key, value = t
    return _safe_key(key), _safe_key(value)
|
| 105 |
+
|
| 106 |
+
class PrettyPrinter:
|
| 107 |
+
def __init__(self, indent=1, width=80, depth=None, stream=None, *,
|
| 108 |
+
compact=False, sort_dicts=True, underscore_numbers=False):
|
| 109 |
+
"""Handle pretty printing operations onto a stream using a set of
|
| 110 |
+
configured parameters.
|
| 111 |
+
|
| 112 |
+
indent
|
| 113 |
+
Number of spaces to indent for each level of nesting.
|
| 114 |
+
|
| 115 |
+
width
|
| 116 |
+
Attempted maximum number of columns in the output.
|
| 117 |
+
|
| 118 |
+
depth
|
| 119 |
+
The maximum depth to print out nested structures.
|
| 120 |
+
|
| 121 |
+
stream
|
| 122 |
+
The desired output stream. If omitted (or false), the standard
|
| 123 |
+
output stream available at construction will be used.
|
| 124 |
+
|
| 125 |
+
compact
|
| 126 |
+
If true, several items will be combined in one line.
|
| 127 |
+
|
| 128 |
+
sort_dicts
|
| 129 |
+
If true, dict keys are sorted.
|
| 130 |
+
|
| 131 |
+
"""
|
| 132 |
+
indent = int(indent)
|
| 133 |
+
width = int(width)
|
| 134 |
+
if indent < 0:
|
| 135 |
+
raise ValueError('indent must be >= 0')
|
| 136 |
+
if depth is not None and depth <= 0:
|
| 137 |
+
raise ValueError('depth must be > 0')
|
| 138 |
+
if not width:
|
| 139 |
+
raise ValueError('width must be != 0')
|
| 140 |
+
self._depth = depth
|
| 141 |
+
self._indent_per_level = indent
|
| 142 |
+
self._width = width
|
| 143 |
+
if stream is not None:
|
| 144 |
+
self._stream = stream
|
| 145 |
+
else:
|
| 146 |
+
self._stream = _sys.stdout
|
| 147 |
+
self._compact = bool(compact)
|
| 148 |
+
self._sort_dicts = sort_dicts
|
| 149 |
+
self._underscore_numbers = underscore_numbers
|
| 150 |
+
|
| 151 |
+
def pprint(self, object):
|
| 152 |
+
self._format(object, self._stream, 0, 0, {}, 0)
|
| 153 |
+
self._stream.write("\n")
|
| 154 |
+
|
| 155 |
+
def pformat(self, object):
|
| 156 |
+
sio = _StringIO()
|
| 157 |
+
self._format(object, sio, 0, 0, {}, 0)
|
| 158 |
+
return sio.getvalue()
|
| 159 |
+
|
| 160 |
+
def isrecursive(self, object):
|
| 161 |
+
return self.format(object, {}, 0, 0)[2]
|
| 162 |
+
|
| 163 |
+
def isreadable(self, object):
|
| 164 |
+
s, readable, recursive = self.format(object, {}, 0, 0)
|
| 165 |
+
return readable and not recursive
|
| 166 |
+
|
| 167 |
+
def _format(self, object, stream, indent, allowance, context, level):
|
| 168 |
+
objid = id(object)
|
| 169 |
+
if objid in context:
|
| 170 |
+
stream.write(_recursion(object))
|
| 171 |
+
self._recursive = True
|
| 172 |
+
self._readable = False
|
| 173 |
+
return
|
| 174 |
+
rep = self._repr(object, context, level)
|
| 175 |
+
max_width = self._width - indent - allowance
|
| 176 |
+
if len(rep) > max_width:
|
| 177 |
+
p = self._dispatch.get(type(object).__repr__, None)
|
| 178 |
+
if p is not None:
|
| 179 |
+
context[objid] = 1
|
| 180 |
+
p(self, object, stream, indent, allowance, context, level + 1)
|
| 181 |
+
del context[objid]
|
| 182 |
+
return
|
| 183 |
+
elif (_dataclasses.is_dataclass(object) and
|
| 184 |
+
not isinstance(object, type) and
|
| 185 |
+
object.__dataclass_params__.repr and
|
| 186 |
+
# Check dataclass has generated repr method.
|
| 187 |
+
hasattr(object.__repr__, "__wrapped__") and
|
| 188 |
+
"__create_fn__" in object.__repr__.__wrapped__.__qualname__):
|
| 189 |
+
context[objid] = 1
|
| 190 |
+
self._pprint_dataclass(object, stream, indent, allowance, context, level + 1)
|
| 191 |
+
del context[objid]
|
| 192 |
+
return
|
| 193 |
+
stream.write(rep)
|
| 194 |
+
|
| 195 |
+
def _pprint_dataclass(self, object, stream, indent, allowance, context, level):
|
| 196 |
+
cls_name = object.__class__.__name__
|
| 197 |
+
indent += len(cls_name) + 1
|
| 198 |
+
items = [(f.name, getattr(object, f.name)) for f in _dataclasses.fields(object) if f.repr]
|
| 199 |
+
stream.write(cls_name + '(')
|
| 200 |
+
self._format_namespace_items(items, stream, indent, allowance, context, level)
|
| 201 |
+
stream.write(')')
|
| 202 |
+
|
| 203 |
+
_dispatch = {}
|
| 204 |
+
|
| 205 |
+
def _pprint_dict(self, object, stream, indent, allowance, context, level):
|
| 206 |
+
write = stream.write
|
| 207 |
+
write('{')
|
| 208 |
+
if self._indent_per_level > 1:
|
| 209 |
+
write((self._indent_per_level - 1) * ' ')
|
| 210 |
+
length = len(object)
|
| 211 |
+
if length:
|
| 212 |
+
if self._sort_dicts:
|
| 213 |
+
items = sorted(object.items(), key=_safe_tuple)
|
| 214 |
+
else:
|
| 215 |
+
items = object.items()
|
| 216 |
+
self._format_dict_items(items, stream, indent, allowance + 1,
|
| 217 |
+
context, level)
|
| 218 |
+
write('}')
|
| 219 |
+
|
| 220 |
+
_dispatch[dict.__repr__] = _pprint_dict
|
| 221 |
+
|
| 222 |
+
def _pprint_ordered_dict(self, object, stream, indent, allowance, context, level):
|
| 223 |
+
if not len(object):
|
| 224 |
+
stream.write(repr(object))
|
| 225 |
+
return
|
| 226 |
+
cls = object.__class__
|
| 227 |
+
stream.write(cls.__name__ + '(')
|
| 228 |
+
self._format(list(object.items()), stream,
|
| 229 |
+
indent + len(cls.__name__) + 1, allowance + 1,
|
| 230 |
+
context, level)
|
| 231 |
+
stream.write(')')
|
| 232 |
+
|
| 233 |
+
_dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict
|
| 234 |
+
|
| 235 |
+
def _pprint_list(self, object, stream, indent, allowance, context, level):
|
| 236 |
+
stream.write('[')
|
| 237 |
+
self._format_items(object, stream, indent, allowance + 1,
|
| 238 |
+
context, level)
|
| 239 |
+
stream.write(']')
|
| 240 |
+
|
| 241 |
+
_dispatch[list.__repr__] = _pprint_list
|
| 242 |
+
|
| 243 |
+
def _pprint_tuple(self, object, stream, indent, allowance, context, level):
|
| 244 |
+
stream.write('(')
|
| 245 |
+
endchar = ',)' if len(object) == 1 else ')'
|
| 246 |
+
self._format_items(object, stream, indent, allowance + len(endchar),
|
| 247 |
+
context, level)
|
| 248 |
+
stream.write(endchar)
|
| 249 |
+
|
| 250 |
+
_dispatch[tuple.__repr__] = _pprint_tuple
|
| 251 |
+
|
| 252 |
+
def _pprint_set(self, object, stream, indent, allowance, context, level):
|
| 253 |
+
if not len(object):
|
| 254 |
+
stream.write(repr(object))
|
| 255 |
+
return
|
| 256 |
+
typ = object.__class__
|
| 257 |
+
if typ is set:
|
| 258 |
+
stream.write('{')
|
| 259 |
+
endchar = '}'
|
| 260 |
+
else:
|
| 261 |
+
stream.write(typ.__name__ + '({')
|
| 262 |
+
endchar = '})'
|
| 263 |
+
indent += len(typ.__name__) + 1
|
| 264 |
+
object = sorted(object, key=_safe_key)
|
| 265 |
+
self._format_items(object, stream, indent, allowance + len(endchar),
|
| 266 |
+
context, level)
|
| 267 |
+
stream.write(endchar)
|
| 268 |
+
|
| 269 |
+
_dispatch[set.__repr__] = _pprint_set
|
| 270 |
+
_dispatch[frozenset.__repr__] = _pprint_set
|
| 271 |
+
|
| 272 |
+
def _pprint_str(self, object, stream, indent, allowance, context, level):
|
| 273 |
+
write = stream.write
|
| 274 |
+
if not len(object):
|
| 275 |
+
write(repr(object))
|
| 276 |
+
return
|
| 277 |
+
chunks = []
|
| 278 |
+
lines = object.splitlines(True)
|
| 279 |
+
if level == 1:
|
| 280 |
+
indent += 1
|
| 281 |
+
allowance += 1
|
| 282 |
+
max_width1 = max_width = self._width - indent
|
| 283 |
+
for i, line in enumerate(lines):
|
| 284 |
+
rep = repr(line)
|
| 285 |
+
if i == len(lines) - 1:
|
| 286 |
+
max_width1 -= allowance
|
| 287 |
+
if len(rep) <= max_width1:
|
| 288 |
+
chunks.append(rep)
|
| 289 |
+
else:
|
| 290 |
+
# A list of alternating (non-space, space) strings
|
| 291 |
+
parts = re.findall(r'\S*\s*', line)
|
| 292 |
+
assert parts
|
| 293 |
+
assert not parts[-1]
|
| 294 |
+
parts.pop() # drop empty last part
|
| 295 |
+
max_width2 = max_width
|
| 296 |
+
current = ''
|
| 297 |
+
for j, part in enumerate(parts):
|
| 298 |
+
candidate = current + part
|
| 299 |
+
if j == len(parts) - 1 and i == len(lines) - 1:
|
| 300 |
+
max_width2 -= allowance
|
| 301 |
+
if len(repr(candidate)) > max_width2:
|
| 302 |
+
if current:
|
| 303 |
+
chunks.append(repr(current))
|
| 304 |
+
current = part
|
| 305 |
+
else:
|
| 306 |
+
current = candidate
|
| 307 |
+
if current:
|
| 308 |
+
chunks.append(repr(current))
|
| 309 |
+
if len(chunks) == 1:
|
| 310 |
+
write(rep)
|
| 311 |
+
return
|
| 312 |
+
if level == 1:
|
| 313 |
+
write('(')
|
| 314 |
+
for i, rep in enumerate(chunks):
|
| 315 |
+
if i > 0:
|
| 316 |
+
write('\n' + ' '*indent)
|
| 317 |
+
write(rep)
|
| 318 |
+
if level == 1:
|
| 319 |
+
write(')')
|
| 320 |
+
|
| 321 |
+
_dispatch[str.__repr__] = _pprint_str
|
| 322 |
+
|
| 323 |
+
def _pprint_bytes(self, object, stream, indent, allowance, context, level):
|
| 324 |
+
write = stream.write
|
| 325 |
+
if len(object) <= 4:
|
| 326 |
+
write(repr(object))
|
| 327 |
+
return
|
| 328 |
+
parens = level == 1
|
| 329 |
+
if parens:
|
| 330 |
+
indent += 1
|
| 331 |
+
allowance += 1
|
| 332 |
+
write('(')
|
| 333 |
+
delim = ''
|
| 334 |
+
for rep in _wrap_bytes_repr(object, self._width - indent, allowance):
|
| 335 |
+
write(delim)
|
| 336 |
+
write(rep)
|
| 337 |
+
if not delim:
|
| 338 |
+
delim = '\n' + ' '*indent
|
| 339 |
+
if parens:
|
| 340 |
+
write(')')
|
| 341 |
+
|
| 342 |
+
_dispatch[bytes.__repr__] = _pprint_bytes
|
| 343 |
+
|
| 344 |
+
def _pprint_bytearray(self, object, stream, indent, allowance, context, level):
|
| 345 |
+
write = stream.write
|
| 346 |
+
write('bytearray(')
|
| 347 |
+
self._pprint_bytes(bytes(object), stream, indent + 10,
|
| 348 |
+
allowance + 1, context, level + 1)
|
| 349 |
+
write(')')
|
| 350 |
+
|
| 351 |
+
_dispatch[bytearray.__repr__] = _pprint_bytearray
|
| 352 |
+
|
| 353 |
+
def _pprint_mappingproxy(self, object, stream, indent, allowance, context, level):
|
| 354 |
+
stream.write('mappingproxy(')
|
| 355 |
+
self._format(object.copy(), stream, indent + 13, allowance + 1,
|
| 356 |
+
context, level)
|
| 357 |
+
stream.write(')')
|
| 358 |
+
|
| 359 |
+
_dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy
|
| 360 |
+
|
| 361 |
+
def _pprint_simplenamespace(self, object, stream, indent, allowance, context, level):
|
| 362 |
+
if type(object) is _types.SimpleNamespace:
|
| 363 |
+
# The SimpleNamespace repr is "namespace" instead of the class
|
| 364 |
+
# name, so we do the same here. For subclasses; use the class name.
|
| 365 |
+
cls_name = 'namespace'
|
| 366 |
+
else:
|
| 367 |
+
cls_name = object.__class__.__name__
|
| 368 |
+
indent += len(cls_name) + 1
|
| 369 |
+
items = object.__dict__.items()
|
| 370 |
+
stream.write(cls_name + '(')
|
| 371 |
+
self._format_namespace_items(items, stream, indent, allowance, context, level)
|
| 372 |
+
stream.write(')')
|
| 373 |
+
|
| 374 |
+
_dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace
|
| 375 |
+
|
| 376 |
+
def _format_dict_items(self, items, stream, indent, allowance, context,
|
| 377 |
+
level):
|
| 378 |
+
write = stream.write
|
| 379 |
+
indent += self._indent_per_level
|
| 380 |
+
delimnl = ',\n' + ' ' * indent
|
| 381 |
+
last_index = len(items) - 1
|
| 382 |
+
for i, (key, ent) in enumerate(items):
|
| 383 |
+
last = i == last_index
|
| 384 |
+
rep = self._repr(key, context, level)
|
| 385 |
+
write(rep)
|
| 386 |
+
write(': ')
|
| 387 |
+
self._format(ent, stream, indent + len(rep) + 2,
|
| 388 |
+
allowance if last else 1,
|
| 389 |
+
context, level)
|
| 390 |
+
if not last:
|
| 391 |
+
write(delimnl)
|
| 392 |
+
|
| 393 |
+
def _format_namespace_items(self, items, stream, indent, allowance, context, level):
|
| 394 |
+
write = stream.write
|
| 395 |
+
delimnl = ',\n' + ' ' * indent
|
| 396 |
+
last_index = len(items) - 1
|
| 397 |
+
for i, (key, ent) in enumerate(items):
|
| 398 |
+
last = i == last_index
|
| 399 |
+
write(key)
|
| 400 |
+
write('=')
|
| 401 |
+
if id(ent) in context:
|
| 402 |
+
# Special-case representation of recursion to match standard
|
| 403 |
+
# recursive dataclass repr.
|
| 404 |
+
write("...")
|
| 405 |
+
else:
|
| 406 |
+
self._format(ent, stream, indent + len(key) + 1,
|
| 407 |
+
allowance if last else 1,
|
| 408 |
+
context, level)
|
| 409 |
+
if not last:
|
| 410 |
+
write(delimnl)
|
| 411 |
+
|
| 412 |
+
def _format_items(self, items, stream, indent, allowance, context, level):
    """Write sequence items to *stream*.

    Normally each item goes on its own line.  When self._compact is
    true, items whose reprs fit are packed onto shared lines, tracking
    the remaining *width* on the current line.  The iterator is read one
    element ahead so the final item can be given the caller's
    *allowance* (room for closing brackets) instead of 1.
    """
    write = stream.write
    indent += self._indent_per_level
    if self._indent_per_level > 1:
        write((self._indent_per_level - 1) * ' ')
    delimnl = ',\n' + ' ' * indent
    delim = ''
    width = max_width = self._width - indent + 1
    it = iter(items)
    try:
        next_ent = next(it)
    except StopIteration:
        # Empty sequence: nothing to emit.
        return
    last = False
    while not last:
        ent = next_ent
        try:
            # One-item lookahead: detect the last element.
            next_ent = next(it)
        except StopIteration:
            last = True
            max_width -= allowance
            width -= allowance
        if self._compact:
            rep = self._repr(ent, context, level)
            w = len(rep) + 2    # item repr plus ", " separator
            if width < w:
                # Doesn't fit on this line: start a fresh line.
                width = max_width
                if delim:
                    delim = delimnl
            if width >= w:
                width -= w
                write(delim)
                delim = ', '
                write(rep)
                continue
        # Non-compact (or item too wide even for a fresh line):
        # recursively format the item on its own line.
        write(delim)
        delim = delimnl
        self._format(ent, stream, indent,
                     allowance if last else 1,
                     context, level)
|
| 452 |
+
|
| 453 |
+
def _repr(self, object, context, level):
    """Return self.format()'s repr of *object*, folding its readable /
    recursive flags into self._readable and self._recursive.

    *context* is copied so the nested format() call cannot mutate the
    caller's recursion-tracking dict.
    """
    repr, readable, recursive = self.format(object, context.copy(),
                                            self._depth, level)
    if not readable:
        self._readable = False
    if recursive:
        self._recursive = True
    return repr
|
| 461 |
+
|
| 462 |
+
def format(self, object, context, maxlevels, level):
    """Format object for a specific context, returning a string
    and flags indicating whether the representation is 'readable'
    and whether the object represents a recursive construct.
    """
    # Subclass hook: overriding this changes how every nested value
    # is rendered.
    return self._safe_repr(object, context, maxlevels, level)
|
| 468 |
+
|
| 469 |
+
def _pprint_default_dict(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.defaultdict as ClassName(factory, {...})."""
    if not len(object):
        # Empty: the builtin repr is already short and exact.
        stream.write(repr(object))
        return
    rdf = self._repr(object.default_factory, context, level)
    cls = object.__class__
    indent += len(cls.__name__) + 1
    stream.write('%s(%s,\n%s' % (cls.__name__, rdf, ' ' * indent))
    # allowance + 1 leaves room for our closing ')'.
    self._pprint_dict(object, stream, indent, allowance + 1, context, level)
    stream.write(')')

_dispatch[_types.SimpleNamespace.__repr__] if False else None  # (no-op guard removed)
|
| 481 |
+
|
| 482 |
+
def _pprint_counter(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.Counter with most common entries first."""
    if not len(object):
        stream.write(repr(object))
        return
    cls = object.__class__
    stream.write(cls.__name__ + '({')
    if self._indent_per_level > 1:
        stream.write((self._indent_per_level - 1) * ' ')
    # most_common() orders entries by descending count, matching
    # Counter's own repr ordering.
    items = object.most_common()
    # allowance + 2 leaves room for the closing '})'.
    self._format_dict_items(items, stream,
                            indent + len(cls.__name__) + 1, allowance + 2,
                            context, level)
    stream.write('})')

_dispatch[_collections.Counter.__repr__] = _pprint_counter
|
| 497 |
+
|
| 498 |
+
def _pprint_chain_map(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.ChainMap as ClassName(map1, map2, ...)."""
    if not len(object.maps):
        stream.write(repr(object))
        return
    cls = object.__class__
    stream.write(cls.__name__ + '(')
    indent += len(cls.__name__) + 1
    for i, m in enumerate(object.maps):
        if i == len(object.maps) - 1:
            # Last map absorbs the closing ')' in its allowance.
            self._format(m, stream, indent, allowance + 1, context, level)
            stream.write(')')
        else:
            self._format(m, stream, indent, 1, context, level)
            stream.write(',\n' + ' ' * indent)

_dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map
|
| 514 |
+
|
| 515 |
+
def _pprint_deque(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.deque, appending maxlen when it is set."""
    if not len(object):
        stream.write(repr(object))
        return
    cls = object.__class__
    stream.write(cls.__name__ + '(')
    indent += len(cls.__name__) + 1
    stream.write('[')
    if object.maxlen is None:
        # allowance + 2 leaves room for the closing '])'.
        self._format_items(object, stream, indent, allowance + 2,
                           context, level)
        stream.write('])')
    else:
        # Only '],': the maxlen=... goes on its own following line.
        self._format_items(object, stream, indent, 2,
                           context, level)
        rml = self._repr(object.maxlen, context, level)
        stream.write('],\n%smaxlen=%s)' % (' ' * indent, rml))

_dispatch[_collections.deque.__repr__] = _pprint_deque
|
| 534 |
+
|
| 535 |
+
def _pprint_user_dict(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.UserDict via its underlying .data dict."""
    # level - 1: the wrapper is transparent, so it must not count
    # against the depth limit.
    self._format(object.data, stream, indent, allowance, context, level - 1)

_dispatch[_collections.UserDict.__repr__] = _pprint_user_dict
|
| 539 |
+
|
| 540 |
+
def _pprint_user_list(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.UserList via its underlying .data list."""
    # level - 1: transparent wrapper, see _pprint_user_dict.
    self._format(object.data, stream, indent, allowance, context, level - 1)

_dispatch[_collections.UserList.__repr__] = _pprint_user_list
|
| 544 |
+
|
| 545 |
+
def _pprint_user_string(self, object, stream, indent, allowance, context, level):
    """Pretty-print a collections.UserString via its underlying .data str."""
    # level - 1: transparent wrapper, see _pprint_user_dict.
    self._format(object.data, stream, indent, allowance, context, level - 1)

_dispatch[_collections.UserString.__repr__] = _pprint_user_string
|
| 549 |
+
|
| 550 |
+
def _safe_repr(self, object, context, maxlevels, level):
    """Compute a one-line repr of *object* with recursion protection.

    Returns the triple (repr_string, isreadable, isrecursive):
    ``isreadable`` means the string could be eval()'d back (heuristic:
    repr does not start with '<'); ``isrecursive`` means a self-reference
    was detected.  *context* maps id() of containers currently being
    rendered, *maxlevels*/*level* bound the nesting depth.
    """
    typ = type(object)
    if typ in _builtin_scalars:
        # Scalars: the builtin repr is exact and always readable.
        return repr(object), True, False

    r = getattr(typ, "__repr__", None)

    if issubclass(typ, int) and r is int.__repr__:
        if self._underscore_numbers:
            # e.g. 1000000 -> "1_000_000"
            return f"{object:_d}", True, False
        else:
            return repr(object), True, False

    # Only take the dict fast path when __repr__ is not overridden,
    # otherwise the subclass repr must be respected.
    if issubclass(typ, dict) and r is dict.__repr__:
        if not object:
            return "{}", True, False
        objid = id(object)
        if maxlevels and level >= maxlevels:
            return "{...}", False, objid in context
        if objid in context:
            # Already being rendered further up the stack: recursion.
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        if self._sort_dicts:
            # _safe_tuple sorts even unorderable keys deterministically.
            items = sorted(object.items(), key=_safe_tuple)
        else:
            items = object.items()
        for k, v in items:
            krepr, kreadable, krecur = self.format(
                k, context, maxlevels, level)
            vrepr, vreadable, vrecur = self.format(
                v, context, maxlevels, level)
            append("%s: %s" % (krepr, vrepr))
            readable = readable and kreadable and vreadable
            if krecur or vrecur:
                recursive = True
        del context[objid]
        return "{%s}" % ", ".join(components), readable, recursive

    if (issubclass(typ, list) and r is list.__repr__) or \
       (issubclass(typ, tuple) and r is tuple.__repr__):
        if issubclass(typ, list):
            if not object:
                return "[]", True, False
            format = "[%s]"
        elif len(object) == 1:
            # One-element tuple needs the trailing comma.
            format = "(%s,)"
        else:
            if not object:
                return "()", True, False
            format = "(%s)"
        objid = id(object)
        if maxlevels and level >= maxlevels:
            return format % "...", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        for o in object:
            orepr, oreadable, orecur = self.format(
                o, context, maxlevels, level)
            append(orepr)
            if not oreadable:
                readable = False
            if orecur:
                recursive = True
        del context[objid]
        return format % ", ".join(components), readable, recursive

    # Fallback for everything else: the object's own repr.  Reprs of
    # the form "<...>" are deemed unreadable (not eval()-able).
    rep = repr(object)
    return rep, (rep and not rep.startswith('<')), False
|
| 630 |
+
|
| 631 |
+
# Types whose builtin repr() is already exact, short and "readable";
# _safe_repr() returns repr(object) directly for these.
_builtin_scalars = frozenset({str, bytes, bytearray, float, complex,
                              bool, type(None)})
|
| 633 |
+
|
| 634 |
+
def _recursion(object):
|
| 635 |
+
return ("<Recursion on %s with id=%s>"
|
| 636 |
+
% (type(object).__name__, id(object)))
|
| 637 |
+
|
| 638 |
+
|
| 639 |
+
def _perfcheck(object=None):
    """Crude benchmark comparing raw _safe_repr() with full pformat().

    Prints the wall-clock time spent in each.  If *object* is None a
    large default fixture (100000 repeated tuples) is used.
    """
    import time
    if object is None:
        object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
    p = PrettyPrinter()
    t1 = time.perf_counter()
    # Bug fix: _safe_repr(self, object, context, maxlevels, level) takes
    # four arguments.  The old call passed a stale fifth "sort_dicts"
    # argument left over from the former module-level _safe_repr and
    # raised TypeError whenever _perfcheck() ran.
    p._safe_repr(object, {}, None, 0)
    t2 = time.perf_counter()
    p.pformat(object)
    t3 = time.perf_counter()
    print("_safe_repr:", t2 - t1)
    print("pformat:", t3 - t2)
|
| 651 |
+
|
| 652 |
+
def _wrap_bytes_repr(object, width, allowance):
|
| 653 |
+
current = b''
|
| 654 |
+
last = len(object) // 4 * 4
|
| 655 |
+
for i in range(0, len(object), 4):
|
| 656 |
+
part = object[i: i+4]
|
| 657 |
+
candidate = current + part
|
| 658 |
+
if i == last:
|
| 659 |
+
width -= allowance
|
| 660 |
+
if len(repr(candidate)) > width:
|
| 661 |
+
if current:
|
| 662 |
+
yield repr(current)
|
| 663 |
+
current = part
|
| 664 |
+
else:
|
| 665 |
+
current = candidate
|
| 666 |
+
if current:
|
| 667 |
+
yield repr(current)
|
| 668 |
+
|
| 669 |
+
if __name__ == "__main__":
    # Running this module directly executes the timing micro-benchmark.
    _perfcheck()
|
evalkit_cambrian/lib/python3.10/profile.py
ADDED
|
@@ -0,0 +1,611 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/bin/env python3
|
| 2 |
+
#
|
| 3 |
+
# Class for profiling python code. rev 1.0 6/2/94
|
| 4 |
+
#
|
| 5 |
+
# Written by James Roskind
|
| 6 |
+
# Based on prior profile module by Sjoerd Mullender...
|
| 7 |
+
# which was hacked somewhat by: Guido van Rossum
|
| 8 |
+
|
| 9 |
+
"""Class for profiling Python code."""
|
| 10 |
+
|
| 11 |
+
# Copyright Disney Enterprises, Inc. All Rights Reserved.
|
| 12 |
+
# Licensed to PSF under a Contributor Agreement
|
| 13 |
+
#
|
| 14 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 15 |
+
# you may not use this file except in compliance with the License.
|
| 16 |
+
# You may obtain a copy of the License at
|
| 17 |
+
#
|
| 18 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 19 |
+
#
|
| 20 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 21 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 22 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
| 23 |
+
# either express or implied. See the License for the specific language
|
| 24 |
+
# governing permissions and limitations under the License.
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
import io
|
| 28 |
+
import sys
|
| 29 |
+
import time
|
| 30 |
+
import marshal
|
| 31 |
+
|
| 32 |
+
__all__ = ["run", "runctx", "Profile"]
|
| 33 |
+
|
| 34 |
+
# Sample timer for use with
|
| 35 |
+
#i_count = 0
|
| 36 |
+
#def integer_timer():
|
| 37 |
+
# global i_count
|
| 38 |
+
# i_count = i_count + 1
|
| 39 |
+
# return i_count
|
| 40 |
+
#itimes = integer_timer # replace with C coded timer returning integers
|
| 41 |
+
|
| 42 |
+
class _Utils:
|
| 43 |
+
"""Support class for utility functions which are shared by
|
| 44 |
+
profile.py and cProfile.py modules.
|
| 45 |
+
Not supposed to be used directly.
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
def __init__(self, profiler):
|
| 49 |
+
self.profiler = profiler
|
| 50 |
+
|
| 51 |
+
def run(self, statement, filename, sort):
|
| 52 |
+
prof = self.profiler()
|
| 53 |
+
try:
|
| 54 |
+
prof.run(statement)
|
| 55 |
+
except SystemExit:
|
| 56 |
+
pass
|
| 57 |
+
finally:
|
| 58 |
+
self._show(prof, filename, sort)
|
| 59 |
+
|
| 60 |
+
def runctx(self, statement, globals, locals, filename, sort):
|
| 61 |
+
prof = self.profiler()
|
| 62 |
+
try:
|
| 63 |
+
prof.runctx(statement, globals, locals)
|
| 64 |
+
except SystemExit:
|
| 65 |
+
pass
|
| 66 |
+
finally:
|
| 67 |
+
self._show(prof, filename, sort)
|
| 68 |
+
|
| 69 |
+
def _show(self, prof, filename, sort):
|
| 70 |
+
if filename is not None:
|
| 71 |
+
prof.dump_stats(filename)
|
| 72 |
+
else:
|
| 73 |
+
prof.print_stats(sort)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
#**************************************************************************
|
| 77 |
+
# The following are the static member functions for the profiler class
|
| 78 |
+
# Note that an instance of Profile() is *not* needed to call them.
|
| 79 |
+
#**************************************************************************
|
| 80 |
+
|
| 81 |
+
def run(statement, filename=None, sort=-1):
    """Run statement under profiler optionally saving results in filename

    This function takes a single argument that can be passed to the
    "exec" statement, and an optional file name.  In all cases this
    routine attempts to "exec" its first argument and gather profiling
    statistics from the execution. If no file name is present, then this
    function automatically prints a simple profiling report, sorted by the
    standard name string (file/line/function-name) that is presented in
    each line.
    """
    driver = _Utils(Profile)
    return driver.run(statement, filename, sort)
|
| 93 |
+
|
| 94 |
+
def runctx(statement, globals, locals, filename=None, sort=-1):
    """Run statement under profiler, supplying your own globals and locals,
    optionally saving results in filename.

    statement and filename have the same semantics as profile.run
    """
    driver = _Utils(Profile)
    return driver.runctx(statement, globals, locals, filename, sort)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class Profile:
|
| 104 |
+
"""Profiler class.
|
| 105 |
+
|
| 106 |
+
self.cur is always a tuple. Each such tuple corresponds to a stack
|
| 107 |
+
frame that is currently active (self.cur[-2]). The following are the
|
| 108 |
+
definitions of its members. We use this external "parallel stack" to
|
| 109 |
+
avoid contaminating the program that we are profiling. (old profiler
|
| 110 |
+
used to write into the frames local dictionary!!) Derived classes
|
| 111 |
+
can change the definition of some entries, as long as they leave
|
| 112 |
+
[-2:] intact (frame and previous tuple). In case an internal error is
|
| 113 |
+
detected, the -3 element is used as the function name.
|
| 114 |
+
|
| 115 |
+
[ 0] = Time that needs to be charged to the parent frame's function.
|
| 116 |
+
It is used so that a function call will not have to access the
|
| 117 |
+
timing data for the parent frame.
|
| 118 |
+
[ 1] = Total time spent in this frame's function, excluding time in
|
| 119 |
+
subfunctions (this latter is tallied in cur[2]).
|
| 120 |
+
[ 2] = Total time spent in subfunctions, excluding time executing the
|
| 121 |
+
frame's function (this latter is tallied in cur[1]).
|
| 122 |
+
[-3] = Name of the function that corresponds to this frame.
|
| 123 |
+
[-2] = Actual frame that we correspond to (used to sync exception handling).
|
| 124 |
+
[-1] = Our parent 6-tuple (corresponds to frame.f_back).
|
| 125 |
+
|
| 126 |
+
Timing data for each function is stored as a 5-tuple in the dictionary
|
| 127 |
+
self.timings[]. The index is always the name stored in self.cur[-3].
|
| 128 |
+
The following are the definitions of the members:
|
| 129 |
+
|
| 130 |
+
[0] = The number of times this function was called, not counting direct
|
| 131 |
+
or indirect recursion,
|
| 132 |
+
[1] = Number of times this function appears on the stack, minus one
|
| 133 |
+
[2] = Total time spent internal to this function
|
| 134 |
+
[3] = Cumulative time that this function was present on the stack. In
|
| 135 |
+
non-recursive functions, this is the total execution time from start
|
| 136 |
+
to finish of each invocation of a function, including time spent in
|
| 137 |
+
all subfunctions.
|
| 138 |
+
[4] = A dictionary indicating for each function name, the number of times
|
| 139 |
+
it was called by us.
|
| 140 |
+
"""
|
| 141 |
+
|
| 142 |
+
bias = 0 # calibration constant
|
| 143 |
+
|
| 144 |
+
def __init__(self, timer=None, bias=None):
    """Set up timing state and pick a dispatcher for the given timer.

    *timer*: optional clock callable.  If omitted, time.process_time is
    used with the scalar dispatcher.  A custom timer may return either a
    scalar or a sequence of numbers (which are summed).
    *bias*: per-event calibration constant; defaults to the class-level
    Profile.bias.
    """
    self.timings = {}       # func id -> (cc, ns, tt, ct, callers)
    self.cur = None         # top of the parallel frame stack
    self.cmd = ""
    self.c_func_name = ""

    if bias is None:
        bias = self.bias
    self.bias = bias     # Materialize in local dict for lookup speed.

    if not timer:
        self.timer = self.get_time = time.process_time
        self.dispatcher = self.trace_dispatch_i
    else:
        self.timer = timer
        t = self.timer() # test out timer function
        try:
            # Probe the timer's return type: sequences need summing.
            length = len(t)
        except TypeError:
            # Scalar timer: fastest dispatcher.
            self.get_time = timer
            self.dispatcher = self.trace_dispatch_i
        else:
            if length == 2:
                # (user, system) pair: specialized dispatcher.
                self.dispatcher = self.trace_dispatch
            else:
                self.dispatcher = self.trace_dispatch_l
            # This get_time() implementation needs to be defined
            # here to capture the passed-in timer in the parameter
            # list (for performance).  Note that we can't assume
            # the timer() result contains two values in all
            # cases.
            def get_time_timer(timer=timer, sum=sum):
                return sum(timer())
            self.get_time = get_time_timer
    self.t = self.get_time()
    self.simulate_call('profiler')
|
| 180 |
+
|
| 181 |
+
# Heavily optimized dispatch routine for time.process_time() timer
|
| 182 |
+
|
| 183 |
+
def trace_dispatch(self, frame, event, arg):
    """Profile callback for timers returning a (user, system) pair.

    Computes the elapsed delta since the last event (minus bias),
    forwards to the per-event handler, then restarts the stopwatch.
    """
    timer = self.timer
    t = timer()
    t = t[0] + t[1] - self.t - self.bias

    if event == "c_call":
        # Remember the C callable's name for trace_dispatch_c_call.
        self.c_func_name = arg.__name__

    if self.dispatch[event](self, frame,t):
        t = timer()
        self.t = t[0] + t[1]
    else:
        r = timer()
        self.t = r[0] + r[1] - t # put back unrecorded delta
|
| 197 |
+
|
| 198 |
+
# Dispatch routine for best timer program (return = scalar, fastest if
|
| 199 |
+
# an integer but float works too -- and time.process_time() relies on that).
|
| 200 |
+
|
| 201 |
+
def trace_dispatch_i(self, frame, event, arg):
    """Profile callback for scalar-returning timers (the fast path)."""
    timer = self.timer
    t = timer() - self.t - self.bias

    if event == "c_call":
        self.c_func_name = arg.__name__

    if self.dispatch[event](self, frame, t):
        self.t = timer()
    else:
        self.t = timer() - t  # put back unrecorded delta
|
| 212 |
+
|
| 213 |
+
# Dispatch routine for macintosh (timer returns time in ticks of
|
| 214 |
+
# 1/60th second)
|
| 215 |
+
|
| 216 |
+
def trace_dispatch_mac(self, frame, event, arg):
    """Profile callback for a timer returning 1/60th-second ticks
    (historical Macintosh clock); converts ticks to seconds."""
    timer = self.timer
    t = timer()/60.0 - self.t - self.bias

    if event == "c_call":
        self.c_func_name = arg.__name__

    if self.dispatch[event](self, frame, t):
        self.t = timer()/60.0
    else:
        self.t = timer()/60.0 - t # put back unrecorded delta
|
| 227 |
+
|
| 228 |
+
# SLOW generic dispatch routine for timer returning lists of numbers
|
| 229 |
+
|
| 230 |
+
def trace_dispatch_l(self, frame, event, arg):
    """Generic (slowest) profile callback: works with any timer whose
    values are combined by self.get_time()."""
    get_time = self.get_time
    t = get_time() - self.t - self.bias

    if event == "c_call":
        self.c_func_name = arg.__name__

    if self.dispatch[event](self, frame, t):
        self.t = get_time()
    else:
        self.t = get_time() - t # put back unrecorded delta
|
| 241 |
+
|
| 242 |
+
# In the event handlers, the first 3 elements of self.cur are unpacked
|
| 243 |
+
# into vrbls w/ 3-letter names. The last two characters are meant to be
|
| 244 |
+
# mnemonic:
|
| 245 |
+
# _pt self.cur[0] "parent time" time to be charged to parent frame
|
| 246 |
+
# _it self.cur[1] "internal time" time spent directly in the function
|
| 247 |
+
# _et self.cur[2] "external time" time spent in subfunctions
|
| 248 |
+
|
| 249 |
+
def trace_dispatch_exception(self, frame, t):
    """Handle an 'exception' event: charge *t* to the current frame.

    If the exception is surfacing in a frame other than the one on top
    of our parallel stack, the top frame is actually exiting, so treat
    it as a return first.
    """
    rpt, rit, ret, rfn, rframe, rcur = self.cur
    if (rframe is not frame) and rcur:
        return self.trace_dispatch_return(rframe, t)
    # Add the elapsed time to the frame's internal time.
    self.cur = rpt, rit+t, ret, rfn, rframe, rcur
    return 1
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
def trace_dispatch_call(self, frame, t):
    """Handle a 'call' event: push a new entry on the parallel stack.

    Also re-synchronizes the stack when a generator/exception caused a
    missed 'return' event for the previous top frame.
    """
    if self.cur and frame.f_back is not self.cur[-2]:
        rpt, rit, ret, rfn, rframe, rcur = self.cur
        if not isinstance(rframe, Profile.fake_frame):
            assert rframe.f_back is frame.f_back, ("Bad call", rfn,
                                                   rframe, rframe.f_back,
                                                   frame, frame.f_back)
            # Force the stale top frame to return before recording
            # this call.
            self.trace_dispatch_return(rframe, 0)
            assert (self.cur is None or \
                    frame.f_back is self.cur[-2]), ("Bad call",
                                                    self.cur[-3])
    fcode = frame.f_code
    # Functions are keyed by (filename, first line, name).
    fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
    self.cur = (t, 0, 0, fn, frame, self.cur)
    timings = self.timings
    if fn in timings:
        # ns counts simultaneous occurrences on the stack (recursion).
        cc, ns, tt, ct, callers = timings[fn]
        timings[fn] = cc, ns + 1, tt, ct, callers
    else:
        timings[fn] = 0, 0, 0, 0, {}
    return 1
|
| 278 |
+
|
| 279 |
+
def trace_dispatch_c_call (self, frame, t):
    """Handle a 'c_call' event: push a pseudo-frame for the C function.

    C functions have no code object, so they are keyed by the name
    stashed in self.c_func_name by the dispatcher, with empty filename
    and line 0.
    """
    fn = ("", 0, self.c_func_name)
    self.cur = (t, 0, 0, fn, frame, self.cur)
    timings = self.timings
    if fn in timings:
        cc, ns, tt, ct, callers = timings[fn]
        timings[fn] = cc, ns+1, tt, ct, callers
    else:
        timings[fn] = 0, 0, 0, 0, {}
    return 1
|
| 289 |
+
|
| 290 |
+
def trace_dispatch_return(self, frame, t):
    """Handle a 'return' event: pop the parallel stack and fold the
    exiting frame's times into its parent and into self.timings."""
    if frame is not self.cur[-2]:
        # A missed return event (e.g. exhausted generator): unwind the
        # intervening frame first with zero elapsed time.
        assert frame is self.cur[-2].f_back, ("Bad return", self.cur[-3])
        self.trace_dispatch_return(self.cur[-2], 0)

    # Prefix "r" means part of the Returning or exiting frame.
    # Prefix "p" means part of the Previous or Parent or older frame.

    rpt, rit, ret, rfn, frame, rcur = self.cur
    rit = rit + t
    frame_total = rit + ret

    ppt, pit, pet, pfn, pframe, pcur = rcur
    # Parent inherits the deferred charge rpt and the child's total
    # as "external" (sub-function) time.
    self.cur = ppt, pit + rpt, pet + frame_total, pfn, pframe, pcur

    timings = self.timings
    cc, ns, tt, ct, callers = timings[rfn]
    if not ns:
        # This is the only occurrence of the function on the stack.
        # Else this is a (directly or indirectly) recursive call, and
        # its cumulative time will get updated when the topmost call to
        # it returns.
        ct = ct + frame_total
        cc = cc + 1

    if pfn in callers:
        callers[pfn] = callers[pfn] + 1  # hack: gather more
        # stats such as the amount of time added to ct courtesy
        # of this specific call, and the contribution to cc
        # courtesy of this call.
    else:
        callers[pfn] = 1

    timings[rfn] = cc, ns - 1, tt + rit, ct, callers

    return 1
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
# Table mapping sys.setprofile event names to handler methods.  Both
# "c_exception" and "c_return" route to trace_dispatch_return because
# either way the C function has finished executing.
dispatch = {
    "call": trace_dispatch_call,
    "exception": trace_dispatch_exception,
    "return": trace_dispatch_return,
    "c_call": trace_dispatch_c_call,
    "c_exception": trace_dispatch_return, # the C function returned
    "c_return": trace_dispatch_return,
    }
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
# The next few functions play with self.cmd. By carefully preloading
|
| 339 |
+
# our parallel stack, we can force the profiled result to include
|
| 340 |
+
# an arbitrary string as the name of the calling function.
|
| 341 |
+
# We use self.cmd as that string, and the resulting stats look
|
| 342 |
+
# very nice :-).
|
| 343 |
+
|
| 344 |
+
def set_cmd(self, cmd):
    """Label this profile with *cmd* by simulating an outermost call.

    Only effective once: a second call while frames are already on the
    parallel stack is ignored.
    """
    if self.cur[-1]: return # already set
    self.cmd = cmd
    self.simulate_call(cmd)
|
| 348 |
+
|
| 349 |
+
class fake_code:
    """Minimal stand-in for a code object, used to label synthetic frames.

    Only carries the co_* attributes the profiler reads.
    """

    def __init__(self, filename, line, name):
        self.co_filename = filename
        self.co_line = line
        self.co_name = name
        self.co_firstlineno = 0

    def __repr__(self):
        ident = (self.co_filename, self.co_line, self.co_name)
        return repr(ident)
|
| 358 |
+
|
| 359 |
+
class fake_frame:
    """Minimal stand-in for a frame object: just f_code and f_back."""

    def __init__(self, code, prior):
        self.f_code = code
        self.f_back = prior
|
| 363 |
+
|
| 364 |
+
def simulate_call(self, name):
    """Push a synthetic frame named *name* onto the parallel stack by
    feeding a fake frame through the normal 'call' handler."""
    code = self.fake_code('profile', 0, name)
    if self.cur:
        pframe = self.cur[-2]
    else:
        pframe = None
    frame = self.fake_frame(code, pframe)
    # Zero elapsed time: this call is purely structural.
    self.dispatch['call'](self, frame, 0)
|
| 372 |
+
|
| 373 |
+
# collect stats from pending stack, including getting final
|
| 374 |
+
# timings for self.cmd frame.
|
| 375 |
+
|
| 376 |
+
def simulate_cmd_complete(self):
    """Unwind all pending parallel-stack frames, charging the elapsed
    time since the last event to the innermost one."""
    get_time = self.get_time
    t = get_time() - self.t
    while self.cur[-1]:
        # We *can* cause assertion errors here if
        # dispatch_trace_return checks for a frame match!
        self.dispatch['return'](self, self.cur[-2], t)
        # Only the first (innermost) frame gets the elapsed delta.
        t = 0
    self.t = get_time() - t
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
def print_stats(self, sort=-1):
    """Build a pstats.Stats from this profiler and print it, with
    directory prefixes stripped and entries ordered by *sort*."""
    import pstats
    pstats.Stats(self).strip_dirs().sort_stats(sort). \
          print_stats()
|
| 391 |
+
|
| 392 |
+
def dump_stats(self, file):
    """Finalize the collected timings and marshal self.stats to the
    path *file* (binary, readable by pstats.Stats)."""
    with open(file, 'wb') as f:
        self.create_stats()
        marshal.dump(self.stats, f)
|
| 396 |
+
|
| 397 |
+
def create_stats(self):
    """Close out any frames still on the stack, then snapshot the
    timings into self.stats."""
    self.simulate_cmd_complete()
    self.snapshot_stats()
|
| 400 |
+
|
| 401 |
+
def snapshot_stats(self):
    """Condense self.timings into the pstats-compatible self.stats dict.

    Each entry becomes (cc, nc, tt, ct, callers) where nc is the total
    number of calls summed over all callers.
    """
    stats = {}
    for func, (cc, ns, tt, ct, callers) in self.timings.items():
        # Copy so further profiling cannot mutate the snapshot.
        callers = callers.copy()
        nc = sum(callers.values())
        stats[func] = cc, nc, tt, ct, callers
    self.stats = stats
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
# The following two methods can be called by clients to use
|
| 412 |
+
# a profiler to profile a statement, given as a string.
|
| 413 |
+
|
| 414 |
+
def run(self, cmd):
    """Profile the statement string *cmd* in __main__'s namespace."""
    import __main__
    dict = __main__.__dict__
    return self.runctx(cmd, dict, dict)
|
| 418 |
+
|
| 419 |
+
def runctx(self, cmd, globals, locals):
    """Profile exec(cmd, globals, locals); the profile hook is always
    removed afterwards, even if the statement raises."""
    self.set_cmd(cmd)
    sys.setprofile(self.dispatcher)
    try:
        exec(cmd, globals, locals)
    finally:
        sys.setprofile(None)
    return self
|
| 427 |
+
|
| 428 |
+
# This method is more useful to profile a single function call.
|
| 429 |
+
def runcall(self, func, /, *args, **kw):
|
| 430 |
+
self.set_cmd(repr(func))
|
| 431 |
+
sys.setprofile(self.dispatcher)
|
| 432 |
+
try:
|
| 433 |
+
return func(*args, **kw)
|
| 434 |
+
finally:
|
| 435 |
+
sys.setprofile(None)
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
#******************************************************************
|
| 439 |
+
# The following calculates the overhead for using a profiler. The
|
| 440 |
+
# problem is that it takes a fair amount of time for the profiler
|
| 441 |
+
# to stop the stopwatch (from the time it receives an event).
|
| 442 |
+
# Similarly, there is a delay from the time that the profiler
|
| 443 |
+
# re-starts the stopwatch before the user's code really gets to
|
| 444 |
+
# continue. The following code tries to measure the difference on
|
| 445 |
+
# a per-event basis.
|
| 446 |
+
#
|
| 447 |
+
# Note that this difference is only significant if there are a lot of
|
| 448 |
+
# events, and relatively little user code per event. For example,
|
| 449 |
+
# code with small functions will typically benefit from having the
|
| 450 |
+
# profiler calibrated for the current platform. This *could* be
|
| 451 |
+
# done on the fly during init() time, but it is not worth the
|
| 452 |
+
# effort. Also note that if too large a value is specified, then
|
| 453 |
+
# execution time on some functions will actually appear as a
|
| 454 |
+
# negative number. It is *normal* for some functions (with very
|
| 455 |
+
# low call counts) to have such negative stats, even if the
|
| 456 |
+
# calibration figure is "correct."
|
| 457 |
+
#
|
| 458 |
+
# One alternative to profile-time calibration adjustments (i.e.,
|
| 459 |
+
# adding in the magic little delta during each event) is to track
|
| 460 |
+
# more carefully the number of events (and cumulatively, the number
|
| 461 |
+
# of events during sub functions) that are seen. If this were
|
| 462 |
+
# done, then the arithmetic could be done after the fact (i.e., at
|
| 463 |
+
# display time). Currently, we track only call/return events.
|
| 464 |
+
# These values can be deduced by examining the callees and callers
|
| 465 |
+
# vectors for each function. Hence we *can* almost correct the
|
| 466 |
+
# internal time figure at print time (note that we currently don't
|
| 467 |
+
# track exception event processing counts). Unfortunately, there
|
| 468 |
+
# is currently no similar information for cumulative sub-function
|
| 469 |
+
# time. It would not be hard to "get all this info" at profiler
|
| 470 |
+
# time. Specifically, we would have to extend the tuples to keep
|
| 471 |
+
# counts of this in each frame, and then extend the defs of timing
|
| 472 |
+
# tuples to include the significant two figures. I'm a bit fearful
|
| 473 |
+
# that this additional feature will slow the heavily optimized
|
| 474 |
+
# event/time ratio (i.e., the profiler would run slower, for a very
|
| 475 |
+
# low "value added" feature.)
|
| 476 |
+
#**************************************************************
|
| 477 |
+
|
| 478 |
+
def calibrate(self, m, verbose=0):
|
| 479 |
+
if self.__class__ is not Profile:
|
| 480 |
+
raise TypeError("Subclasses must override .calibrate().")
|
| 481 |
+
|
| 482 |
+
saved_bias = self.bias
|
| 483 |
+
self.bias = 0
|
| 484 |
+
try:
|
| 485 |
+
return self._calibrate_inner(m, verbose)
|
| 486 |
+
finally:
|
| 487 |
+
self.bias = saved_bias
|
| 488 |
+
|
| 489 |
+
    def _calibrate_inner(self, m, verbose):
        """Run the calibration benchmark and return the mean per-event
        stopwatch overhead.

        Times a call-heavy workload with and without profiling, compares
        the profiler's own "CPU seconds" figure against the unprofiled
        wall time, and divides the discrepancy over the number of
        profile events generated.
        """
        get_time = self.get_time

        # Set up a test case to be run with and without profiling.  Include
        # lots of calls, because we're trying to quantify stopwatch overhead.
        # Do not raise any exceptions, though, because we want to know
        # exactly how many profile events are generated (one call event, +
        # one return event, per Python-level call).

        def f1(n):
            for i in range(n):
                x = 1

        def f(m, f1=f1):
            for i in range(m):
                f1(100)

        f(m)    # warm up the cache

        # elapsed_noprofile <- time f(m) takes without profiling.
        t0 = get_time()
        f(m)
        t1 = get_time()
        elapsed_noprofile = t1 - t0
        if verbose:
            print("elapsed time without profiling =", elapsed_noprofile)

        # elapsed_profile <- time f(m) takes with profiling.  The difference
        # is profiling overhead, only some of which the profiler subtracts
        # out on its own.
        p = Profile()
        t0 = get_time()
        p.runctx('f(m)', globals(), locals())
        t1 = get_time()
        elapsed_profile = t1 - t0
        if verbose:
            print("elapsed time with profiling =", elapsed_profile)

        # reported_time <- "CPU seconds" the profiler charged to f and f1.
        total_calls = 0.0
        reported_time = 0.0
        for (filename, line, funcname), (cc, ns, tt, ct, callers) in \
                p.timings.items():
            if funcname in ("f", "f1"):
                total_calls += cc
                reported_time += tt

        if verbose:
            print("'CPU seconds' profiler reported =", reported_time)
            print("total # calls =", total_calls)
        # m calls of f1 plus the one call of f itself.
        if total_calls != m + 1:
            raise ValueError("internal error: total calls = %d" % total_calls)

        # reported_time - elapsed_noprofile = overhead the profiler wasn't
        # able to measure.  Divide by twice the number of calls (since there
        # are two profiler events per call in this test) to get the hidden
        # overhead per event.
        mean = (reported_time - elapsed_noprofile) / 2.0 / total_calls
        if verbose:
            print("mean stopwatch overhead per profile event =", mean)
        return mean
|
| 550 |
+
|
| 551 |
+
#****************************************************************************
|
| 552 |
+
|
| 553 |
+
def main():
    """Command-line entry point: profile a script or module.

    Parses `profile.py [-o outfile] [-s sort] [-m module | scriptfile]
    [arg] ...`, rewrites sys.argv for the profiled code, and runs it
    under the module-level runctx() helper.  Returns the OptionParser
    (useful for tests).
    """
    import os
    from optparse import OptionParser

    usage = "profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
    parser = OptionParser(usage=usage)
    # Everything after the script/module name belongs to the profiled
    # program, not to us.
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile",
        help="Save stats to <outfile>", default=None)
    parser.add_option('-m', dest="module", action="store_true",
        help="Profile a library module.", default=False)
    parser.add_option('-s', '--sort', dest="sort",
        help="Sort order when printing to stdout, based on pstats.Stats class",
        default=-1)

    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)

    (options, args) = parser.parse_args()
    # Make the profiled program see only its own arguments.
    sys.argv[:] = args

    # The script that we're profiling may chdir, so capture the absolute path
    # to the output file at startup.
    if options.outfile is not None:
        options.outfile = os.path.abspath(options.outfile)

    if len(args) > 0:
        if options.module:
            # Run the target as a module via runpy, mimicking `python -m`.
            import runpy
            code = "run_module(modname, run_name='__main__')"
            globs = {
                'run_module': runpy.run_module,
                'modname': args[0]
            }
        else:
            # Run the target as a script: compile its source and execute
            # it with a __main__-like namespace.
            progname = args[0]
            sys.path.insert(0, os.path.dirname(progname))
            with io.open_code(progname) as fp:
                code = compile(fp.read(), progname, 'exec')
            globs = {
                '__file__': progname,
                '__name__': '__main__',
                '__package__': None,
                '__cached__': None,
            }
        try:
            runctx(code, globs, None, options.outfile, options.sort)
        except BrokenPipeError as exc:
            # Prevent "Exception ignored" during interpreter shutdown.
            sys.stdout = None
            sys.exit(exc.errno)
    else:
        parser.print_usage()
    return parser
|
| 608 |
+
|
| 609 |
+
# When invoked as the main program, run the command-line profiler entry
# point on the given script or module.
if __name__ == '__main__':
    main()
|
evalkit_cambrian/lib/python3.10/pty.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Pseudo terminal utilities."""
|
| 2 |
+
|
| 3 |
+
# Bugs: No signal handling. Doesn't set slave termios and window size.
|
| 4 |
+
# Only tested on Linux, FreeBSD, and macOS.
|
| 5 |
+
# See: W. Richard Stevens. 1992. Advanced Programming in the
|
| 6 |
+
# UNIX Environment. Chapter 19.
|
| 7 |
+
# Author: Steen Lumholt -- with additions by Guido.
|
| 8 |
+
|
| 9 |
+
from select import select
|
| 10 |
+
import os
|
| 11 |
+
import sys
|
| 12 |
+
import tty
|
| 13 |
+
|
| 14 |
+
# names imported directly for test mocking purposes
|
| 15 |
+
from os import close, waitpid
|
| 16 |
+
from tty import setraw, tcgetattr, tcsetattr
|
| 17 |
+
|
| 18 |
+
__all__ = ["openpty", "fork", "spawn"]
|
| 19 |
+
|
| 20 |
+
STDIN_FILENO = 0
|
| 21 |
+
STDOUT_FILENO = 1
|
| 22 |
+
STDERR_FILENO = 2
|
| 23 |
+
|
| 24 |
+
CHILD = 0
|
| 25 |
+
|
| 26 |
+
def openpty():
    """openpty() -> (master_fd, slave_fd)
    Open a pty master/slave pair, using os.openpty() if possible."""

    try:
        return os.openpty()
    except (AttributeError, OSError):
        # os.openpty() missing or failed; fall back to the legacy
        # BSD-style device scan.
        pass
    master_fd, slave_name = _open_terminal()
    return master_fd, slave_open(slave_name)
|
| 37 |
+
|
| 38 |
+
def master_open():
    """master_open() -> (master_fd, slave_name)
    Open a pty master and return the fd, and the filename of the slave end.
    Deprecated, use openpty() instead."""

    try:
        master_fd, slave_fd = os.openpty()
    except (AttributeError, OSError):
        # os.openpty() unavailable; fall through to the legacy scan.
        pass
    else:
        # We only need the slave's name here, not an open fd to it.
        slave_name = os.ttyname(slave_fd)
        os.close(slave_fd)
        return master_fd, slave_name

    return _open_terminal()
|
| 53 |
+
|
| 54 |
+
def _open_terminal():
|
| 55 |
+
"""Open pty master and return (master_fd, tty_name)."""
|
| 56 |
+
for x in 'pqrstuvwxyzPQRST':
|
| 57 |
+
for y in '0123456789abcdef':
|
| 58 |
+
pty_name = '/dev/pty' + x + y
|
| 59 |
+
try:
|
| 60 |
+
fd = os.open(pty_name, os.O_RDWR)
|
| 61 |
+
except OSError:
|
| 62 |
+
continue
|
| 63 |
+
return (fd, '/dev/tty' + x + y)
|
| 64 |
+
raise OSError('out of pty devices')
|
| 65 |
+
|
| 66 |
+
def slave_open(tty_name):
    """slave_open(tty_name) -> slave_fd
    Open the pty slave and acquire the controlling terminal, returning
    opened filedescriptor.
    Deprecated, use openpty() instead."""

    slave_fd = os.open(tty_name, os.O_RDWR)
    try:
        from fcntl import ioctl, I_PUSH
    except ImportError:
        # No STREAMS support on this platform (e.g. Linux); the plain fd
        # is already fully usable.
        return slave_fd
    try:
        # Push the terminal-emulation STREAMS modules (SysV-style ptys).
        ioctl(slave_fd, I_PUSH, "ptem")
        ioctl(slave_fd, I_PUSH, "ldterm")
    except OSError:
        # Best-effort: the fd still works without the modules.
        pass
    return slave_fd
|
| 83 |
+
|
| 84 |
+
def fork():
    """fork() -> (pid, master_fd)
    Fork and make the child a session leader with a controlling terminal.

    Prefers os.forkpty(); if that is unavailable or fails, falls back to
    openpty() + os.fork() and wires the slave up by hand in the child.
    In the child the return value is (0, invalid fd); in the parent it is
    (child_pid, master_fd).
    """

    try:
        pid, fd = os.forkpty()
    except (AttributeError, OSError):
        # os.forkpty() missing or failed; use the manual fallback below.
        pass
    else:
        if pid == CHILD:
            try:
                os.setsid()
            except OSError:
                # os.forkpty() already set us session leader
                pass
        return pid, fd

    master_fd, slave_fd = openpty()
    pid = os.fork()
    if pid == CHILD:
        # Establish a new session.
        os.setsid()
        os.close(master_fd)

        # Slave becomes stdin/stdout/stderr of child.
        os.dup2(slave_fd, STDIN_FILENO)
        os.dup2(slave_fd, STDOUT_FILENO)
        os.dup2(slave_fd, STDERR_FILENO)
        # Avoid leaking the original slave fd once it is duplicated
        # onto the standard descriptors.
        if slave_fd > STDERR_FILENO:
            os.close(slave_fd)

        # Explicitly open the tty to make it become a controlling tty.
        tmp_fd = os.open(os.ttyname(STDOUT_FILENO), os.O_RDWR)
        os.close(tmp_fd)
    else:
        os.close(slave_fd)

    # Parent and child process.
    return pid, master_fd
|
| 123 |
+
|
| 124 |
+
def _writen(fd, data):
|
| 125 |
+
"""Write all the data to a descriptor."""
|
| 126 |
+
while data:
|
| 127 |
+
n = os.write(fd, data)
|
| 128 |
+
data = data[n:]
|
| 129 |
+
|
| 130 |
+
def _read(fd):
|
| 131 |
+
"""Default read function."""
|
| 132 |
+
return os.read(fd, 1024)
|
| 133 |
+
|
| 134 |
+
def _copy(master_fd, master_read=_read, stdin_read=_read):
    """Parent copy loop.
    Copies
            pty master -> standard output   (master_read)
            standard input -> pty master    (stdin_read)

    Returns when the master side reaches EOF (child presumed gone).
    Stops forwarding stdin once stdin itself hits EOF, but keeps
    draining the master.
    """
    # fds we still want to select() on; stdin is dropped on its EOF.
    fds = [master_fd, STDIN_FILENO]
    while fds:
        rfds, _wfds, _xfds = select(fds, [], [])

        if master_fd in rfds:
            # Some OSes signal EOF by returning an empty byte string,
            # some throw OSErrors.
            try:
                data = master_read(master_fd)
            except OSError:
                data = b""
            if not data:  # Reached EOF.
                return    # Assume the child process has exited and is
                          # unreachable, so we clean up.
            else:
                os.write(STDOUT_FILENO, data)

        if STDIN_FILENO in rfds:
            data = stdin_read(STDIN_FILENO)
            if not data:
                # Stdin closed: stop watching it, but keep copying the
                # child's remaining output.
                fds.remove(STDIN_FILENO)
            else:
                _writen(master_fd, data)
|
| 162 |
+
|
| 163 |
+
def spawn(argv, master_read=_read, stdin_read=_read):
    """Create a spawned process.

    *argv* is either a string naming the program, or a sequence whose
    first item is the program and whose remaining items are its
    arguments.  The child runs attached to a new pseudo-terminal while
    this process copies data between the pty and its own stdin/stdout.
    Returns the waitpid() exit status of the child.
    """
    # Accept str subclasses too: the previous exact-type check
    # (type(argv) == type('')) let a str subclass fall through and be
    # treated as a sequence, exec'ing just its first character.
    if isinstance(argv, str):
        argv = (argv,)
    sys.audit('pty.spawn', argv)

    pid, master_fd = fork()
    if pid == CHILD:
        os.execlp(argv[0], *argv)

    # Put our controlling terminal into raw mode so keystrokes reach the
    # child unmodified; skip (and don't restore) if stdin is not a tty.
    try:
        mode = tcgetattr(STDIN_FILENO)
        setraw(STDIN_FILENO)
        restore = True
    except tty.error:    # This is the same as termios.error
        restore = False

    try:
        _copy(master_fd, master_read, stdin_read)
    finally:
        # Always restore the terminal settings we changed.
        if restore:
            tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)

    close(master_fd)
    return waitpid(pid, 0)[1]
|
evalkit_cambrian/lib/python3.10/pyclbr.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Parse a Python module and describe its classes and functions.
|
| 2 |
+
|
| 3 |
+
Parse enough of a Python file to recognize imports and class and
|
| 4 |
+
function definitions, and to find out the superclasses of a class.
|
| 5 |
+
|
| 6 |
+
The interface consists of a single function:
|
| 7 |
+
readmodule_ex(module, path=None)
|
| 8 |
+
where module is the name of a Python module, and path is an optional
|
| 9 |
+
list of directories where the module is to be searched. If present,
|
| 10 |
+
path is prepended to the system search path sys.path. The return value
|
| 11 |
+
is a dictionary. The keys of the dictionary are the names of the
|
| 12 |
+
classes and functions defined in the module (including classes that are
|
| 13 |
+
defined via the from XXX import YYY construct). The values are
|
| 14 |
+
instances of classes Class and Function. One special key/value pair is
|
| 15 |
+
present for packages: the key '__path__' has a list as its value which
|
| 16 |
+
contains the package search path.
|
| 17 |
+
|
| 18 |
+
Classes and Functions have a common superclass: _Object. Every instance
|
| 19 |
+
has the following attributes:
|
| 20 |
+
module -- name of the module;
|
| 21 |
+
name -- name of the object;
|
| 22 |
+
file -- file in which the object is defined;
|
| 23 |
+
lineno -- line in the file where the object's definition starts;
|
| 24 |
+
end_lineno -- line in the file where the object's definition ends;
|
| 25 |
+
parent -- parent of this object, if any;
|
| 26 |
+
children -- nested objects contained in this object.
|
| 27 |
+
The 'children' attribute is a dictionary mapping names to objects.
|
| 28 |
+
|
| 29 |
+
Instances of Function describe functions with the attributes from _Object,
|
| 30 |
+
plus the following:
|
| 31 |
+
is_async -- if a function is defined with an 'async' prefix
|
| 32 |
+
|
| 33 |
+
Instances of Class describe classes with the attributes from _Object,
|
| 34 |
+
plus the following:
|
| 35 |
+
super -- list of super classes (Class instances if possible);
|
| 36 |
+
methods -- mapping of method names to beginning line numbers.
|
| 37 |
+
If the name of a super class is not recognized, the corresponding
|
| 38 |
+
entry in the list of super classes is not a class instance but a
|
| 39 |
+
string giving the name of the super class. Since import statements
|
| 40 |
+
are recognized and imported modules are scanned as well, this
|
| 41 |
+
shouldn't happen often.
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
import ast
|
| 45 |
+
import sys
|
| 46 |
+
import importlib.util
|
| 47 |
+
|
| 48 |
+
__all__ = ["readmodule", "readmodule_ex", "Class", "Function"]
|
| 49 |
+
|
| 50 |
+
_modules = {} # Initialize cache of modules we've seen.
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class _Object:
|
| 54 |
+
"Information about Python class or function."
|
| 55 |
+
def __init__(self, module, name, file, lineno, end_lineno, parent):
|
| 56 |
+
self.module = module
|
| 57 |
+
self.name = name
|
| 58 |
+
self.file = file
|
| 59 |
+
self.lineno = lineno
|
| 60 |
+
self.end_lineno = end_lineno
|
| 61 |
+
self.parent = parent
|
| 62 |
+
self.children = {}
|
| 63 |
+
if parent is not None:
|
| 64 |
+
parent.children[name] = self
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# Odd Function and Class signatures are for back-compatibility.
|
| 68 |
+
class Function(_Object):
    "Information about a Python function, including methods."

    def __init__(self, module, name, file, lineno,
                 parent=None, is_async=False, *, end_lineno=None):
        # The positional layout predates end_lineno, which is keyword-only
        # so existing positional call sites keep working.
        super().__init__(module, name, file, lineno, end_lineno, parent)
        self.is_async = is_async
        if isinstance(parent, Class):
            # A function nested directly in a class is a method: record
            # it in the owning class's method table as well.
            parent.methods[name] = lineno
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class Class(_Object):
    "Information about a Python class."

    def __init__(self, module, name, super_, file, lineno,
                 parent=None, *, end_lineno=None):
        super().__init__(module, name, file, lineno, end_lineno, parent)
        # super_ may be None (or empty) meaning "no recorded bases".
        self.super = super_ or []
        # method name -> line number of its def; filled in by Function.
        self.methods = {}
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
# These 2 functions are used in these tests
|
| 88 |
+
# Lib/test/test_pyclbr, Lib/idlelib/idle_test/test_browser.py
|
| 89 |
+
def _nest_function(ob, func_name, lineno, end_lineno, is_async=False):
    """Create and return a Function nested inside *ob*."""
    return Function(
        ob.module, func_name, ob.file, lineno,
        parent=ob, is_async=is_async, end_lineno=end_lineno,
    )
|
| 93 |
+
|
| 94 |
+
def _nest_class(ob, class_name, lineno, end_lineno, super=None):
    """Create and return a Class nested inside *ob*."""
    return Class(
        ob.module, class_name, super, ob.file, lineno,
        parent=ob, end_lineno=end_lineno,
    )
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def readmodule(module, path=None):
    """Return Class objects for the top-level classes in module.

    This is the original interface, before Functions were added.
    """
    # Filter the full tree down to just the Class entries.
    return {name: obj
            for name, obj in _readmodule(module, path or []).items()
            if isinstance(obj, Class)}
|
| 111 |
+
|
| 112 |
+
def readmodule_ex(module, path=None):
    """Return a dictionary with all functions and classes in module.

    Search for module in PATH + sys.path.
    If possible, include imported superclasses.
    Do this by reading source, without importing (and executing) it.
    """
    search_dirs = path or []
    return _readmodule(module, search_dirs)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _readmodule(module, path, inpackage=None):
    """Do the hard work for readmodule[_ex].

    If inpackage is given, it must be the dotted name of the package in
    which we are searching for a submodule, and then PATH must be the
    package search path; otherwise, we are searching for a top-level
    module, and path is combined with sys.path.

    The resulting tree (a dict of Class/Function objects, plus a
    '__path__' entry for packages) is cached in _modules under the full
    dotted name before parsing, so recursive imports terminate.
    """
    # Compute the full module name (prepending inpackage if set).
    if inpackage is not None:
        fullmodule = "%s.%s" % (inpackage, module)
    else:
        fullmodule = module

    # Check in the cache.
    if fullmodule in _modules:
        return _modules[fullmodule]

    # Initialize the dict for this module's contents.
    tree = {}

    # Check if it is a built-in module; we don't do much for these.
    if module in sys.builtin_module_names and inpackage is None:
        _modules[module] = tree
        return tree

    # Check for a dotted module name: read the package first, then
    # recurse into it for the submodule.
    i = module.rfind('.')
    if i >= 0:
        package = module[:i]
        submodule = module[i+1:]
        parent = _readmodule(package, path, inpackage)
        if inpackage is not None:
            package = "%s.%s" % (inpackage, package)
        if not '__path__' in parent:
            raise ImportError('No package named {}'.format(package))
        return _readmodule(submodule, parent['__path__'], package)

    # Search the path for the module.
    f = None  # NOTE(review): unused in this function; appears vestigial.
    if inpackage is not None:
        search_path = path
    else:
        search_path = path + sys.path
    # Locate the module without importing (and executing) it.
    spec = importlib.util._find_spec_from_path(fullmodule, search_path)
    if spec is None:
        raise ModuleNotFoundError(f"no module named {fullmodule!r}", name=fullmodule)
    # Cache before parsing so self/circular imports hit the cache.
    _modules[fullmodule] = tree
    # Is module a package?
    if spec.submodule_search_locations is not None:
        tree['__path__'] = spec.submodule_search_locations
    try:
        source = spec.loader.get_source(fullmodule)
    except (AttributeError, ImportError):
        # If module is not Python source, we cannot do anything.
        return tree
    else:
        if source is None:
            return tree

    fname = spec.loader.get_filename(fullmodule)
    return _create_tree(fullmodule, path, fname, source, tree, inpackage)
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
class _ModuleBrowser(ast.NodeVisitor):
    """AST visitor that populates *tree* with the Class and Function
    objects defined in one module's source.

    self.stack tracks the chain of enclosing definitions so nested
    classes/functions are attached to the right parent.
    """

    def __init__(self, module, path, file, tree, inpackage):
        self.path = path            # search path for imported modules
        self.tree = tree            # top-level name -> Class/Function
        self.file = file            # source file being browsed
        self.module = module        # dotted module name
        self.inpackage = inpackage  # enclosing package name, or None
        self.stack = []             # enclosing Class/Function objects

    def visit_ClassDef(self, node):
        # Resolve each base expression to a known Class where possible;
        # otherwise keep the unparsed source text of the base.
        bases = []
        for base in node.bases:
            name = ast.unparse(base)
            if name in self.tree:
                # We know this super class.
                bases.append(self.tree[name])
            elif len(names := name.split(".")) > 1:
                # Super class form is module.class:
                # look in module for class.
                *_, module, class_ = names
                if module in _modules:
                    bases.append(_modules[module].get(class_, name))
            else:
                bases.append(name)

        parent = self.stack[-1] if self.stack else None
        class_ = Class(self.module, node.name, bases, self.file, node.lineno,
                       parent=parent, end_lineno=node.end_lineno)
        # Only top-level definitions go directly into the module tree;
        # nested ones are reachable via their parent's children.
        if parent is None:
            self.tree[node.name] = class_
        self.stack.append(class_)
        self.generic_visit(node)
        self.stack.pop()

    def visit_FunctionDef(self, node, *, is_async=False):
        parent = self.stack[-1] if self.stack else None
        function = Function(self.module, node.name, self.file, node.lineno,
                            parent, is_async, end_lineno=node.end_lineno)
        if parent is None:
            self.tree[node.name] = function
        self.stack.append(function)
        self.generic_visit(node)
        self.stack.pop()

    def visit_AsyncFunctionDef(self, node):
        # Same handling as a plain def, just flagged async.
        self.visit_FunctionDef(node, is_async=True)

    def visit_Import(self, node):
        # Only follow module-level imports (column 0).
        if node.col_offset != 0:
            return

        for module in node.names:
            try:
                try:
                    _readmodule(module.name, self.path, self.inpackage)
                except ImportError:
                    # Retry as a top-level module with an empty path.
                    _readmodule(module.name, [])
            except (ImportError, SyntaxError):
                # If we can't find or parse the imported module,
                # too bad -- don't die here.
                continue

    def visit_ImportFrom(self, node):
        # Only follow module-level imports (column 0).
        if node.col_offset != 0:
            return
        try:
            # Leading dots encode the relative-import level.
            module = "." * node.level
            if node.module:
                module += node.module
            module = _readmodule(module, self.path, self.inpackage)
        except (ImportError, SyntaxError):
            return

        for name in node.names:
            if name.name in module:
                self.tree[name.asname or name.name] = module[name.name]
            elif name.name == "*":
                # Star import: copy every public name from the module.
                for import_name, import_value in module.items():
                    if import_name.startswith("_"):
                        continue
                    self.tree[import_name] = import_value
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
def _create_tree(fullmodule, path, fname, source, tree, inpackage):
    """Parse *source* and fill *tree* with its classes and functions."""
    browser = _ModuleBrowser(fullmodule, path, fname, tree, inpackage)
    browser.visit(ast.parse(source))
    return browser.tree
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def _main():
    """Print module output (default this file) for quick visual check.

    Takes an optional module name or file path as sys.argv[1] and prints
    an indented outline of its classes and functions.
    """
    import os
    try:
        mod = sys.argv[1]
    except IndexError:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only a missing argument should fall back.
        mod = __file__
    if os.path.exists(mod):
        # A file path was given: search its directory, browse its stem.
        path = [os.path.dirname(mod)]
        mod = os.path.basename(mod)
        if mod.lower().endswith(".py"):
            mod = mod[:-3]
    else:
        path = []
    tree = readmodule_ex(mod, path)
    lineno_key = lambda a: getattr(a, 'lineno', 0)
    # Depth-first walk using an explicit stack, popping in line order.
    objs = sorted(tree.values(), key=lineno_key, reverse=True)
    indent_level = 2
    while objs:
        obj = objs.pop()
        if isinstance(obj, list):
            # Value is a __path__ key.
            continue
        if not hasattr(obj, 'indent'):
            obj.indent = 0

        if isinstance(obj, _Object):
            new_objs = sorted(obj.children.values(),
                              key=lineno_key, reverse=True)
            for ob in new_objs:
                ob.indent = obj.indent + indent_level
            objs.extend(new_objs)
        if isinstance(obj, Class):
            print("{}class {} {} {}"
                  .format(' ' * obj.indent, obj.name, obj.super, obj.lineno))
        elif isinstance(obj, Function):
            print("{}def {} {}".format(' ' * obj.indent, obj.name, obj.lineno))

if __name__ == "__main__":
    _main()
|
evalkit_cambrian/lib/python3.10/pydoc.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_cambrian/lib/python3.10/random.py
ADDED
|
@@ -0,0 +1,930 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Random variable generators.
|
| 2 |
+
|
| 3 |
+
bytes
|
| 4 |
+
-----
|
| 5 |
+
uniform bytes (values between 0 and 255)
|
| 6 |
+
|
| 7 |
+
integers
|
| 8 |
+
--------
|
| 9 |
+
uniform within range
|
| 10 |
+
|
| 11 |
+
sequences
|
| 12 |
+
---------
|
| 13 |
+
pick random element
|
| 14 |
+
pick random sample
|
| 15 |
+
pick weighted random sample
|
| 16 |
+
generate random permutation
|
| 17 |
+
|
| 18 |
+
distributions on the real line:
|
| 19 |
+
------------------------------
|
| 20 |
+
uniform
|
| 21 |
+
triangular
|
| 22 |
+
normal (Gaussian)
|
| 23 |
+
lognormal
|
| 24 |
+
negative exponential
|
| 25 |
+
gamma
|
| 26 |
+
beta
|
| 27 |
+
pareto
|
| 28 |
+
Weibull
|
| 29 |
+
|
| 30 |
+
distributions on the circle (angles 0 to 2pi)
|
| 31 |
+
---------------------------------------------
|
| 32 |
+
circular uniform
|
| 33 |
+
von Mises
|
| 34 |
+
|
| 35 |
+
General notes on the underlying Mersenne Twister core generator:
|
| 36 |
+
|
| 37 |
+
* The period is 2**19937-1.
|
| 38 |
+
* It is one of the most extensively tested generators in existence.
|
| 39 |
+
* The random() method is implemented in C, executes in a single Python step,
|
| 40 |
+
and is, therefore, threadsafe.
|
| 41 |
+
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
# Translated by Guido van Rossum from C source provided by
|
| 45 |
+
# Adrian Baddeley. Adapted by Raymond Hettinger for use with
|
| 46 |
+
# the Mersenne Twister and os.urandom() core generators.
|
| 47 |
+
|
| 48 |
+
from warnings import warn as _warn
|
| 49 |
+
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
|
| 50 |
+
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
|
| 51 |
+
from math import tau as TWOPI, floor as _floor, isfinite as _isfinite
|
| 52 |
+
from os import urandom as _urandom
|
| 53 |
+
from _collections_abc import Set as _Set, Sequence as _Sequence
|
| 54 |
+
from operator import index as _index
|
| 55 |
+
from itertools import accumulate as _accumulate, repeat as _repeat
|
| 56 |
+
from bisect import bisect as _bisect
|
| 57 |
+
import os as _os
|
| 58 |
+
import _random
|
| 59 |
+
|
| 60 |
+
try:
|
| 61 |
+
# hashlib is pretty heavy to load, try lean internal module first
|
| 62 |
+
from _sha512 import sha512 as _sha512
|
| 63 |
+
except ImportError:
|
| 64 |
+
# fallback to official implementation
|
| 65 |
+
from hashlib import sha512 as _sha512
|
| 66 |
+
|
| 67 |
+
# Public names re-exported through the module-level convenience instance.
__all__ = [
    "Random",
    "SystemRandom",
    "betavariate",
    "choice",
    "choices",
    "expovariate",
    "gammavariate",
    "gauss",
    "getrandbits",
    "getstate",
    "lognormvariate",
    "normalvariate",
    "paretovariate",
    "randbytes",
    "randint",
    "random",
    "randrange",
    "sample",
    "seed",
    "setstate",
    "shuffle",
    "triangular",
    "uniform",
    "vonmisesvariate",
    "weibullvariate",
]

# Precomputed constants for the variate generators.
NV_MAGICCONST = 4 * _exp(-0.5) / _sqrt(2.0)   # used by normalvariate()
LOG4 = _log(4.0)                  # presumably used by gammavariate() -- defined below this chunk
SG_MAGICCONST = 1.0 + _log(4.5)   # presumably used by gammavariate() -- defined below this chunk
BPF = 53        # Number of bits in a float
RECIP_BPF = 2 ** -BPF
_ONE = 1        # identity-checked sentinel default for randrange()'s *step*
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class Random(_random.Random):
|
| 104 |
+
"""Random number generator base class used by bound module functions.
|
| 105 |
+
|
| 106 |
+
Used to instantiate instances of Random to get generators that don't
|
| 107 |
+
share state.
|
| 108 |
+
|
| 109 |
+
Class Random can also be subclassed if you want to use a different basic
|
| 110 |
+
generator of your own devising: in that case, override the following
|
| 111 |
+
methods: random(), seed(), getstate(), and setstate().
|
| 112 |
+
Optionally, implement a getrandbits() method so that randrange()
|
| 113 |
+
can cover arbitrarily large ranges.
|
| 114 |
+
|
| 115 |
+
"""
|
| 116 |
+
|
| 117 |
+
VERSION = 3 # used by getstate/setstate
|
| 118 |
+
|
| 119 |
+
def __init__(self, x=None):
|
| 120 |
+
"""Initialize an instance.
|
| 121 |
+
|
| 122 |
+
Optional argument x controls seeding, as for Random.seed().
|
| 123 |
+
"""
|
| 124 |
+
|
| 125 |
+
self.seed(x)
|
| 126 |
+
self.gauss_next = None
|
| 127 |
+
|
| 128 |
+
    def seed(self, a=None, version=2):
        """Initialize internal state from a seed.

        The only supported seed types are None, int, float,
        str, bytes, and bytearray.

        None or no argument seeds from current time or from an operating
        system specific randomness source if available.

        If *a* is an int, all bits are used.

        For version 2 (the default), all of the bits are used if *a* is a str,
        bytes, or bytearray.  For version 1 (provided for reproducing random
        sequences from older versions of Python), the algorithm for str and
        bytes generates a narrower range of seeds.

        """

        if version == 1 and isinstance(a, (str, bytes)):
            # Legacy path: emulate the historic 64-bit string-hash so old
            # seeded sequences can be reproduced.
            a = a.decode('latin-1') if isinstance(a, bytes) else a
            x = ord(a[0]) << 7 if a else 0
            for c in map(ord, a):
                x = ((1000003 * x) ^ c) & 0xFFFFFFFFFFFFFFFF
            x ^= len(a)
            # -1 is reserved (the old hash protocol used it as an error
            # marker), so map it to -2.
            a = -2 if x == -1 else x

        elif version == 2 and isinstance(a, (str, bytes, bytearray)):
            if isinstance(a, str):
                a = a.encode()
            # Append a SHA-512 digest so short/similar strings still yield
            # well-spread integer seeds, then use every bit.
            a = int.from_bytes(a + _sha512(a).digest(), 'big')

        elif not isinstance(a, (type(None), int, float, str, bytes, bytearray)):
            _warn('Seeding based on hashing is deprecated\n'
                  'since Python 3.9 and will be removed in a subsequent '
                  'version. The only \n'
                  'supported seed types are: None, '
                  'int, float, str, bytes, and bytearray.',
                  DeprecationWarning, 2)

        super().seed(a)
        # Discard any cached second gaussian from gauss(): it belongs to
        # the previous stream.
        self.gauss_next = None
|
| 169 |
+
|
| 170 |
+
def getstate(self):
|
| 171 |
+
"""Return internal state; can be passed to setstate() later."""
|
| 172 |
+
return self.VERSION, super().getstate(), self.gauss_next
|
| 173 |
+
|
| 174 |
+
    def setstate(self, state):
        """Restore internal state from object returned by getstate()."""
        # state is (VERSION, core generator state, gauss_next).
        version = state[0]
        if version == 3:
            version, internalstate, self.gauss_next = state
            super().setstate(internalstate)
        elif version == 2:
            version, internalstate, self.gauss_next = state
            # In version 2, the state was saved as signed ints, which causes
            # inconsistencies between 32/64-bit systems. The state is
            # really unsigned 32-bit ints, so we convert negative ints from
            # version 2 to positive longs for version 3.
            try:
                internalstate = tuple(x % (2 ** 32) for x in internalstate)
            except ValueError as e:
                # Non-numeric junk in the tuple: report it as a type problem.
                raise TypeError from e
            super().setstate(internalstate)
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
## -------------------------------------------------------
|
| 198 |
+
## ---- Methods below this point do not need to be overridden or extended
|
| 199 |
+
## ---- when subclassing for the purpose of using a different core generator.
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
## -------------------- pickle support -------------------
|
| 203 |
+
|
| 204 |
+
# Issue 17489: Since __reduce__ was defined to fix #759889 this is no
|
| 205 |
+
# longer called; we leave it here because it has been here since random was
|
| 206 |
+
# rewritten back in 2001 and why risk breaking something.
|
| 207 |
+
def __getstate__(self): # for pickle
|
| 208 |
+
return self.getstate()
|
| 209 |
+
|
| 210 |
+
def __setstate__(self, state): # for pickle
|
| 211 |
+
self.setstate(state)
|
| 212 |
+
|
| 213 |
+
def __reduce__(self):
|
| 214 |
+
return self.__class__, (), self.getstate()
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
## ---- internal support method for evenly distributed integers ----
|
| 218 |
+
|
| 219 |
+
    def __init_subclass__(cls, /, **kwargs):
        """Control how subclasses generate random integers.

        The algorithm a subclass can use depends on the random() and/or
        getrandbits() implementation available to it and determines
        whether it can generate random integers from arbitrarily large
        ranges.
        """

        # Walk the MRO from most-derived to base; within each class the
        # check order matters: an explicit _randbelow wins, then a custom
        # getrandbits(), then a custom random().
        for c in cls.__mro__:
            if '_randbelow' in c.__dict__:
                # just inherit it
                break
            if 'getrandbits' in c.__dict__:
                cls._randbelow = cls._randbelow_with_getrandbits
                break
            if 'random' in c.__dict__:
                cls._randbelow = cls._randbelow_without_getrandbits
                break
|
| 238 |
+
|
| 239 |
+
def _randbelow_with_getrandbits(self, n):
|
| 240 |
+
"Return a random int in the range [0,n). Returns 0 if n==0."
|
| 241 |
+
|
| 242 |
+
if not n:
|
| 243 |
+
return 0
|
| 244 |
+
getrandbits = self.getrandbits
|
| 245 |
+
k = n.bit_length() # don't use (n-1) here because n can be 1
|
| 246 |
+
r = getrandbits(k) # 0 <= r < 2**k
|
| 247 |
+
while r >= n:
|
| 248 |
+
r = getrandbits(k)
|
| 249 |
+
return r
|
| 250 |
+
|
| 251 |
+
def _randbelow_without_getrandbits(self, n, maxsize=1<<BPF):
|
| 252 |
+
"""Return a random int in the range [0,n). Returns 0 if n==0.
|
| 253 |
+
|
| 254 |
+
The implementation does not use getrandbits, but only random.
|
| 255 |
+
"""
|
| 256 |
+
|
| 257 |
+
random = self.random
|
| 258 |
+
if n >= maxsize:
|
| 259 |
+
_warn("Underlying random() generator does not supply \n"
|
| 260 |
+
"enough bits to choose from a population range this large.\n"
|
| 261 |
+
"To remove the range limitation, add a getrandbits() method.")
|
| 262 |
+
return _floor(random() * n)
|
| 263 |
+
if n == 0:
|
| 264 |
+
return 0
|
| 265 |
+
rem = maxsize % n
|
| 266 |
+
limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0
|
| 267 |
+
r = random()
|
| 268 |
+
while r >= limit:
|
| 269 |
+
r = random()
|
| 270 |
+
return _floor(r * maxsize) % n
|
| 271 |
+
|
| 272 |
+
_randbelow = _randbelow_with_getrandbits
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
## --------------------------------------------------------
|
| 276 |
+
## ---- Methods below this point generate custom distributions
|
| 277 |
+
## ---- based on the methods defined above. They do not
|
| 278 |
+
## ---- directly touch the underlying generator and only
|
| 279 |
+
## ---- access randomness through the methods: random(),
|
| 280 |
+
## ---- getrandbits(), or _randbelow().
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
## -------------------- bytes methods ---------------------
|
| 284 |
+
|
| 285 |
+
def randbytes(self, n):
|
| 286 |
+
"""Generate n random bytes."""
|
| 287 |
+
return self.getrandbits(n * 8).to_bytes(n, 'little')
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
## -------------------- integer methods -------------------
|
| 291 |
+
|
| 292 |
+
    def randrange(self, start, stop=None, step=_ONE):
        """Choose a random item from range(start, stop[, step]).

        This fixes the problem with randint() which includes the
        endpoint; in Python this is usually not what you want.

        """

        # This code is a bit messy to make it fast for the
        # common case while still doing adequate error checking.
        try:
            istart = _index(start)
        except TypeError:
            # Non-integral start: accept int-valued floats for now but
            # warn that this path is deprecated (since 3.10).
            istart = int(start)
            if istart != start:
                _warn('randrange() will raise TypeError in the future',
                      DeprecationWarning, 2)
                raise ValueError("non-integer arg 1 for randrange()")
            _warn('non-integer arguments to randrange() have been deprecated '
                  'since Python 3.10 and will be removed in a subsequent '
                  'version',
                  DeprecationWarning, 2)
        if stop is None:
            # We don't check for "step != 1" because it hasn't been
            # type checked and converted to an integer yet.  _ONE is an
            # identity-checked sentinel: any explicit step means misuse.
            if step is not _ONE:
                raise TypeError('Missing a non-None stop argument')
            if istart > 0:
                return self._randbelow(istart)
            raise ValueError("empty range for randrange()")

        # stop argument supplied.
        try:
            istop = _index(stop)
        except TypeError:
            istop = int(stop)
            if istop != stop:
                _warn('randrange() will raise TypeError in the future',
                      DeprecationWarning, 2)
                raise ValueError("non-integer stop for randrange()")
            _warn('non-integer arguments to randrange() have been deprecated '
                  'since Python 3.10 and will be removed in a subsequent '
                  'version',
                  DeprecationWarning, 2)
        width = istop - istart
        try:
            istep = _index(step)
        except TypeError:
            istep = int(step)
            if istep != step:
                _warn('randrange() will raise TypeError in the future',
                      DeprecationWarning, 2)
                raise ValueError("non-integer step for randrange()")
            _warn('non-integer arguments to randrange() have been deprecated '
                  'since Python 3.10 and will be removed in a subsequent '
                  'version',
                  DeprecationWarning, 2)
        # Fast path.
        if istep == 1:
            if width > 0:
                return istart + self._randbelow(width)
            raise ValueError("empty range for randrange() (%d, %d, %d)" % (istart, istop, width))

        # Non-unit step argument supplied.
        # n = ceil(width / istep), computed so it also works for negative
        # steps; it is the number of items in range(start, stop, step).
        if istep > 0:
            n = (width + istep - 1) // istep
        elif istep < 0:
            n = (width + istep + 1) // istep
        else:
            raise ValueError("zero step for randrange()")
        if n <= 0:
            raise ValueError("empty range for randrange()")
        return istart + istep * self._randbelow(n)
|
| 365 |
+
|
| 366 |
+
def randint(self, a, b):
|
| 367 |
+
"""Return random integer in range [a, b], including both end points.
|
| 368 |
+
"""
|
| 369 |
+
|
| 370 |
+
return self.randrange(a, b+1)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
## -------------------- sequence methods -------------------
|
| 374 |
+
|
| 375 |
+
def choice(self, seq):
|
| 376 |
+
"""Choose a random element from a non-empty sequence."""
|
| 377 |
+
# raises IndexError if seq is empty
|
| 378 |
+
return seq[self._randbelow(len(seq))]
|
| 379 |
+
|
| 380 |
+
    def shuffle(self, x, random=None):
        """Shuffle list x in place, and return None.

        Optional argument random is a 0-argument function returning a
        random float in [0.0, 1.0); if it is the default None, the
        standard random.random will be used.

        """

        # Both branches are the Fisher-Yates shuffle, walking from the
        # tail of the list toward the front.
        if random is None:
            randbelow = self._randbelow
            for i in reversed(range(1, len(x))):
                # pick an element in x[:i+1] with which to exchange x[i]
                j = randbelow(i + 1)
                x[i], x[j] = x[j], x[i]
        else:
            # Deprecated path: caller supplied its own random() source.
            _warn('The *random* parameter to shuffle() has been deprecated\n'
                  'since Python 3.9 and will be removed in a subsequent '
                  'version.',
                  DeprecationWarning, 2)
            floor = _floor
            for i in reversed(range(1, len(x))):
                # pick an element in x[:i+1] with which to exchange x[i]
                j = floor(random() * (i + 1))
                x[i], x[j] = x[j], x[i]
|
| 405 |
+
|
| 406 |
+
    def sample(self, population, k, *, counts=None):
        """Chooses k unique random elements from a population sequence or set.

        Returns a new list containing elements from the population while
        leaving the original population unchanged.  The resulting list is
        in selection order so that all sub-slices will also be valid random
        samples.  This allows raffle winners (the sample) to be partitioned
        into grand prize and second place winners (the subslices).

        Members of the population need not be hashable or unique.  If the
        population contains repeats, then each occurrence is a possible
        selection in the sample.

        Repeated elements can be specified one at a time or with the optional
        counts parameter.  For example:

            sample(['red', 'blue'], counts=[4, 2], k=5)

        is equivalent to:

            sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5)

        To choose a sample from a range of integers, use range() for the
        population argument.  This is especially fast and space efficient
        for sampling from a large population:

            sample(range(10000000), 60)

        """

        # Sampling without replacement entails tracking either potential
        # selections (the pool) in a list or previous selections in a set.

        # When the number of selections is small compared to the
        # population, then tracking selections is efficient, requiring
        # only a small set and an occasional reselection.  For
        # a larger number of selections, the pool tracking method is
        # preferred since the list takes less space than the
        # set and it doesn't suffer from frequent reselections.

        # The number of calls to _randbelow() is kept at or near k, the
        # theoretical minimum.  This is important because running time
        # is dominated by _randbelow() and because it extracts the
        # least entropy from the underlying random number generators.

        # Memory requirements are kept to the smaller of a k-length
        # set or an n-length list.

        # There are other sampling algorithms that do not require
        # auxiliary memory, but they were rejected because they made
        # too many calls to _randbelow(), making them slower and
        # causing them to eat more entropy than necessary.

        if not isinstance(population, _Sequence):
            if isinstance(population, _Set):
                _warn('Sampling from a set deprecated\n'
                      'since Python 3.9 and will be removed in a subsequent version.',
                      DeprecationWarning, 2)
                population = tuple(population)
            else:
                raise TypeError("Population must be a sequence.  For dicts or sets, use sorted(d).")
        n = len(population)
        if counts is not None:
            # Sample positions in the "virtual" expanded population, then
            # map each position back to its element via the cumulative
            # counts (bisect finds which bucket the position falls in).
            cum_counts = list(_accumulate(counts))
            if len(cum_counts) != n:
                raise ValueError('The number of counts does not match the population')
            total = cum_counts.pop()
            if not isinstance(total, int):
                raise TypeError('Counts must be integers')
            if total <= 0:
                raise ValueError('Total of counts must be greater than zero')
            selections = self.sample(range(total), k=k)
            bisect = _bisect
            return [population[bisect(cum_counts, s)] for s in selections]
        randbelow = self._randbelow
        if not 0 <= k <= n:
            raise ValueError("Sample larger than population or is negative")
        result = [None] * k
        setsize = 21        # size of a small set minus size of an empty list
        if k > 5:
            setsize += 4 ** _ceil(_log(k * 3, 4))  # table size for big sets
        if n <= setsize:
            # An n-length list is smaller than a k-length set.
            # Invariant:  non-selected at pool[0 : n-i]
            pool = list(population)
            for i in range(k):
                j = randbelow(n - i)
                result[i] = pool[j]
                pool[j] = pool[n - i - 1]  # move non-selected item into vacancy
        else:
            # Track chosen indices in a set and re-draw on collision.
            selected = set()
            selected_add = selected.add
            for i in range(k):
                j = randbelow(n)
                while j in selected:
                    j = randbelow(n)
                selected_add(j)
                result[i] = population[j]
        return result
|
| 505 |
+
|
| 506 |
+
    def choices(self, population, weights=None, *, cum_weights=None, k=1):
        """Return a k sized list of population elements chosen with replacement.

        If the relative weights or cumulative weights are not specified,
        the selections are made with equal probability.

        """
        random = self.random
        n = len(population)
        if cum_weights is None:
            if weights is None:
                # Unweighted fast path: uniform index draws.
                floor = _floor
                n += 0.0    # convert to float for a small speed improvement
                return [population[floor(random() * n)] for i in _repeat(None, k)]
            try:
                cum_weights = list(_accumulate(weights))
            except TypeError:
                # Common mistake: choices(pop, 5) passes k positionally as
                # *weights*; give a pointed error message for that case.
                if not isinstance(weights, int):
                    raise
                k = weights
                raise TypeError(
                    f'The number of choices must be a keyword argument: {k=}'
                ) from None
        elif weights is not None:
            raise TypeError('Cannot specify both weights and cumulative weights')
        if len(cum_weights) != n:
            raise ValueError('The number of weights does not match the population')
        total = cum_weights[-1] + 0.0   # convert to float
        if total <= 0.0:
            raise ValueError('Total of weights must be greater than zero')
        if not _isfinite(total):
            raise ValueError('Total of weights must be finite')
        # Inverse-CDF sampling: draw in [0, total) and locate the bucket
        # with bisect; hi=n-1 guards against a draw landing past the end
        # due to float rounding.
        bisect = _bisect
        hi = n - 1
        return [population[bisect(cum_weights, random() * total, 0, hi)]
                for i in _repeat(None, k)]
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
## -------------------- real-valued distributions -------------------
|
| 545 |
+
|
| 546 |
+
def uniform(self, a, b):
|
| 547 |
+
"Get a random number in the range [a, b) or [a, b] depending on rounding."
|
| 548 |
+
return a + (b - a) * self.random()
|
| 549 |
+
|
| 550 |
+
    def triangular(self, low=0.0, high=1.0, mode=None):
        """Triangular distribution.

        Continuous distribution bounded by given lower and upper limits,
        and having a given mode value in-between.

        http://en.wikipedia.org/wiki/Triangular_distribution

        """
        u = self.random()
        try:
            # c is the mode's relative position in [low, high];
            # 0.5 (symmetric) when no mode is given.
            c = 0.5 if mode is None else (mode - low) / (high - low)
        except ZeroDivisionError:
            # Degenerate interval (high == low): the only possible value.
            return low
        if u > c:
            # Mirror the draw into the lower branch of the inverse CDF so
            # one closed form below handles both sides of the mode.
            u = 1.0 - u
            c = 1.0 - c
            low, high = high, low
        return low + (high - low) * _sqrt(u * c)
|
| 569 |
+
|
| 570 |
+
    def normalvariate(self, mu, sigma):
        """Normal distribution.

        mu is the mean, and sigma is the standard deviation.

        """
        # Uses Kinderman and Monahan method. Reference: Kinderman,
        # A.J. and Monahan, J.F., "Computer generation of random
        # variables using the ratio of uniform deviates", ACM Trans
        # Math Software, 3, (1977), pp257-260.

        random = self.random
        # Rejection sampling: loop until the ratio-of-uniforms draw falls
        # under the acceptance curve.
        while True:
            u1 = random()
            u2 = 1.0 - random()     # 1-random() keeps u2 > 0 for the log below
            z = NV_MAGICCONST * (u1 - 0.5) / u2
            zz = z * z / 4.0
            if zz <= -_log(u2):
                break
        return mu + z * sigma
|
| 590 |
+
|
| 591 |
+
    def gauss(self, mu, sigma):
        """Gaussian distribution.

        mu is the mean, and sigma is the standard deviation.  This is
        slightly faster than the normalvariate() function.

        Not thread-safe without a lock around calls.

        """
        # When x and y are two variables from [0, 1), uniformly
        # distributed, then
        #
        #    cos(2*pi*x)*sqrt(-2*log(1-y))
        #    sin(2*pi*x)*sqrt(-2*log(1-y))
        #
        # are two *independent* variables with normal distribution
        # (mu = 0, sigma = 1).
        # (Lambert Meertens)
        # (corrected version; bug discovered by Mike Miller, fixed by LM)

        # Multithreading note: When two threads call this function
        # simultaneously, it is possible that they will receive the
        # same return value.  The window is very small though.  To
        # avoid this, you have to use a lock around all calls.  (I
        # didn't want to slow this down in the serial case by using a
        # lock here.)

        random = self.random
        # Each Box-Muller step yields two deviates; the second is cached
        # in self.gauss_next and consumed (then cleared) on the next call.
        z = self.gauss_next
        self.gauss_next = None
        if z is None:
            x2pi = random() * TWOPI
            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
            z = _cos(x2pi) * g2rad
            self.gauss_next = _sin(x2pi) * g2rad

        return mu + z * sigma
|
| 628 |
+
|
| 629 |
+
def lognormvariate(self, mu, sigma):
|
| 630 |
+
"""Log normal distribution.
|
| 631 |
+
|
| 632 |
+
If you take the natural logarithm of this distribution, you'll get a
|
| 633 |
+
normal distribution with mean mu and standard deviation sigma.
|
| 634 |
+
mu can have any value, and sigma must be greater than zero.
|
| 635 |
+
|
| 636 |
+
"""
|
| 637 |
+
return _exp(self.normalvariate(mu, sigma))
|
| 638 |
+
|
| 639 |
+
def expovariate(self, lambd):
    """Exponential distribution.

    lambd is 1.0 divided by the desired mean; it should be nonzero.
    (The parameter would be called "lambda", but that is a reserved
    word in Python.)  Returned values range from 0 to positive
    infinity when lambd is positive, and from negative infinity to 0
    when lambd is negative.
    """
    # Inverse-CDF sampling.  Using 1.0 - random() instead of random()
    # keeps the argument of log() in (0.0, 1.0], precluding log(0).
    return -_log(1.0 - self.random()) / lambd
|
| 655 |
+
|
| 656 |
+
def vonmisesvariate(self, mu, kappa):
    """Circular data distribution.

    mu is the mean angle, expressed in radians between 0 and 2*pi, and
    kappa is the concentration parameter, which must be greater than or
    equal to zero.  If kappa is equal to zero, this distribution reduces
    to a uniform random angle over the range 0 to 2*pi.

    """
    # Based upon an algorithm published in: Fisher, N.I.,
    # "Statistical Analysis of Circular Data", Cambridge
    # University Press, 1993.

    # Thanks to Magnus Kessler for a correction to the
    # implementation of step 4.

    random = self.random
    if kappa <= 1e-6:
        # Negligible concentration: degenerate to a uniform angle.
        return TWOPI * random()

    s = 0.5 / kappa
    r = s + _sqrt(1.0 + s * s)

    # Acceptance-rejection loop: propose z = cos(pi * u1) and accept
    # it when u2 passes either the quick squeeze test (first clause)
    # or the exact acceptance condition (second clause).
    while True:
        u1 = random()
        z = _cos(_pi * u1)

        d = z / (r + z)
        u2 = random()
        if u2 < 1.0 - d * d or u2 <= (1.0 - d) * _exp(d):
            break

    q = 1.0 / r
    f = (q + z) / (1.0 + q * z)
    # u3 picks the sign of the angular offset so the result is
    # symmetric about mu; the modulo maps it back into [0, 2*pi).
    u3 = random()
    if u3 > 0.5:
        theta = (mu + _acos(f)) % TWOPI
    else:
        theta = (mu - _acos(f)) % TWOPI

    return theta
|
| 697 |
+
|
| 698 |
+
def gammavariate(self, alpha, beta):
    """Gamma distribution.  Not the gamma function!

    Conditions on the parameters are alpha > 0 and beta > 0.

    The probability distribution function is:

                x ** (alpha - 1) * math.exp(-x / beta)
      pdf(x) =  --------------------------------------
                  math.gamma(alpha) * beta ** alpha

    Raises ValueError if either parameter is not positive.
    """
    # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2

    # Warning: a few older sources define the gamma distribution in terms
    # of alpha > -1.0
    if alpha <= 0.0 or beta <= 0.0:
        raise ValueError('gammavariate: alpha and beta must be > 0.0')

    random = self.random
    if alpha > 1.0:

        # Uses R.C.H. Cheng, "The generation of Gamma
        # variables with non-integral shape parameters",
        # Applied Statistics, (1977), 26, No. 1, p71-74

        ainv = _sqrt(2.0 * alpha - 1.0)
        bbb = alpha - LOG4
        ccc = alpha + ainv

        while True:
            u1 = random()
            # Reject u1 values too close to 0 or 1 so the logit
            # transform below stays numerically safe.
            if not 1e-7 < u1 < 0.9999999:
                continue
            u2 = 1.0 - random()
            v = _log(u1 / (1.0 - u1)) / ainv
            x = alpha * _exp(v)
            z = u1 * u1 * u2
            r = bbb + ccc * v - x
            # Cheap squeeze test first (SG_MAGICCONST), then the
            # exact (log-based) acceptance condition.
            if r + SG_MAGICCONST - 4.5 * z >= 0.0 or r >= _log(z):
                return x * beta

    elif alpha == 1.0:
        # expovariate(1/beta)
        return -_log(1.0 - random()) * beta

    else:
        # alpha is between 0 and 1 (exclusive)
        # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
        while True:
            u = random()
            b = (_e + alpha) / _e
            p = b * u
            if p <= 1.0:
                x = p ** (1.0 / alpha)
            else:
                x = -_log((b - p) / alpha)
            u1 = random()
            if p > 1.0:
                if u1 <= x ** (alpha - 1.0):
                    break
            elif u1 <= _exp(-x):
                break
        return x * beta
|
| 762 |
+
|
| 763 |
+
def betavariate(self, alpha, beta):
    """Beta distribution.

    Both shape parameters alpha and beta must be greater than zero.
    Returned values range between 0 and 1.
    """
    # Janne Sinkkonen's method, matching the standard texts (e.g.
    # Knuth Vol 2 Ed 3 pg 134 "the beta distribution"): the ratio
    # X / (X + Y) of independent Gamma(alpha, 1) and Gamma(beta, 1)
    # deviates is Beta(alpha, beta) distributed.
    #
    # An earlier implementation based on two expovariate() draws
    # ("Discrete Event Simulation in C", pp 87-88) was incorrect; see
    # Ivan Frohne's analysis on python-bugs-list (January 2001):
    # http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
    numerator = self.gammavariate(alpha, 1.0)
    if not numerator:
        # Guard against a zero draw to avoid dividing 0 by 0.
        return 0.0
    return numerator / (numerator + self.gammavariate(beta, 1.0))
|
| 789 |
+
|
| 790 |
+
def paretovariate(self, alpha):
    """Pareto distribution.  alpha is the shape parameter."""
    # Inverse-CDF method (Jain, pg. 495).  1.0 - random() lies in
    # (0.0, 1.0], so the base of the negative power is never zero.
    return (1.0 - self.random()) ** (-1.0 / alpha)
|
| 796 |
+
|
| 797 |
+
def weibullvariate(self, alpha, beta):
    """Weibull distribution.

    alpha is the scale parameter and beta is the shape parameter.
    """
    # Inverse-CDF method (Jain, pg. 499; bug fix courtesy Bill Arms).
    # Using 1.0 - random() keeps the log() argument strictly positive.
    return alpha * (-_log(1.0 - self.random())) ** (1.0 / beta)
|
| 807 |
+
|
| 808 |
+
|
| 809 |
+
## ------------------------------------------------------------------
|
| 810 |
+
## --------------- Operating System Random Source ------------------
|
| 811 |
+
|
| 812 |
+
|
| 813 |
+
class SystemRandom(Random):
    """Random number generator backed by entropy sources supplied by
    the operating system (such as /dev/urandom on Unix or
    CryptGenRandom on Windows).

    Not available on all systems (see os.urandom() for details).
    """

    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""
        # 7 bytes give 56 random bits; drop the low 3 to leave the 53
        # bits a float mantissa can represent, then scale into [0, 1).
        raw = int.from_bytes(_urandom(7), 'big')
        return (raw >> 3) * RECIP_BPF

    def getrandbits(self, k):
        """getrandbits(k) -> x.  Generates an int with k random bits."""
        if k < 0:
            raise ValueError('number of bits must be non-negative')
        nbytes = (k + 7) // 8                     # bits rounded up to bytes
        value = int.from_bytes(_urandom(nbytes), 'big')
        return value >> (nbytes * 8 - k)          # discard surplus low bits

    def randbytes(self, n):
        """Generate n random bytes."""
        # os.urandom(n) already raises ValueError for n < 0 and
        # returns an empty bytes string for n == 0.
        return _urandom(n)

    def seed(self, *args, **kwds):
        "Stub method.  Not used for a system random number generator."
        return None

    def _notimplemented(self, *args, **kwds):
        "Method should not be called for a system random number generator."
        raise NotImplementedError('System entropy source does not have state.')
    getstate = setstate = _notimplemented
|
| 848 |
+
|
| 849 |
+
|
| 850 |
+
# ----------------------------------------------------------------------
# Create one instance, seeded from current time, and export its methods
# as module-level functions.  The functions share state across all uses
# (both in the user's code and in the Python libraries), but that's fine
# for most programs and is easier for the casual user than making them
# instantiate their own Random() instance.
#
# Note: these are bound methods of the hidden shared instance, so e.g.
# a module-level seed() call affects every module-level generator.

_inst = Random()
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
triangular = _inst.triangular
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
sample = _inst.sample
shuffle = _inst.shuffle
choices = _inst.choices
normalvariate = _inst.normalvariate
lognormvariate = _inst.lognormvariate
expovariate = _inst.expovariate
vonmisesvariate = _inst.vonmisesvariate
gammavariate = _inst.gammavariate
gauss = _inst.gauss
betavariate = _inst.betavariate
paretovariate = _inst.paretovariate
weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
getrandbits = _inst.getrandbits
randbytes = _inst.randbytes
|
| 881 |
+
|
| 882 |
+
|
| 883 |
+
## ------------------------------------------------------
|
| 884 |
+
## ----------------- test program -----------------------
|
| 885 |
+
|
| 886 |
+
def _test_generator(n, func, args):
    """Benchmark helper: time *n* calls of func(*args) and print the
    elapsed time plus summary statistics of the generated values."""
    from statistics import stdev, fmean as mean
    from time import perf_counter

    start = perf_counter()
    data = [func(*args) for _ in _repeat(None, n)]
    stop = perf_counter()

    avg = mean(data)
    print(f'{stop - start:.3f} sec, {n} times {func.__name__}')
    print('avg %g, stddev %g, min %g, max %g\n' %
          (avg, stdev(data, avg), min(data), max(data)))
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
def _test(N=2000):
    """Smoke-test the module-level generators: time N draws from each
    distribution (various parameter choices) and print summary stats."""
    _test_generator(N, random, ())
    _test_generator(N, normalvariate, (0.0, 1.0))
    _test_generator(N, lognormvariate, (0.0, 1.0))
    _test_generator(N, vonmisesvariate, (0.0, 1.0))
    # gammavariate is exercised across all three of its internal
    # algorithm branches (alpha < 1, alpha == 1, alpha > 1).
    _test_generator(N, gammavariate, (0.01, 1.0))
    _test_generator(N, gammavariate, (0.1, 1.0))
    _test_generator(N, gammavariate, (0.1, 2.0))
    _test_generator(N, gammavariate, (0.5, 1.0))
    _test_generator(N, gammavariate, (0.9, 1.0))
    _test_generator(N, gammavariate, (1.0, 1.0))
    _test_generator(N, gammavariate, (2.0, 1.0))
    _test_generator(N, gammavariate, (20.0, 1.0))
    _test_generator(N, gammavariate, (200.0, 1.0))
    _test_generator(N, gauss, (0.0, 1.0))
    _test_generator(N, betavariate, (3.0, 3.0))
    _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0))
|
| 920 |
+
|
| 921 |
+
|
| 922 |
+
## ------------------------------------------------------
|
| 923 |
+
## ------------------ fork support ---------------------
|
| 924 |
+
|
| 925 |
+
if hasattr(_os, "fork"):
    # Reseed the shared generator in the child after fork() so the
    # parent and child do not produce identical random streams.
    _os.register_at_fork(after_in_child=_inst.seed)


if __name__ == '__main__':
    _test()
|
evalkit_cambrian/lib/python3.10/runpy.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""runpy.py - locating and running Python code using the module namespace
|
| 2 |
+
|
| 3 |
+
Provides support for locating and running Python scripts using the Python
|
| 4 |
+
module namespace instead of the native filesystem.
|
| 5 |
+
|
| 6 |
+
This allows Python code to play nicely with non-filesystem based PEP 302
|
| 7 |
+
importers when locating support scripts as well as when importing modules.
|
| 8 |
+
"""
|
| 9 |
+
# Written by Nick Coghlan <ncoghlan at gmail.com>
|
| 10 |
+
# to implement PEP 338 (Executing Modules as Scripts)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
import sys
|
| 14 |
+
import importlib.machinery # importlib first so we can test #15386 via -m
|
| 15 |
+
import importlib.util
|
| 16 |
+
import io
|
| 17 |
+
import types
|
| 18 |
+
import os
|
| 19 |
+
|
| 20 |
+
__all__ = [
|
| 21 |
+
"run_module", "run_path",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
class _TempModule(object):
    """Context manager that temporarily replaces a module in
    sys.modules with an empty namespace, restoring any pre-existing
    entry on exit."""

    def __init__(self, mod_name):
        self.mod_name = mod_name
        self.module = types.ModuleType(mod_name)
        self._saved_module = []

    def __enter__(self):
        # Stash any module already registered under this name so that
        # __exit__ can put it back.
        try:
            self._saved_module.append(sys.modules[self.mod_name])
        except KeyError:
            pass
        sys.modules[self.mod_name] = self.module
        return self

    def __exit__(self, *args):
        # Restore the original module, or drop our placeholder when
        # nothing was registered before.
        if self._saved_module:
            sys.modules[self.mod_name] = self._saved_module[0]
        else:
            del sys.modules[self.mod_name]
        self._saved_module = []
|
| 46 |
+
|
| 47 |
+
class _ModifiedArgv0(object):
    """Context manager that temporarily replaces sys.argv[0] with a
    given value, restoring the original on exit."""
    def __init__(self, value):
        self.value = value
        # The sentinel marks "nothing saved yet"; it guards against
        # re-entering the same (non-reentrant) instance.
        self._saved_value = self._sentinel = object()

    def __enter__(self):
        if self._saved_value is not self._sentinel:
            raise RuntimeError("Already preserving saved value")
        self._saved_value = sys.argv[0]
        sys.argv[0] = self.value

    def __exit__(self, *args):
        self.value = self._sentinel
        sys.argv[0] = self._saved_value
|
| 61 |
+
|
| 62 |
+
# TODO: Replace these helpers with importlib._bootstrap_external functions.
def _run_code(code, run_globals, init_globals=None,
              mod_name=None, mod_spec=None,
              pkg_name=None, script_name=None):
    """Execute *code* in *run_globals* after installing the standard
    module attributes (__name__, __file__, __spec__, ...), and return
    the (mutated) globals dictionary."""
    if init_globals is not None:
        run_globals.update(init_globals)
    # Derive the metadata attributes either from the module spec or,
    # for direct script execution, from the supplied script name.
    if mod_spec is not None:
        loader, fname, cached = mod_spec.loader, mod_spec.origin, mod_spec.cached
        if pkg_name is None:
            pkg_name = mod_spec.parent
    else:
        loader, fname, cached = None, script_name, None
    run_globals.update(__name__=mod_name,
                       __file__=fname,
                       __cached__=cached,
                       __doc__=None,
                       __loader__=loader,
                       __package__=pkg_name,
                       __spec__=mod_spec)
    exec(code, run_globals)
    return run_globals
|
| 88 |
+
|
| 89 |
+
def _run_module_code(code, init_globals=None,
                     mod_name=None, mod_spec=None,
                     pkg_name=None, script_name=None):
    """Run *code* in a fresh temporary module while sys.modules and
    sys.argv[0] are patched, returning a copy of the resulting globals."""
    if mod_spec is None:
        fname = script_name
    else:
        fname = mod_spec.origin
    with _TempModule(mod_name) as temp_module, _ModifiedArgv0(fname):
        mod_globals = temp_module.module.__dict__
        _run_code(code, mod_globals, init_globals,
                  mod_name, mod_spec, pkg_name, script_name)
    # Return a copy: the temporary module's namespace may be cleared
    # once the module object itself goes away.
    return mod_globals.copy()
|
| 101 |
+
|
| 102 |
+
# Helper to get the full name, spec and code for a module
def _get_module_details(mod_name, error=ImportError):
    """Resolve *mod_name* to a (name, spec, code object) triple.

    Raises *error* (default ImportError) with a friendly message on
    failure.  Packages are resolved recursively to their
    <pkg>.__main__ submodule.
    """
    if mod_name.startswith("."):
        raise error("Relative module names not supported")
    pkg_name, _, _ = mod_name.rpartition(".")
    if pkg_name:
        # Try importing the parent to avoid catching initialization errors
        try:
            __import__(pkg_name)
        except ImportError as e:
            # If the parent or higher ancestor package is missing, let the
            # error be raised by find_spec() below and then be caught. But do
            # not allow other errors to be caught.
            if e.name is None or (e.name != pkg_name and
                    not pkg_name.startswith(e.name + ".")):
                raise
        # Warn if the module has already been imported under its normal name
        existing = sys.modules.get(mod_name)
        if existing is not None and not hasattr(existing, "__path__"):
            from warnings import warn
            msg = "{mod_name!r} found in sys.modules after import of " \
                "package {pkg_name!r}, but prior to execution of " \
                "{mod_name!r}; this may result in unpredictable " \
                "behaviour".format(mod_name=mod_name, pkg_name=pkg_name)
            warn(RuntimeWarning(msg))

    try:
        spec = importlib.util.find_spec(mod_name)
    except (ImportError, AttributeError, TypeError, ValueError) as ex:
        # This hack fixes an impedance mismatch between pkgutil and
        # importlib, where the latter raises other errors for cases where
        # pkgutil previously raised ImportError
        msg = "Error while finding module specification for {!r} ({}: {})"
        if mod_name.endswith(".py"):
            msg += (f". Try using '{mod_name[:-3]}' instead of "
                    f"'{mod_name}' as the module name.")
        raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex
    if spec is None:
        raise error("No module named %s" % mod_name)
    if spec.submodule_search_locations is not None:
        # mod_name is a package: recurse into its __main__ submodule.
        if mod_name == "__main__" or mod_name.endswith(".__main__"):
            raise error("Cannot use package as __main__ module")
        try:
            pkg_main_name = mod_name + ".__main__"
            return _get_module_details(pkg_main_name, error)
        except error as e:
            if mod_name not in sys.modules:
                raise  # No module loaded; being a package is irrelevant
            raise error(("%s; %r is a package and cannot " +
                         "be directly executed") %(e, mod_name))
    loader = spec.loader
    if loader is None:
        raise error("%r is a namespace package and cannot be executed"
                    % mod_name)
    try:
        code = loader.get_code(mod_name)
    except ImportError as e:
        raise error(format(e)) from e
    if code is None:
        raise error("No code object available for %s" % mod_name)
    return mod_name, spec, code
|
| 163 |
+
|
| 164 |
+
class _Error(Exception):
    """Error that _run_module_as_main() should report without a traceback"""
    # Raised internally for expected failure modes (bad module name,
    # missing __main__, ...); the CLI entry point converts it into a
    # concise sys.exit() message rather than a full traceback.
|
| 166 |
+
|
| 167 |
+
# XXX ncoghlan: Should this be documented and made public?
# (Current thoughts: don't repeat the mistake that lead to its
# creation when run_module() no longer met the needs of
# mainmodule.c, but couldn't be changed because it was public)
def _run_module_as_main(mod_name, alter_argv=True):
    """Runs the designated module in the __main__ namespace

    Note that the executed module will have full access to the
    __main__ namespace.  If this is not desirable, the run_module()
    function should be used to run the module code in a fresh namespace.

    At the very least, these variables in __main__ will be overwritten:
        __name__
        __file__
        __cached__
        __loader__
        __package__
    """
    try:
        if alter_argv or mod_name != "__main__": # i.e. -m switch
            mod_name, mod_spec, code = _get_module_details(mod_name, _Error)
        else:          # i.e. directory or zipfile execution
            mod_name, mod_spec, code = _get_main_module_details(_Error)
    except _Error as exc:
        # Report lookup failures concisely (no traceback), matching
        # the interpreter's own "-m" error style.
        msg = "%s: %s" % (sys.executable, exc)
        sys.exit(msg)
    main_globals = sys.modules["__main__"].__dict__
    if alter_argv:
        # Make sys.argv[0] reflect the module actually being run.
        sys.argv[0] = mod_spec.origin
    return _run_code(code, main_globals, None,
                     "__main__", mod_spec)
|
| 198 |
+
|
| 199 |
+
def run_module(mod_name, init_globals=None,
               run_name=None, alter_sys=False):
    """Execute a module's code without importing it.

    mod_name -- an absolute module name or package name.

    Optional arguments:
    init_globals -- dictionary used to pre-populate the module's
    globals dictionary before the code is executed.

    run_name -- if not None, this will be used for setting __name__;
    otherwise, __name__ will be set to mod_name + '.__main__' if the
    named module is a package and to just mod_name otherwise.

    alter_sys -- if True, sys.argv[0] is updated with the value of
    __file__ and sys.modules[__name__] is updated with a temporary
    module object for the module being executed.  Both are restored
    to their original values before the function returns.

    Returns the resulting module globals dictionary.
    """
    mod_name, mod_spec, code = _get_module_details(mod_name)
    run_name = mod_name if run_name is None else run_name
    if not alter_sys:
        # Leave the sys module alone
        return _run_code(code, {}, init_globals, run_name, mod_spec)
    return _run_module_code(code, init_globals, run_name, mod_spec)
|
| 228 |
+
|
| 229 |
+
def _get_main_module_details(error=ImportError):
    """Locate __main__ for directory/zipfile execution.

    Gives a nicer error message when the target has no __main__.py.
    The standard __main__ entry is moved out of sys.modules for the
    duration of the lookup so a pre-existing __loader__ attribute
    cannot interfere, then unconditionally restored.
    """
    main_name = "__main__"
    preserved = sys.modules[main_name]
    del sys.modules[main_name]
    try:
        return _get_module_details(main_name)
    except ImportError as exc:
        if main_name not in str(exc):
            raise
        raise error("can't find %r module in %r" %
                    (main_name, sys.path[0])) from exc
    finally:
        sys.modules[main_name] = preserved
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def _get_code_from_file(run_name, fname):
    """Load a code object from *fname*, returning (code, fname).

    Tries to read a pre-compiled (.pyc-style) file first; falls back
    to compiling the file contents as ordinary source code.
    """
    from pkgutil import read_code
    resolved = os.path.abspath(os.fsdecode(fname))
    with io.open_code(resolved) as stream:
        code = read_code(stream)
    if code is None:
        # Not a valid compiled file, so compile it as source.
        with io.open_code(resolved) as stream:
            code = compile(stream.read(), fname, 'exec')
    return code, fname
|
| 259 |
+
|
| 260 |
+
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location.

    path_name -- filesystem location of a Python script, zipfile,
    or directory containing a top level __main__.py script.

    Optional arguments:
    init_globals -- dictionary used to pre-populate the module's
    globals dictionary before the code is executed.

    run_name -- if not None, this will be used to set __name__;
    otherwise, '<run_path>' will be used for __name__.

    Returns the resulting module globals dictionary.
    """
    if run_name is None:
        run_name = "<run_path>"
    pkg_name = run_name.rpartition(".")[0]
    from pkgutil import get_importer
    importer = get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    is_NullImporter = False
    if type(importer).__module__ == 'imp':
        if type(importer).__name__ == 'NullImporter':
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name,
                                pkg_name=pkg_name, script_name=fname)
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            with _TempModule(run_name) as temp_module, \
                 _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                # .copy() because the temporary module's namespace may
                # be cleared when the module object goes away.
                return _run_code(code, mod_globals, init_globals,
                                    run_name, mod_spec, pkg_name).copy()
        finally:
            # Always undo the sys.path insertion, even when the
            # executed code removed the entry itself (hence ValueError
            # is tolerated).
            try:
                sys.path.remove(path_name)
            except ValueError:
                pass
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
if __name__ == "__main__":
    # Run the module specified as the next command line argument
    if len(sys.argv) < 2:
        print("No module specified for execution", file=sys.stderr)
    else:
        # Shift argv left so the executed module sees itself as
        # sys.argv[0], mirroring the behaviour of "python -m".
        del sys.argv[0] # Make the requested module sys.argv[0]
        _run_module_as_main(sys.argv[0])
|
evalkit_cambrian/lib/python3.10/sched.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A generally useful event scheduler class.
|
| 2 |
+
|
| 3 |
+
Each instance of this class manages its own queue.
|
| 4 |
+
No multi-threading is implied; you are supposed to hack that
|
| 5 |
+
yourself, or use a single instance per application.
|
| 6 |
+
|
| 7 |
+
Each instance is parametrized with two functions, one that is
|
| 8 |
+
supposed to return the current time, one that is supposed to
|
| 9 |
+
implement a delay. You can implement real-time scheduling by
|
| 10 |
+
substituting time and sleep from built-in module time, or you can
|
| 11 |
+
implement simulated time by writing your own functions. This can
|
| 12 |
+
also be used to integrate scheduling with STDWIN events; the delay
|
| 13 |
+
function is allowed to modify the queue. Time can be expressed as
|
| 14 |
+
integers or floating point numbers, as long as it is consistent.
|
| 15 |
+
|
| 16 |
+
Events are specified by tuples (time, priority, action, argument, kwargs).
|
| 17 |
+
As in UNIX, lower priority numbers mean higher priority; in this
|
| 18 |
+
way the queue can be maintained as a priority queue. Execution of the
|
| 19 |
+
event means calling the action function, passing it the argument
|
| 20 |
+
sequence in "argument" (remember that in Python, multiple function
|
| 21 |
+
arguments are be packed in a sequence) and keyword parameters in "kwargs".
|
| 22 |
+
The action function may be an instance method so it
|
| 23 |
+
has another way to reference private data (besides global variables).
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
import time
|
| 27 |
+
import heapq
|
| 28 |
+
from collections import namedtuple
|
| 29 |
+
from itertools import count
|
| 30 |
+
import threading
|
| 31 |
+
from time import monotonic as _time
|
| 32 |
+
|
| 33 |
+
__all__ = ["scheduler"]
|
| 34 |
+
|
| 35 |
+
# Immutable record describing one scheduled event.  The heap ordering
# is the namedtuple's lexicographic comparison: time first, then
# priority, then the tie-breaking sequence number.
Event = namedtuple('Event', 'time, priority, sequence, action, argument, kwargs')
Event.time.__doc__ = ('''Numeric type compatible with the return value of the
timefunc function passed to the constructor.''')
Event.priority.__doc__ = ('''Events scheduled for the same time will be executed
in the order of their priority.''')
Event.sequence.__doc__ = ('''A continually increasing sequence number that
separates events if time and priority are equal.''')
Event.action.__doc__ = ('''Executing the event means executing
action(*argument, **kwargs)''')
Event.argument.__doc__ = ('''argument is a sequence holding the positional
arguments for the action.''')
Event.kwargs.__doc__ = ('''kwargs is a dictionary holding the keyword
arguments for the action.''')

# Unique marker distinguishing "no kwargs supplied" from an explicit {};
# also avoids a shared mutable default argument.
_sentinel = object()
|
| 50 |
+
|
| 51 |
+
class scheduler:
|
| 52 |
+
|
| 53 |
+
def __init__(self, timefunc=_time, delayfunc=time.sleep):
|
| 54 |
+
"""Initialize a new instance, passing the time and delay
|
| 55 |
+
functions"""
|
| 56 |
+
self._queue = []
|
| 57 |
+
self._lock = threading.RLock()
|
| 58 |
+
self.timefunc = timefunc
|
| 59 |
+
self.delayfunc = delayfunc
|
| 60 |
+
self._sequence_generator = count()
|
| 61 |
+
|
| 62 |
+
def enterabs(self, time, priority, action, argument=(), kwargs=_sentinel):
|
| 63 |
+
"""Enter a new event in the queue at an absolute time.
|
| 64 |
+
|
| 65 |
+
Returns an ID for the event which can be used to remove it,
|
| 66 |
+
if necessary.
|
| 67 |
+
|
| 68 |
+
"""
|
| 69 |
+
if kwargs is _sentinel:
|
| 70 |
+
kwargs = {}
|
| 71 |
+
|
| 72 |
+
with self._lock:
|
| 73 |
+
event = Event(time, priority, next(self._sequence_generator),
|
| 74 |
+
action, argument, kwargs)
|
| 75 |
+
heapq.heappush(self._queue, event)
|
| 76 |
+
return event # The ID
|
| 77 |
+
|
| 78 |
+
def enter(self, delay, priority, action, argument=(), kwargs=_sentinel):
|
| 79 |
+
"""A variant that specifies the time as a relative time.
|
| 80 |
+
|
| 81 |
+
This is actually the more commonly used interface.
|
| 82 |
+
|
| 83 |
+
"""
|
| 84 |
+
time = self.timefunc() + delay
|
| 85 |
+
return self.enterabs(time, priority, action, argument, kwargs)
|
| 86 |
+
|
| 87 |
+
def cancel(self, event):
|
| 88 |
+
"""Remove an event from the queue.
|
| 89 |
+
|
| 90 |
+
This must be presented the ID as returned by enter().
|
| 91 |
+
If the event is not in the queue, this raises ValueError.
|
| 92 |
+
|
| 93 |
+
"""
|
| 94 |
+
with self._lock:
|
| 95 |
+
self._queue.remove(event)
|
| 96 |
+
heapq.heapify(self._queue)
|
| 97 |
+
|
| 98 |
+
def empty(self):
|
| 99 |
+
"""Check whether the queue is empty."""
|
| 100 |
+
with self._lock:
|
| 101 |
+
return not self._queue
|
| 102 |
+
|
| 103 |
+
def run(self, blocking=True):
|
| 104 |
+
"""Execute events until the queue is empty.
|
| 105 |
+
If blocking is False executes the scheduled events due to
|
| 106 |
+
expire soonest (if any) and then return the deadline of the
|
| 107 |
+
next scheduled call in the scheduler.
|
| 108 |
+
|
| 109 |
+
When there is a positive delay until the first event, the
|
| 110 |
+
delay function is called and the event is left in the queue;
|
| 111 |
+
otherwise, the event is removed from the queue and executed
|
| 112 |
+
(its action function is called, passing it the argument). If
|
| 113 |
+
the delay function returns prematurely, it is simply
|
| 114 |
+
restarted.
|
| 115 |
+
|
| 116 |
+
It is legal for both the delay function and the action
|
| 117 |
+
function to modify the queue or to raise an exception;
|
| 118 |
+
exceptions are not caught but the scheduler's state remains
|
| 119 |
+
well-defined so run() may be called again.
|
| 120 |
+
|
| 121 |
+
A questionable hack is added to allow other threads to run:
|
| 122 |
+
just after an event is executed, a delay of 0 is executed, to
|
| 123 |
+
avoid monopolizing the CPU when other threads are also
|
| 124 |
+
runnable.
|
| 125 |
+
|
| 126 |
+
"""
|
| 127 |
+
# localize variable access to minimize overhead
|
| 128 |
+
# and to improve thread safety
|
| 129 |
+
lock = self._lock
|
| 130 |
+
q = self._queue
|
| 131 |
+
delayfunc = self.delayfunc
|
| 132 |
+
timefunc = self.timefunc
|
| 133 |
+
pop = heapq.heappop
|
| 134 |
+
while True:
|
| 135 |
+
with lock:
|
| 136 |
+
if not q:
|
| 137 |
+
break
|
| 138 |
+
(time, priority, sequence, action,
|
| 139 |
+
argument, kwargs) = q[0]
|
| 140 |
+
now = timefunc()
|
| 141 |
+
if time > now:
|
| 142 |
+
delay = True
|
| 143 |
+
else:
|
| 144 |
+
delay = False
|
| 145 |
+
pop(q)
|
| 146 |
+
if delay:
|
| 147 |
+
if not blocking:
|
| 148 |
+
return time - now
|
| 149 |
+
delayfunc(time - now)
|
| 150 |
+
else:
|
| 151 |
+
action(*argument, **kwargs)
|
| 152 |
+
delayfunc(0) # Let other threads run
|
| 153 |
+
|
| 154 |
+
@property
|
| 155 |
+
def queue(self):
|
| 156 |
+
"""An ordered list of upcoming events.
|
| 157 |
+
|
| 158 |
+
Events are named tuples with fields for:
|
| 159 |
+
time, priority, action, arguments, kwargs
|
| 160 |
+
|
| 161 |
+
"""
|
| 162 |
+
# Use heapq to sort the queue rather than using 'sorted(self._queue)'.
|
| 163 |
+
# With heapq, two events scheduled at the same time will show in
|
| 164 |
+
# the actual order they would be retrieved.
|
| 165 |
+
with self._lock:
|
| 166 |
+
events = self._queue[:]
|
| 167 |
+
return list(map(heapq.heappop, [events]*len(events)))
|
evalkit_cambrian/lib/python3.10/signal.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import _signal
from _signal import *
from enum import IntEnum as _IntEnum

# Snapshot of this module's namespace, used below to test which optional
# _signal names exist on this platform.
_globals = globals()

# Wrap the integer signal constants (SIG* and the Windows CTRL_* events)
# in an IntEnum for informative reprs; the SIG_* handler constants are
# deliberately excluded here.
_IntEnum._convert_(
        'Signals', __name__,
        lambda name:
            name.isupper()
            and (name.startswith('SIG') and not name.startswith('SIG_'))
            or name.startswith('CTRL_'))

# The two standard handler constants, SIG_DFL and SIG_IGN.
_IntEnum._convert_(
        'Handlers', __name__,
        lambda name: name in ('SIG_DFL', 'SIG_IGN'))

# pthread_sigmask() "how" constants, only where the platform provides them.
if 'pthread_sigmask' in _globals:
    _IntEnum._convert_(
            'Sigmasks', __name__,
            lambda name: name in ('SIG_BLOCK', 'SIG_UNBLOCK', 'SIG_SETMASK'))
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _int_to_enum(value, enum_klass):
    """Convert *value* to a member of *enum_klass*.

    Unknown values are passed through unchanged instead of raising.
    """
    try:
        member = enum_klass(value)
    except ValueError:
        return value
    return member
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _enum_to_int(value):
    """Return *value* as a plain int when possible.

    Values that cannot be converted (e.g. None as a handler) are
    returned unchanged.
    """
    try:
        result = int(value)
    except (ValueError, TypeError):
        result = value
    return result
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# Cut-down functools.wraps(): copy __doc__ only.
# __module__ is preserved automatically, __name__ and __qualname__ are
# already fine, and __annotations__ is deliberately not set.
def _wraps(wrapped):
    def copy_doc(wrapper):
        wrapper.__doc__ = wrapped.__doc__
        return wrapper
    return copy_doc
|
| 53 |
+
|
| 54 |
+
@_wraps(_signal.signal)
def signal(signalnum, handler):
    # Accept Signals/Handlers members or plain ints; the previous handler
    # is mapped back onto Handlers when it is SIG_DFL/SIG_IGN.
    old = _signal.signal(_enum_to_int(signalnum), _enum_to_int(handler))
    return _int_to_enum(old, Handlers)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
@_wraps(_signal.getsignal)
def getsignal(signalnum):
    # The C function returns an int (or None); report SIG_DFL/SIG_IGN
    # as Handlers members for nicer reprs.
    return _int_to_enum(_signal.getsignal(signalnum), Handlers)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# Only available where the platform's _signal module provides it.
if 'pthread_sigmask' in _globals:
    @_wraps(_signal.pthread_sigmask)
    def pthread_sigmask(how, mask):
        # Report the old mask as a set of Signals members.  Use a set
        # comprehension rather than set(generator) for consistency with
        # the sibling wrappers sigpending() and valid_signals().
        sigs_set = _signal.pthread_sigmask(how, mask)
        return {_int_to_enum(x, Signals) for x in sigs_set}
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
# The wrappers below exist only on platforms whose _signal module
# exposes the corresponding POSIX function; each one converts raw
# integer signal numbers into Signals members where possible.

if 'sigpending' in _globals:
    @_wraps(_signal.sigpending)
    def sigpending():
        return {_int_to_enum(x, Signals) for x in _signal.sigpending()}


if 'sigwait' in _globals:
    @_wraps(_signal.sigwait)
    def sigwait(sigset):
        retsig = _signal.sigwait(sigset)
        return _int_to_enum(retsig, Signals)


if 'valid_signals' in _globals:
    @_wraps(_signal.valid_signals)
    def valid_signals():
        return {_int_to_enum(x, Signals) for x in _signal.valid_signals()}


# Drop helpers that are only needed at module definition time.
del _globals, _wraps
|
evalkit_cambrian/lib/python3.10/sndhdr.py
ADDED
|
@@ -0,0 +1,257 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Routines to help recognizing sound files.
|
| 2 |
+
|
| 3 |
+
Function whathdr() recognizes various types of sound file headers.
|
| 4 |
+
It understands almost all headers that SOX can decode.
|
| 5 |
+
|
| 6 |
+
The return tuple contains the following items, in this order:
|
| 7 |
+
- file type (as SOX understands it)
|
| 8 |
+
- sampling rate (0 if unknown or hard to decode)
|
| 9 |
+
- number of channels (0 if unknown or hard to decode)
|
| 10 |
+
- number of frames in the file (-1 if unknown or hard to decode)
|
| 11 |
+
- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW
|
| 12 |
+
|
| 13 |
+
If the file doesn't have a recognizable type, it returns None.
|
| 14 |
+
If the file can't be opened, OSError is raised.
|
| 15 |
+
|
| 16 |
+
To compute the total time, divide the number of frames by the
|
| 17 |
+
sampling rate (a frame contains a sample for each channel).
|
| 18 |
+
|
| 19 |
+
Function what() calls whathdr(). (It used to also use some
|
| 20 |
+
heuristics for raw data, but this doesn't work very well.)
|
| 21 |
+
|
| 22 |
+
Finally, the function test() is a simple main program that calls
|
| 23 |
+
what() for all files mentioned on the argument list. For directory
|
| 24 |
+
arguments it calls what() for all files in that directory. Default
|
| 25 |
+
argument is "." (testing all files in the current directory). The
|
| 26 |
+
option -r tells it to recurse down directories found inside
|
| 27 |
+
explicitly given directories.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
# The file structure is top-down except that the test program and its
|
| 31 |
+
# subroutine come last.
|
| 32 |
+
|
| 33 |
+
__all__ = ['what', 'whathdr']
|
| 34 |
+
|
| 35 |
+
from collections import namedtuple
|
| 36 |
+
|
| 37 |
+
# Result record returned by what()/whathdr(); see the module docstring
# for the meaning of each field.
SndHeaders = namedtuple('SndHeaders',
                        'filetype framerate nchannels nframes sampwidth')

SndHeaders.filetype.__doc__ = ("""The value for type indicates the data type
and will be one of the strings 'aifc', 'aiff', 'au','hcom',
'sndr', 'sndt', 'voc', 'wav', '8svx', 'sb', 'ub', or 'ul'.""")
SndHeaders.framerate.__doc__ = ("""The sampling_rate will be either the actual
value or 0 if unknown or difficult to decode.""")
SndHeaders.nchannels.__doc__ = ("""The number of channels or 0 if it cannot be
determined or if the value is difficult to decode.""")
SndHeaders.nframes.__doc__ = ("""The value for frames will be either the number
of frames or -1.""")
SndHeaders.sampwidth.__doc__ = ("""Either the sample size in bits or
'A' for A-LAW or 'U' for u-LAW.""")
|
| 51 |
+
|
| 52 |
+
def what(filename):
    """Guess the type of a sound file."""
    # Raw-data heuristics were dropped long ago; whathdr() does all the work.
    return whathdr(filename)
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def whathdr(filename):
    """Recognize sound headers.

    Runs each registered format test against the first 512 bytes of the
    file and returns a SndHeaders namedtuple for the first match, or
    None when no format is recognized.
    """
    with open(filename, 'rb') as stream:
        header = stream.read(512)
        for probe in tests:
            info = probe(header, stream)
            if info:
                return SndHeaders(*info)
        return None
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
#-----------------------------------#
# Subroutines per sound header type #
#-----------------------------------#

# Each recognizer takes (header_bytes, open_file) and returns a result
# tuple for whathdr(), or None when its format does not match.
tests = []

def test_aifc(h, f):
    """Recognize AIFF and AIFF-C files via the aifc module."""
    import aifc
    if not h.startswith(b'FORM'):
        return None
    form_type = h[8:12]
    if form_type == b'AIFC':
        fmt = 'aifc'
    elif form_type == b'AIFF':
        fmt = 'aiff'
    else:
        return None
    f.seek(0)
    try:
        a = aifc.open(f, 'r')
    except (EOFError, aifc.Error):
        return None
    return (fmt, a.getframerate(), a.getnchannels(),
            a.getnframes(), 8 * a.getsampwidth())

tests.append(test_aifc)


def test_au(h, f):
    """Recognize Sun/NeXT .au files (both byte orders)."""
    if h.startswith(b'.snd'):
        word = get_long_be
    elif h[:4] in (b'\0ds.', b'dns.'):
        word = get_long_le
    else:
        return None
    hdr_size = word(h[4:8])    # header size field; parsed but unused
    data_size = word(h[8:12])
    encoding = word(h[12:16])
    rate = word(h[16:20])
    nchannels = word(h[20:24])
    # Map the encoding word to (bits per sample, bytes per sample).
    if encoding == 1:
        sample_bits, sample_size = 'U', 1    # 8-bit u-law
    elif encoding == 2:
        sample_bits, sample_size = 8, 1      # 8-bit linear PCM
    elif encoding == 3:
        sample_bits, sample_size = 16, 2     # 16-bit linear PCM
    else:
        sample_bits, sample_size = '?', 1
    frame_size = sample_size * nchannels
    nframe = data_size / frame_size if frame_size else -1
    return 'au', rate, nchannels, nframe, sample_bits

tests.append(test_au)


def test_hcom(h, f):
    """Recognize Macintosh HCOM files."""
    if h[65:69] != b'FSSD' or h[128:132] != b'HCOM':
        return None
    divisor = get_long_be(h[144:148])
    rate = 22050 / divisor if divisor else 0
    return 'hcom', rate, 1, -1, 8

tests.append(test_hcom)


def test_voc(h, f):
    """Recognize Creative Labs .voc files."""
    if not h.startswith(b'Creative Voice File\032'):
        return None
    block_offset = get_short_le(h[20:22])
    rate = 0
    if 0 <= block_offset < 500 and h[block_offset] == 1:
        ratecode = 256 - h[block_offset + 4]
        if ratecode:
            rate = int(1000000.0 / ratecode)
    return 'voc', rate, 1, -1, 8

tests.append(test_voc)


def test_wav(h, f):
    """Recognize RIFF/WAVE files via the wave module."""
    import wave
    # 'RIFF' <len> 'WAVE' 'fmt ' <len>
    if not h.startswith(b'RIFF') or h[8:12] != b'WAVE' or h[12:16] != b'fmt ':
        return None
    f.seek(0)
    try:
        w = wave.open(f, 'r')
    except (EOFError, wave.Error):
        return None
    return ('wav', w.getframerate(), w.getnchannels(),
            w.getnframes(), 8 * w.getsampwidth())

tests.append(test_wav)


def test_8svx(h, f):
    """Recognize Amiga 8SVX files."""
    if not h.startswith(b'FORM') or h[8:12] != b'8SVX':
        return None
    # Should decode it to get #channels -- assume always 1.
    return '8svx', 0, 1, 0, 8

tests.append(test_8svx)


def test_sndt(h, f):
    """Recognize SNDT files."""
    if not h.startswith(b'SOUND'):
        return None
    nsamples = get_long_le(h[8:12])
    rate = get_short_le(h[20:22])
    return 'sndt', rate, 1, nsamples, 8

tests.append(test_sndt)


def test_sndr(h, f):
    """Recognize SNDR files (guessed from a plausible sample rate)."""
    if not h.startswith(b'\0\0'):
        return None
    rate = get_short_le(h[2:4])
    if 4000 <= rate <= 25000:
        return 'sndr', rate, 1, -1, 8
    return None

tests.append(test_sndr)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
#-------------------------------------------#
# Subroutines to extract numbers from bytes #
#-------------------------------------------#

def get_long_be(b):
    """32-bit big-endian unsigned integer from b[0:4]."""
    return b[3] | (b[2] << 8) | (b[1] << 16) | (b[0] << 24)

def get_long_le(b):
    """32-bit little-endian unsigned integer from b[0:4]."""
    return b[0] | (b[1] << 8) | (b[2] << 16) | (b[3] << 24)

def get_short_be(b):
    """16-bit big-endian unsigned integer from b[0:2]."""
    return b[1] | (b[0] << 8)

def get_short_le(b):
    """16-bit little-endian unsigned integer from b[0:2]."""
    return b[0] | (b[1] << 8)
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
#--------------------#
|
| 217 |
+
# Small test program #
|
| 218 |
+
#--------------------#
|
| 219 |
+
|
| 220 |
+
def test():
    """Command-line driver: report header info for each named file.

    A leading -r argument requests recursion into subdirectories.
    """
    import sys
    recursive = 0
    if sys.argv[1:] and sys.argv[1] == '-r':
        del sys.argv[1:2]
        recursive = 1
    try:
        # Default to the current directory when no paths were given.
        testall(sys.argv[1:] or ['.'], recursive, 1)
    except KeyboardInterrupt:
        sys.stderr.write('\n[Interrupted]\n')
        sys.exit(1)
|
| 234 |
+
|
| 235 |
+
def testall(list, recursive, toplevel):
    """Print what() results for every path in *list*.

    Directories are expanded one level at the top, or without limit
    when *recursive* is true.
    """
    import sys
    import os
    for filename in list:
        if not os.path.isdir(filename):
            print(filename + ':', end=' ')
            sys.stdout.flush()
            try:
                print(what(filename))
            except OSError:
                print('*** not found ***')
            continue
        print(filename + '/:', end=' ')
        if recursive or toplevel:
            print('recursing down:')
            import glob
            names = glob.glob(os.path.join(glob.escape(filename), '*'))
            testall(names, recursive, 0)
        else:
            print('*** directory (use -r) ***')

if __name__ == '__main__':
    test()
|
evalkit_cambrian/lib/python3.10/socketserver.py
ADDED
|
@@ -0,0 +1,844 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generic socket server classes.
|
| 2 |
+
|
| 3 |
+
This module tries to capture the various aspects of defining a server:
|
| 4 |
+
|
| 5 |
+
For socket-based servers:
|
| 6 |
+
|
| 7 |
+
- address family:
|
| 8 |
+
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
|
| 9 |
+
- AF_UNIX: Unix domain sockets
|
| 10 |
+
- others, e.g. AF_DECNET are conceivable (see <socket.h>)
|
| 11 |
+
- socket type:
|
| 12 |
+
- SOCK_STREAM (reliable stream, e.g. TCP)
|
| 13 |
+
- SOCK_DGRAM (datagrams, e.g. UDP)
|
| 14 |
+
|
| 15 |
+
For request-based servers (including socket-based):
|
| 16 |
+
|
| 17 |
+
- client address verification before further looking at the request
|
| 18 |
+
(This is actually a hook for any processing that needs to look
|
| 19 |
+
at the request before anything else, e.g. logging)
|
| 20 |
+
- how to handle multiple requests:
|
| 21 |
+
- synchronous (one request is handled at a time)
|
| 22 |
+
- forking (each request is handled by a new process)
|
| 23 |
+
- threading (each request is handled by a new thread)
|
| 24 |
+
|
| 25 |
+
The classes in this module favor the server type that is simplest to
|
| 26 |
+
write: a synchronous TCP/IP server. This is bad class design, but
|
| 27 |
+
saves some typing. (There's also the issue that a deep class hierarchy
|
| 28 |
+
slows down method lookups.)
|
| 29 |
+
|
| 30 |
+
There are five classes in an inheritance diagram, four of which represent
|
| 31 |
+
synchronous servers of four types:
|
| 32 |
+
|
| 33 |
+
+------------+
|
| 34 |
+
| BaseServer |
|
| 35 |
+
+------------+
|
| 36 |
+
|
|
| 37 |
+
v
|
| 38 |
+
+-----------+ +------------------+
|
| 39 |
+
| TCPServer |------->| UnixStreamServer |
|
| 40 |
+
+-----------+ +------------------+
|
| 41 |
+
|
|
| 42 |
+
v
|
| 43 |
+
+-----------+ +--------------------+
|
| 44 |
+
| UDPServer |------->| UnixDatagramServer |
|
| 45 |
+
+-----------+ +--------------------+
|
| 46 |
+
|
| 47 |
+
Note that UnixDatagramServer derives from UDPServer, not from
|
| 48 |
+
UnixStreamServer -- the only difference between an IP and a Unix
|
| 49 |
+
stream server is the address family, which is simply repeated in both
|
| 50 |
+
unix server classes.
|
| 51 |
+
|
| 52 |
+
Forking and threading versions of each type of server can be created
|
| 53 |
+
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
|
| 54 |
+
instance, a threading UDP server class is created as follows:
|
| 55 |
+
|
| 56 |
+
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
|
| 57 |
+
|
| 58 |
+
The Mix-in class must come first, since it overrides a method defined
|
| 59 |
+
in UDPServer! Setting the various member variables also changes
|
| 60 |
+
the behavior of the underlying server mechanism.
|
| 61 |
+
|
| 62 |
+
To implement a service, you must derive a class from
|
| 63 |
+
BaseRequestHandler and redefine its handle() method. You can then run
|
| 64 |
+
various versions of the service by combining one of the server classes
|
| 65 |
+
with your request handler class.
|
| 66 |
+
|
| 67 |
+
The request handler class must be different for datagram or stream
|
| 68 |
+
services. This can be hidden by using the request handler
|
| 69 |
+
subclasses StreamRequestHandler or DatagramRequestHandler.
|
| 70 |
+
|
| 71 |
+
Of course, you still have to use your head!
|
| 72 |
+
|
| 73 |
+
For instance, it makes no sense to use a forking server if the service
|
| 74 |
+
contains state in memory that can be modified by requests (since the
|
| 75 |
+
modifications in the child process would never reach the initial state
|
| 76 |
+
kept in the parent process and passed to each child). In this case,
|
| 77 |
+
you can use a threading server, but you will probably have to use
|
| 78 |
+
locks to avoid two requests that come in nearly simultaneously from applying
|
| 79 |
+
conflicting changes to the server state.
|
| 80 |
+
|
| 81 |
+
On the other hand, if you are building e.g. an HTTP server, where all
|
| 82 |
+
data is stored externally (e.g. in the file system), a synchronous
|
| 83 |
+
class will essentially render the service "deaf" while one request is
|
| 84 |
+
being handled -- which may be for a very long time if a client is slow
|
| 85 |
+
to read all the data it has requested. Here a threading or forking
|
| 86 |
+
server is appropriate.
|
| 87 |
+
|
| 88 |
+
In some cases, it may be appropriate to process part of a request
|
| 89 |
+
synchronously, but to finish processing in a forked child depending on
|
| 90 |
+
the request data. This can be implemented by using a synchronous
|
| 91 |
+
server and doing an explicit fork in the request handler class
|
| 92 |
+
handle() method.
|
| 93 |
+
|
| 94 |
+
Another approach to handling multiple simultaneous requests in an
|
| 95 |
+
environment that supports neither threads nor fork (or where these are
|
| 96 |
+
too expensive or inappropriate for the service) is to maintain an
|
| 97 |
+
explicit table of partially finished requests and to use a selector to
|
| 98 |
+
decide which request to work on next (or whether to handle a new
|
| 99 |
+
incoming request). This is particularly important for stream services
|
| 100 |
+
where each client can potentially be connected for a long time (if
|
| 101 |
+
threads or subprocesses cannot be used).
|
| 102 |
+
|
| 103 |
+
Future work:
|
| 104 |
+
- Standard classes for Sun RPC (which uses either UDP or TCP)
|
| 105 |
+
- Standard mix-in classes to implement various authentication
|
| 106 |
+
and encryption schemes
|
| 107 |
+
|
| 108 |
+
XXX Open problems:
|
| 109 |
+
- What to do with out-of-band data?
|
| 110 |
+
|
| 111 |
+
BaseServer:
|
| 112 |
+
- split generic "request" functionality out into BaseServer class.
|
| 113 |
+
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
|
| 114 |
+
|
| 115 |
+
example: read entries from a SQL database (requires overriding
|
| 116 |
+
get_request() to return a table entry from the database).
|
| 117 |
+
entry is processed by a RequestHandlerClass.
|
| 118 |
+
|
| 119 |
+
"""
|
| 120 |
+
|
| 121 |
+
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
|
| 122 |
+
|
| 123 |
+
__version__ = "0.4"
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
import socket
|
| 127 |
+
import selectors
|
| 128 |
+
import os
|
| 129 |
+
import sys
|
| 130 |
+
import threading
|
| 131 |
+
from io import BufferedIOBase
|
| 132 |
+
from time import monotonic as time
|
| 133 |
+
|
| 134 |
+
__all__ = ["BaseServer", "TCPServer", "UDPServer",
|
| 135 |
+
"ThreadingUDPServer", "ThreadingTCPServer",
|
| 136 |
+
"BaseRequestHandler", "StreamRequestHandler",
|
| 137 |
+
"DatagramRequestHandler", "ThreadingMixIn"]
|
| 138 |
+
if hasattr(os, "fork"):
|
| 139 |
+
__all__.extend(["ForkingUDPServer","ForkingTCPServer", "ForkingMixIn"])
|
| 140 |
+
if hasattr(socket, "AF_UNIX"):
|
| 141 |
+
__all__.extend(["UnixStreamServer","UnixDatagramServer",
|
| 142 |
+
"ThreadingUnixStreamServer",
|
| 143 |
+
"ThreadingUnixDatagramServer"])
|
| 144 |
+
|
| 145 |
+
# poll/select have the advantage of not requiring any extra file descriptor,
|
| 146 |
+
# contrarily to epoll/kqueue (also, they require a single syscall).
|
| 147 |
+
if hasattr(selectors, 'PollSelector'):
|
| 148 |
+
_ServerSelector = selectors.PollSelector
|
| 149 |
+
else:
|
| 150 |
+
_ServerSelector = selectors.SelectSelector
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
class BaseServer:

    """Base class for server classes.

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request()  # if you do not use serve_forever()
    - fileno() -> int   # for selector

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - server_close()
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - service_actions()
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - timeout
    - address_family
    - socket_type
    - allow_reuse_address

    Instance variables:

    - RequestHandlerClass
    - socket

    """

    timeout = None

    def __init__(self, server_address, RequestHandlerClass):
        """Constructor.  May be extended, do not override."""
        self.server_address = server_address
        self.RequestHandlerClass = RequestHandlerClass
        self.__is_shut_down = threading.Event()
        self.__shutdown_request = False

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.
        """
        pass

    def serve_forever(self, poll_interval=0.5):
        """Handle one request at a time until shutdown.

        Polls for shutdown every poll_interval seconds.  Ignores
        self.timeout.  If you need to do periodic tasks, do them in
        another thread.
        """
        self.__is_shut_down.clear()
        try:
            # Polling (rather than a dedicated wakeup file descriptor or a
            # connection to the socket) keeps this simple at the cost of
            # some shutdown latency and idle CPU.
            with _ServerSelector() as poller:
                poller.register(self, selectors.EVENT_READ)

                while not self.__shutdown_request:
                    events = poller.select(poll_interval)
                    # bpo-35017: shutdown() may have been called while we
                    # were blocked in select(); exit immediately.
                    if self.__shutdown_request:
                        break
                    if events:
                        self._handle_request_noblock()

                    self.service_actions()
        finally:
            self.__shutdown_request = False
            self.__is_shut_down.set()

    def shutdown(self):
        """Stop the serve_forever loop.

        Blocks until the loop has finished.  This must be called while
        serve_forever() is running in another thread, or it will
        deadlock.
        """
        self.__shutdown_request = True
        self.__is_shut_down.wait()

    def service_actions(self):
        """Hook called once per serve_forever() iteration.

        May be overridden by a subclass / mixin to run code inside the
        serving loop.
        """
        pass

    # The distinction between handling, getting, processing and finishing a
    # request is fairly arbitrary.  Remember:
    #
    # - handle_request() is the top-level call.  It calls selector.select(),
    #   get_request(), verify_request() and process_request()
    # - get_request() is different for stream or datagram sockets
    # - process_request() is the place that may fork a new process or create
    #   a new thread to finish the request
    # - finish_request() instantiates the request handler class; this
    #   constructor will handle the request all by itself

    def handle_request(self):
        """Handle one request, possibly blocking.

        Respects self.timeout.
        """
        # Honour socket.settimeout() for code written before self.timeout
        # existed; the effective timeout is the smaller of the two.
        timeout = self.socket.gettimeout()
        if timeout is None:
            timeout = self.timeout
        elif self.timeout is not None:
            timeout = min(timeout, self.timeout)
        if timeout is not None:
            deadline = time() + timeout

        # Wait until a request arrives or the timeout expires - the loop is
        # necessary to accommodate early wakeups due to EINTR.
        with _ServerSelector() as poller:
            poller.register(self, selectors.EVENT_READ)

            while True:
                if poller.select(timeout):
                    return self._handle_request_noblock()
                if timeout is not None:
                    timeout = deadline - time()
                    if timeout < 0:
                        return self.handle_timeout()

    def _handle_request_noblock(self):
        """Handle one request, without blocking.

        Assumes selector.select() has already reported the socket as
        readable, so get_request() should not block.
        """
        try:
            request, client_address = self.get_request()
        except OSError:
            return
        if self.verify_request(request, client_address):
            try:
                self.process_request(request, client_address)
            except Exception:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
            except:
                # Non-Exception (e.g. KeyboardInterrupt): clean up, then
                # let it propagate.
                self.shutdown_request(request)
                raise
        else:
            self.shutdown_request(request)

    def handle_timeout(self):
        """Called if no new request arrives within self.timeout.

        Overridden by ForkingMixIn.
        """
        pass

    def verify_request(self, request, client_address):
        """Verify the request.  May be overridden.

        Return True if we should proceed with this request.
        """
        return True

    def process_request(self, request, client_address):
        """Call finish_request.

        Overridden by ForkingMixIn and ThreadingMixIn.
        """
        self.finish_request(request, client_address)
        self.shutdown_request(request)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.
        """
        pass

    def finish_request(self, request, client_address):
        """Finish one request by instantiating RequestHandlerClass."""
        self.RequestHandlerClass(request, client_address, self)

    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        self.close_request(request)

    def close_request(self, request):
        """Called to clean up an individual request."""
        pass

    def handle_error(self, request, client_address):
        """Handle an error gracefully.  May be overridden.

        The default is to print a traceback and continue.
        """
        print('-'*40, file=sys.stderr)
        print('Exception occurred during processing of request from',
              client_address, file=sys.stderr)
        import traceback
        traceback.print_exc()
        print('-'*40, file=sys.stderr)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.server_close()
class TCPServer(BaseServer):

    """Base class for various socket-based server classes.

    Defaults to synchronous IP stream (i.e., TCP).

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass, bind_and_activate=True)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request()  # if you don't use serve_forever()
    - fileno() -> int   # for selector

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - timeout
    - address_family
    - socket_type
    - request_queue_size (only for stream sockets)
    - allow_reuse_address

    Instance variables:

    - server_address
    - RequestHandlerClass
    - socket

    """

    address_family = socket.AF_INET

    socket_type = socket.SOCK_STREAM

    request_queue_size = 5

    allow_reuse_address = False

    def __init__(self, server_address, RequestHandlerClass,
                 bind_and_activate=True):
        """Constructor.  May be extended, do not override."""
        BaseServer.__init__(self, server_address, RequestHandlerClass)
        self.socket = socket.socket(self.address_family, self.socket_type)
        if bind_and_activate:
            try:
                self.server_bind()
                self.server_activate()
            except:
                # Don't leak the listening socket when bind/activate fails.
                self.server_close()
                raise

    def server_bind(self):
        """Called by constructor to bind the socket.

        May be overridden.
        """
        if self.allow_reuse_address:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
        # Re-read the address so an OS-assigned ephemeral port is visible.
        self.server_address = self.socket.getsockname()

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.
        """
        self.socket.listen(self.request_queue_size)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.
        """
        self.socket.close()

    def fileno(self):
        """Return socket file number.

        Interface required by selector.
        """
        return self.socket.fileno()

    def get_request(self):
        """Get the request and client address from the socket.

        May be overridden.
        """
        return self.socket.accept()

    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        try:
            # Explicitly shutdown.  socket.close() merely releases the
            # socket and waits for GC to perform the actual close.
            request.shutdown(socket.SHUT_WR)
        except OSError:
            pass  # some platforms may raise ENOTCONN here
        self.close_request(request)

    def close_request(self, request):
        """Called to clean up an individual request."""
        request.close()
class UDPServer(TCPServer):

    """UDP server class."""

    allow_reuse_address = False

    socket_type = socket.SOCK_DGRAM

    # Largest datagram accepted per recvfrom() call.
    max_packet_size = 8192

    def get_request(self):
        # A datagram "request" is the (data, socket) pair; the handler
        # replies through the same server socket.
        data, client_addr = self.socket.recvfrom(self.max_packet_size)
        return (data, self.socket), client_addr

    def server_activate(self):
        # No need to call listen() for UDP.
        pass

    def shutdown_request(self, request):
        # No need to shutdown anything.
        self.close_request(request)

    def close_request(self, request):
        # No need to close anything.
        pass
if hasattr(os, "fork"):
    class ForkingMixIn:
        """Mix-in class to handle each request in a new process."""

        timeout = 300
        active_children = None
        max_children = 40
        # If true, server_close() waits until all child processes complete.
        block_on_close = True

        def collect_children(self, *, blocking=False):
            """Internal routine to wait for children that have exited."""
            if self.active_children is None:
                return

            # While at or above max_children, block until some child exits.
            # waitpid(-1) reaps in O(defunct children) syscalls rather than
            # O(children); the downside is that it may reap children we did
            # not spawn, which is why it is only used above max_children.
            while len(self.active_children) >= self.max_children:
                try:
                    pid, _ = os.waitpid(-1, 0)
                    self.active_children.discard(pid)
                except ChildProcessError:
                    # We don't have any children at all; we're done.
                    self.active_children.clear()
                except OSError:
                    break

            # Now reap all defunct children.
            for pid in self.active_children.copy():
                try:
                    flags = 0 if blocking else os.WNOHANG
                    pid, _ = os.waitpid(pid, flags)
                    # A still-running child yields pid 0, which the
                    # discard() below simply ignores.
                    self.active_children.discard(pid)
                except ChildProcessError:
                    # Someone else already reaped it.
                    self.active_children.discard(pid)
                except OSError:
                    pass

        def handle_timeout(self):
            """Wait for zombies after self.timeout seconds of inactivity.

            May be extended, do not override.
            """
            self.collect_children()

        def service_actions(self):
            """Collect the zombie child processes regularly.

            service_actions is called in the BaseServer's serve_forever
            loop.
            """
            self.collect_children()

        def process_request(self, request, client_address):
            """Fork a new subprocess to process the request."""
            pid = os.fork()
            if pid:
                # Parent: remember the child and drop our copy of the
                # request socket.
                if self.active_children is None:
                    self.active_children = set()
                self.active_children.add(pid)
                self.close_request(request)
                return
            else:
                # Child process.
                # This must never return, hence os._exit()!
                status = 1
                try:
                    self.finish_request(request, client_address)
                    status = 0
                except Exception:
                    self.handle_error(request, client_address)
                finally:
                    try:
                        self.shutdown_request(request)
                    finally:
                        os._exit(status)

        def server_close(self):
            super().server_close()
            self.collect_children(blocking=self.block_on_close)
class _Threads(list):
|
| 632 |
+
"""
|
| 633 |
+
Joinable list of all non-daemon threads.
|
| 634 |
+
"""
|
| 635 |
+
def append(self, thread):
|
| 636 |
+
self.reap()
|
| 637 |
+
if thread.daemon:
|
| 638 |
+
return
|
| 639 |
+
super().append(thread)
|
| 640 |
+
|
| 641 |
+
def pop_all(self):
|
| 642 |
+
self[:], result = [], self[:]
|
| 643 |
+
return result
|
| 644 |
+
|
| 645 |
+
def join(self):
|
| 646 |
+
for thread in self.pop_all():
|
| 647 |
+
thread.join()
|
| 648 |
+
|
| 649 |
+
def reap(self):
|
| 650 |
+
self[:] = (thread for thread in self if thread.is_alive())
|
| 651 |
+
|
| 652 |
+
|
| 653 |
+
class _NoThreads:
|
| 654 |
+
"""
|
| 655 |
+
Degenerate version of _Threads.
|
| 656 |
+
"""
|
| 657 |
+
def append(self, thread):
|
| 658 |
+
pass
|
| 659 |
+
|
| 660 |
+
def join(self):
|
| 661 |
+
pass
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
class ThreadingMixIn:
    """Mix-in class to handle each request in a new thread."""

    # Decides how handler threads act upon termination of the main process.
    daemon_threads = False
    # If true, server_close() waits until all non-daemonic threads terminate.
    block_on_close = True
    # Threads object; used by server_close() to wait for all threads'
    # completion.
    _threads = _NoThreads()

    def process_request_thread(self, request, client_address):
        """Same as in BaseServer but as a thread.

        In addition, exception handling is done here.
        """
        try:
            self.finish_request(request, client_address)
        except Exception:
            self.handle_error(request, client_address)
        finally:
            self.shutdown_request(request)

    def process_request(self, request, client_address):
        """Start a new thread to process the request."""
        if self.block_on_close:
            # Lazily give this instance its own joinable thread list,
            # shadowing the class-level _NoThreads placeholder.
            vars(self).setdefault('_threads', _Threads())
        worker = threading.Thread(target=self.process_request_thread,
                                  args=(request, client_address))
        worker.daemon = self.daemon_threads
        self._threads.append(worker)
        worker.start()

    def server_close(self):
        super().server_close()
        self._threads.join()
if hasattr(os, "fork"):
    class ForkingUDPServer(ForkingMixIn, UDPServer):
        pass

    class ForkingTCPServer(ForkingMixIn, TCPServer):
        pass


class ThreadingUDPServer(ThreadingMixIn, UDPServer):
    pass


class ThreadingTCPServer(ThreadingMixIn, TCPServer):
    pass


if hasattr(socket, 'AF_UNIX'):

    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX

    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX

    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer):
        pass

    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer):
        pass
class BaseRequestHandler:

    """Base class for request handler classes.

    This class is instantiated for each request to be handled.  The
    constructor sets the instance variables request, client_address and
    server, and then calls the handle() method.  To implement a specific
    service, all you need to do is to derive a class which defines a
    handle() method.

    The handle() method can find the request as self.request, the client
    address as self.client_address, and the server (in case it needs
    access to per-server information) as self.server.  Since a separate
    instance is created for each request, the handle() method can define
    other arbitrary instance variables.
    """

    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        self.setup()
        try:
            self.handle()
        finally:
            # finish() always runs, even when handle() raises.
            self.finish()

    def setup(self):
        pass

    def handle(self):
        pass

    def finish(self):
        pass
| 760 |
+
|
| 761 |
+
# The following two classes make it possible to use the same service
|
| 762 |
+
# class for stream or datagram servers.
|
| 763 |
+
# Each class sets up these instance variables:
|
| 764 |
+
# - rfile: a file object from which receives the request is read
|
| 765 |
+
# - wfile: a file object to which the reply is written
|
| 766 |
+
# When the handle() method returns, wfile is flushed properly
|
| 767 |
+
|
| 768 |
+
|
| 769 |
+
class StreamRequestHandler(BaseRequestHandler):

    """Define self.rfile and self.wfile for stream sockets."""

    # Default buffer sizes for rfile, wfile.
    # rfile defaults to buffered because unbuffered reads would be really
    # slow for large data (a getc() call per byte); wfile is unbuffered
    # because (a) often after a write() we want to read and we need to
    # flush the line, and (b) big writes to unbuffered files are typically
    # optimized by stdio even when big reads aren't.
    rbufsize = -1
    wbufsize = 0

    # A timeout to apply to the request socket, if not None.
    timeout = None

    # Disable nagle algorithm for this socket, if True.
    # Use only when wbufsize != 0, to avoid small packets.
    disable_nagle_algorithm = False

    def setup(self):
        self.connection = self.request
        if self.timeout is not None:
            self.connection.settimeout(self.timeout)
        if self.disable_nagle_algorithm:
            self.connection.setsockopt(socket.IPPROTO_TCP,
                                       socket.TCP_NODELAY, True)
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        if self.wbufsize == 0:
            # Unbuffered: wrap the socket directly so no flush is needed.
            self.wfile = _SocketWriter(self.connection)
        else:
            self.wfile = self.connection.makefile('wb', self.wbufsize)

    def finish(self):
        if not self.wfile.closed:
            try:
                self.wfile.flush()
            except socket.error:
                # A final socket error may have occurred here, such as
                # the local error ECONNABORTED.
                pass
        self.wfile.close()
        self.rfile.close()
class _SocketWriter(BufferedIOBase):
|
| 815 |
+
"""Simple writable BufferedIOBase implementation for a socket
|
| 816 |
+
|
| 817 |
+
Does not hold data in a buffer, avoiding any need to call flush()."""
|
| 818 |
+
|
| 819 |
+
def __init__(self, sock):
|
| 820 |
+
self._sock = sock
|
| 821 |
+
|
| 822 |
+
def writable(self):
|
| 823 |
+
return True
|
| 824 |
+
|
| 825 |
+
def write(self, b):
|
| 826 |
+
self._sock.sendall(b)
|
| 827 |
+
with memoryview(b) as view:
|
| 828 |
+
return view.nbytes
|
| 829 |
+
|
| 830 |
+
def fileno(self):
|
| 831 |
+
return self._sock.fileno()
|
| 832 |
+
|
| 833 |
+
class DatagramRequestHandler(BaseRequestHandler):

    """Define self.rfile and self.wfile for datagram sockets."""

    def setup(self):
        from io import BytesIO
        # The "request" for a datagram server is the (packet, socket) pair
        # produced by UDPServer.get_request().
        self.packet, self.socket = self.request
        self.rfile = BytesIO(self.packet)
        self.wfile = BytesIO()

    def finish(self):
        # Send everything the handler wrote back to the client in one
        # datagram.
        self.socket.sendto(self.wfile.getvalue(), self.client_address)
|
evalkit_cambrian/lib/python3.10/string.py
ADDED
|
@@ -0,0 +1,280 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A collection of string constants.
|
| 2 |
+
|
| 3 |
+
Public module variables:
|
| 4 |
+
|
| 5 |
+
whitespace -- a string containing all ASCII whitespace
|
| 6 |
+
ascii_lowercase -- a string containing all ASCII lowercase letters
|
| 7 |
+
ascii_uppercase -- a string containing all ASCII uppercase letters
|
| 8 |
+
ascii_letters -- a string containing all ASCII letters
|
| 9 |
+
digits -- a string containing all ASCII decimal digits
|
| 10 |
+
hexdigits -- a string containing all ASCII hexadecimal digits
|
| 11 |
+
octdigits -- a string containing all ASCII octal digits
|
| 12 |
+
punctuation -- a string containing all ASCII punctuation characters
|
| 13 |
+
printable -- a string containing all ASCII characters considered printable
|
| 14 |
+
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
__all__ = ["ascii_letters", "ascii_lowercase", "ascii_uppercase", "capwords",
|
| 18 |
+
"digits", "hexdigits", "octdigits", "printable", "punctuation",
|
| 19 |
+
"whitespace", "Formatter", "Template"]
|
| 20 |
+
|
| 21 |
+
import _string
|
| 22 |
+
|
| 23 |
+
# Some strings for ctype-style character classification
|
| 24 |
+
whitespace = ' \t\n\r\v\f'
|
| 25 |
+
ascii_lowercase = 'abcdefghijklmnopqrstuvwxyz'
|
| 26 |
+
ascii_uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
| 27 |
+
ascii_letters = ascii_lowercase + ascii_uppercase
|
| 28 |
+
digits = '0123456789'
|
| 29 |
+
hexdigits = digits + 'abcdef' + 'ABCDEF'
|
| 30 |
+
octdigits = '01234567'
|
| 31 |
+
punctuation = r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
|
| 32 |
+
printable = digits + ascii_letters + punctuation + whitespace
|
| 33 |
+
|
| 34 |
+
# Functions which aren't available as string methods.
|
| 35 |
+
|
| 36 |
+
# Capitalize the words in a string, e.g. " aBc dEf " -> "Abc Def".
|
| 37 |
+
def capwords(s, sep=None):
    """capwords(s [,sep]) -> string

    Split the argument into words using split, capitalize each word
    using capitalize, and join the capitalized words using join.  If the
    optional second argument sep is absent or None, runs of whitespace
    characters are replaced by a single space and leading and trailing
    whitespace are removed, otherwise sep is used to split and join the
    words.
    """
    joiner = sep or ' '
    return joiner.join(word.capitalize() for word in s.split(sep))
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
####################################################################
|
| 52 |
+
import re as _re
|
| 53 |
+
from collections import ChainMap as _ChainMap
|
| 54 |
+
|
| 55 |
+
_sentinel_dict = {}
|
| 56 |
+
|
| 57 |
+
class Template:
|
| 58 |
+
"""A string class for supporting $-substitutions."""
|
| 59 |
+
|
| 60 |
+
delimiter = '$'
|
| 61 |
+
# r'[a-z]' matches to non-ASCII letters when used with IGNORECASE, but
|
| 62 |
+
# without the ASCII flag. We can't add re.ASCII to flags because of
|
| 63 |
+
# backward compatibility. So we use the ?a local flag and [a-z] pattern.
|
| 64 |
+
# See https://bugs.python.org/issue31672
|
| 65 |
+
idpattern = r'(?a:[_a-z][_a-z0-9]*)'
|
| 66 |
+
braceidpattern = None
|
| 67 |
+
flags = _re.IGNORECASE
|
| 68 |
+
|
| 69 |
+
def __init_subclass__(cls):
|
| 70 |
+
super().__init_subclass__()
|
| 71 |
+
if 'pattern' in cls.__dict__:
|
| 72 |
+
pattern = cls.pattern
|
| 73 |
+
else:
|
| 74 |
+
delim = _re.escape(cls.delimiter)
|
| 75 |
+
id = cls.idpattern
|
| 76 |
+
bid = cls.braceidpattern or cls.idpattern
|
| 77 |
+
pattern = fr"""
|
| 78 |
+
{delim}(?:
|
| 79 |
+
(?P<escaped>{delim}) | # Escape sequence of two delimiters
|
| 80 |
+
(?P<named>{id}) | # delimiter and a Python identifier
|
| 81 |
+
{{(?P<braced>{bid})}} | # delimiter and a braced identifier
|
| 82 |
+
(?P<invalid>) # Other ill-formed delimiter exprs
|
| 83 |
+
)
|
| 84 |
+
"""
|
| 85 |
+
cls.pattern = _re.compile(pattern, cls.flags | _re.VERBOSE)
|
| 86 |
+
|
| 87 |
+
def __init__(self, template):
|
| 88 |
+
self.template = template
|
| 89 |
+
|
| 90 |
+
# Search for $$, $identifier, ${identifier}, and any bare $'s
|
| 91 |
+
|
| 92 |
+
def _invalid(self, mo):
|
| 93 |
+
i = mo.start('invalid')
|
| 94 |
+
lines = self.template[:i].splitlines(keepends=True)
|
| 95 |
+
if not lines:
|
| 96 |
+
colno = 1
|
| 97 |
+
lineno = 1
|
| 98 |
+
else:
|
| 99 |
+
colno = i - len(''.join(lines[:-1]))
|
| 100 |
+
lineno = len(lines)
|
| 101 |
+
raise ValueError('Invalid placeholder in string: line %d, col %d' %
|
| 102 |
+
(lineno, colno))
|
| 103 |
+
|
| 104 |
+
def substitute(self, mapping=_sentinel_dict, /, **kws):
|
| 105 |
+
if mapping is _sentinel_dict:
|
| 106 |
+
mapping = kws
|
| 107 |
+
elif kws:
|
| 108 |
+
mapping = _ChainMap(kws, mapping)
|
| 109 |
+
# Helper function for .sub()
|
| 110 |
+
def convert(mo):
|
| 111 |
+
# Check the most common path first.
|
| 112 |
+
named = mo.group('named') or mo.group('braced')
|
| 113 |
+
if named is not None:
|
| 114 |
+
return str(mapping[named])
|
| 115 |
+
if mo.group('escaped') is not None:
|
| 116 |
+
return self.delimiter
|
| 117 |
+
if mo.group('invalid') is not None:
|
| 118 |
+
self._invalid(mo)
|
| 119 |
+
raise ValueError('Unrecognized named group in pattern',
|
| 120 |
+
self.pattern)
|
| 121 |
+
return self.pattern.sub(convert, self.template)
|
| 122 |
+
|
| 123 |
+
def safe_substitute(self, mapping=_sentinel_dict, /, **kws):
|
| 124 |
+
if mapping is _sentinel_dict:
|
| 125 |
+
mapping = kws
|
| 126 |
+
elif kws:
|
| 127 |
+
mapping = _ChainMap(kws, mapping)
|
| 128 |
+
# Helper function for .sub()
|
| 129 |
+
def convert(mo):
|
| 130 |
+
named = mo.group('named') or mo.group('braced')
|
| 131 |
+
if named is not None:
|
| 132 |
+
try:
|
| 133 |
+
return str(mapping[named])
|
| 134 |
+
except KeyError:
|
| 135 |
+
return mo.group()
|
| 136 |
+
if mo.group('escaped') is not None:
|
| 137 |
+
return self.delimiter
|
| 138 |
+
if mo.group('invalid') is not None:
|
| 139 |
+
return mo.group()
|
| 140 |
+
raise ValueError('Unrecognized named group in pattern',
|
| 141 |
+
self.pattern)
|
| 142 |
+
return self.pattern.sub(convert, self.template)
|
| 143 |
+
|
| 144 |
+
# Initialize Template.pattern.  __init_subclass__() is automatically called
# only for subclasses, not for the Template class itself, so invoke it once
# by hand here to compile the default pattern for Template.
Template.__init_subclass__()
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
########################################################################
|
| 150 |
+
# the Formatter class
|
| 151 |
+
# see PEP 3101 for details and purpose of this class
|
| 152 |
+
|
| 153 |
+
# The hard parts are reused from the C implementation. They're exposed as "_"
|
| 154 |
+
# prefixed methods of str.
|
| 155 |
+
|
| 156 |
+
# The overall parser is implemented in _string.formatter_parser.
|
| 157 |
+
# The field name parser is implemented in _string.formatter_field_name_split
|
| 158 |
+
|
| 159 |
+
class Formatter:
    """PEP 3101 string formatter.

    The hard parsing work is delegated to the C helpers exposed by the
    _string module; subclasses override the small hook methods
    (get_value, format_field, convert_field, ...) to customize
    behaviour.
    """

    def format(self, format_string, /, *args, **kwargs):
        """Format *format_string* using positional and keyword arguments."""
        return self.vformat(format_string, args, kwargs)

    def vformat(self, format_string, args, kwargs):
        """Like format(), but take the arguments as explicit collections."""
        used_args = set()
        result, _ = self._vformat(format_string, args, kwargs, used_args, 2)
        self.check_unused_args(used_args, args, kwargs)
        return result

    def _vformat(self, format_string, args, kwargs, used_args, recursion_depth,
                 auto_arg_index=0):
        # Recursive worker: format specs may themselves contain
        # replacement fields, expanded up to a fixed nesting depth.
        if recursion_depth < 0:
            raise ValueError('Max string recursion exceeded')
        pieces = []
        for literal_text, field_name, format_spec, conversion in \
                self.parse(format_string):

            if literal_text:
                pieces.append(literal_text)

            # No replacement field in this fragment.
            if field_name is None:
                continue

            # Resolve automatic vs. manual argument numbering.
            if field_name == '':
                # '{}' style: forbidden once manual indexes were used.
                if auto_arg_index is False:
                    raise ValueError('cannot switch from manual field '
                                     'specification to automatic field '
                                     'numbering')
                field_name = str(auto_arg_index)
                auto_arg_index += 1
            elif field_name.isdigit():
                # '{0}' style: forbidden once automatic numbering began.
                if auto_arg_index:
                    raise ValueError('cannot switch from manual field '
                                     'specification to automatic field '
                                     'numbering')
                # Disable auto-numbering; using '{}' later will raise.
                auto_arg_index = False

            # Find the referenced object and record which argument it
            # came from.
            obj, arg_used = self.get_field(field_name, args, kwargs)
            used_args.add(arg_used)

            # Apply any explicit conversion (!s, !r, !a).
            obj = self.convert_field(obj, conversion)

            # Expand nested replacement fields inside the spec itself.
            format_spec, auto_arg_index = self._vformat(
                format_spec, args, kwargs,
                used_args, recursion_depth-1,
                auto_arg_index=auto_arg_index)

            pieces.append(self.format_field(obj, format_spec))

        return ''.join(pieces), auto_arg_index

    def get_value(self, key, args, kwargs):
        """Look *key* up in *args* (integer key) or *kwargs* (string key)."""
        return args[key] if isinstance(key, int) else kwargs[key]

    def check_unused_args(self, used_args, args, kwargs):
        """Hook for subclasses; the default accepts unused arguments."""
        pass

    def format_field(self, value, format_spec):
        """Apply *format_spec* to *value* via the builtin format()."""
        return format(value, format_spec)

    def convert_field(self, value, conversion):
        """Apply the explicit conversion flag, if any, to *value*."""
        if conversion is None:
            return value
        if conversion == 's':
            return str(value)
        if conversion == 'r':
            return repr(value)
        if conversion == 'a':
            return ascii(value)
        raise ValueError("Unknown conversion specifier {0!s}".format(conversion))

    def parse(self, format_string):
        """Yield (literal_text, field_name, format_spec, conversion) tuples.

        literal_text may be empty; field_name is None when there is no
        replacement field in the fragment.
        """
        return _string.formatter_parser(format_string)

    def get_field(self, field_name, args, kwargs):
        """Resolve a compound field name such as '0.name' or 'lookup[3]'.

        Returns (object, first_component) where first_component is the
        argument key the name is rooted at.
        """
        first, rest = _string.formatter_field_name_split(field_name)

        obj = self.get_value(first, args, kwargs)

        # Apply the trailing attribute/item accesses in order.
        for is_attr, key in rest:
            obj = getattr(obj, key) if is_attr else obj[key]

        return obj, first
|
evalkit_cambrian/lib/python3.10/textwrap.py
ADDED
|
@@ -0,0 +1,494 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Text wrapping and filling.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
# Copyright (C) 1999-2001 Gregory P. Ward.
|
| 5 |
+
# Copyright (C) 2002, 2003 Python Software Foundation.
|
| 6 |
+
# Written by Greg Ward <gward@python.net>
|
| 7 |
+
|
| 8 |
+
import re
|
| 9 |
+
|
| 10 |
+
__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent', 'indent', 'shorten']
|
| 11 |
+
|
| 12 |
+
# Hardcode the recognized whitespace characters to the US-ASCII
|
| 13 |
+
# whitespace characters. The main reason for doing this is that
|
| 14 |
+
# some Unicode spaces (like \u00a0) are non-breaking whitespaces.
|
| 15 |
+
_whitespace = '\t\n\x0b\x0c\r '

class TextWrapper:
    """
    Object for wrapping/filling text.  The public interface consists of
    the wrap() and fill() methods; the other methods are just there for
    subclasses to override in order to tweak the default behaviour.
    If you want to completely replace the main wrapping algorithm,
    you'll probably have to override _wrap_chunks().

    Several instance attributes control various aspects of wrapping:
      width (default: 70)
        the maximum width of wrapped lines (unless break_long_words
        is false)
      initial_indent (default: "")
        string that will be prepended to the first line of wrapped
        output.  Counts towards the line's width.
      subsequent_indent (default: "")
        string that will be prepended to all lines save the first
        of wrapped output; also counts towards each line's width.
      expand_tabs (default: true)
        Expand tabs in input text to spaces before further processing.
        Each tab will become 0 .. 'tabsize' spaces, depending on its
        position in its line.  If false, each tab is a single character.
      tabsize (default: 8)
        Expand tabs in input text to 0 .. 'tabsize' spaces, unless
        'expand_tabs' is false.
      replace_whitespace (default: true)
        Replace all whitespace characters in the input text by spaces
        after tab expansion.  Note that if expand_tabs is false and
        replace_whitespace is true, every tab becomes a single space!
      fix_sentence_endings (default: false)
        Ensure that sentence-ending punctuation is always followed
        by two spaces.  Off by default because the heuristic is
        (unavoidably) imperfect.
      break_long_words (default: true)
        Break words longer than 'width'.  If false, those words will
        not be broken, and some lines might be longer than 'width'.
      break_on_hyphens (default: true)
        Allow breaking hyphenated words.  If true, wrapping will occur
        preferably on whitespaces and right after hyphens that are
        part of compound words.
      drop_whitespace (default: true)
        Drop leading and trailing whitespace from lines.
      max_lines (default: None)
        Truncate wrapped lines.
      placeholder (default: ' [...]')
        Append to the last line of truncated text.
    """

    # Translation table mapping each recognized (US-ASCII) whitespace
    # character to a plain space; used by _munge_whitespace().
    unicode_whitespace_trans = {}
    uspace = ord(' ')
    for x in _whitespace:
        unicode_whitespace_trans[ord(x)] = uspace

    # This funky little regex is just the trick for splitting
    # text up into word-wrappable chunks.  E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
    # (after stripping out empty strings).
    word_punct = r'[\w!"\'&.,?]'
    letter = r'[^\d\W]'
    whitespace = r'[%s]' % re.escape(_whitespace)
    nowhitespace = '[^' + whitespace[1:]
    wordsep_re = re.compile(r'''
        ( # any whitespace
          %(ws)s+
        | # em-dash between words
          (?<=%(wp)s) -{2,} (?=\w)
        | # word, possibly hyphenated
          %(nws)s+? (?:
            # hyphenated word
              -(?: (?<=%(lt)s{2}-) | (?<=%(lt)s-%(lt)s-))
              (?= %(lt)s -? %(lt)s)
            | # end of word
              (?=%(ws)s|\Z)
            | # em-dash
              (?<=%(wp)s) (?=-{2,}\w)
            )
        )''' % {'wp': word_punct, 'lt': letter,
                'ws': whitespace, 'nws': nowhitespace},
        re.VERBOSE)
    del word_punct, letter, nowhitespace

    # This less funky little regex just splits on recognized spaces.  E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/
    wordsep_simple_re = re.compile(r'(%s+)' % whitespace)
    del whitespace

    # XXX this is not locale- or charset-aware -- it only recognizes
    # US-ASCII lowercase letters (and therefore English text).
    sentence_end_re = re.compile(r'[a-z]'      # lowercase letter
                                 r'[\.\!\?]'   # sentence-ending punct.
                                 r'[\"\']?'    # optional end-of-quote
                                 r'\Z')        # end of chunk

    def __init__(self,
                 width=70,
                 initial_indent="",
                 subsequent_indent="",
                 expand_tabs=True,
                 replace_whitespace=True,
                 fix_sentence_endings=False,
                 break_long_words=True,
                 drop_whitespace=True,
                 break_on_hyphens=True,
                 tabsize=8,
                 *,
                 max_lines=None,
                 placeholder=' [...]'):
        """Record the wrapping options; see the class docstring for each."""
        self.width = width
        self.initial_indent = initial_indent
        self.subsequent_indent = subsequent_indent
        self.expand_tabs = expand_tabs
        self.replace_whitespace = replace_whitespace
        self.fix_sentence_endings = fix_sentence_endings
        self.break_long_words = break_long_words
        self.drop_whitespace = drop_whitespace
        self.break_on_hyphens = break_on_hyphens
        self.tabsize = tabsize
        self.max_lines = max_lines
        self.placeholder = placeholder

    # -- Private methods -----------------------------------------------
    # (possibly useful for subclasses to override)

    def _munge_whitespace(self, text):
        """_munge_whitespace(text : string) -> string

        Munge whitespace in text: expand tabs and convert all other
        whitespace characters to spaces.  Eg. " foo\\tbar\\n\\nbaz"
        becomes " foo    bar  baz".
        """
        if self.expand_tabs:
            text = text.expandtabs(self.tabsize)
        if self.replace_whitespace:
            text = text.translate(self.unicode_whitespace_trans)
        return text

    def _split(self, text):
        """_split(text : string) -> [string]

        Split the text to wrap into indivisible chunks.  Chunks are
        not quite the same as words; see _wrap_chunks() for full
        details.  Empty strings produced by the split are dropped.
        """
        if self.break_on_hyphens is True:
            splitter = self.wordsep_re
        else:
            splitter = self.wordsep_simple_re
        return [chunk for chunk in splitter.split(text) if chunk]

    def _fix_sentence_endings(self, chunks):
        """_fix_sentence_endings(chunks : [string])

        Correct for sentence endings buried in 'chunks'.  Eg. when the
        original text contains "... foo.\\nBar ...", _munge_whitespace()
        and _split() will convert that to [..., "foo.", " ", "Bar", ...]
        which has one too few spaces; this method widens that one
        space to two.
        """
        i = 0
        patsearch = self.sentence_end_re.search
        while i < len(chunks) - 1:
            if chunks[i + 1] == " " and patsearch(chunks[i]):
                # BUG FIX: this previously assigned a single space,
                # which was a no-op; the contract is two spaces after
                # a sentence end.
                chunks[i + 1] = "  "
                i += 2
            else:
                i += 1

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        """_handle_long_word(chunks : [string],
                             cur_line : [string],
                             cur_len : int, width : int)

        Handle a chunk of text (most likely a word, not whitespace)
        that is too long to fit in any line.
        """
        # When the indent alone exceeds the width, still strip at least
        # one character off each pass so the loop makes progress.
        if width < 1:
            space_left = 1
        else:
            space_left = width - cur_len

        if self.break_long_words:
            # Put as much of the chunk onto the current line as fits.
            end = space_left
            chunk = reversed_chunks[-1]
            if self.break_on_hyphens and len(chunk) > space_left:
                # Prefer breaking right after the last hyphen, but only
                # if some non-hyphen text precedes it.
                hyphen = chunk.rfind('-', 0, space_left)
                if hyphen > 0 and any(c != '-' for c in chunk[:hyphen]):
                    end = hyphen + 1
            cur_line.append(chunk[:end])
            reversed_chunks[-1] = chunk[end:]

        elif not cur_line:
            # Can't break the word: keep it intact, but only start a
            # line with it -- that minimizes the width violation.
            cur_line.append(reversed_chunks.pop())

        # Otherwise leave the chunk untouched: the next pass through
        # _wrap_chunks() starts a fresh line entirely devoted to it.

    def _wrap_chunks(self, chunks):
        """_wrap_chunks(chunks : [string]) -> [string]

        Wrap a sequence of text chunks and return a list of lines of
        length 'self.width' or less.  (If 'break_long_words' is false,
        some lines may be longer than this.)  Chunks correspond roughly
        to words and the whitespace between them: each chunk is
        indivisible (modulo 'break_long_words'), but a line break can
        come between any two chunks.  Whitespace chunks are removed
        from the beginning and end of lines, but apart from that
        whitespace is preserved.
        """
        lines = []
        if self.width <= 0:
            raise ValueError("invalid width %r (must be > 0)" % self.width)
        if self.max_lines is not None:
            # The placeholder must fit on the final (possibly indented)
            # line, otherwise truncation could never succeed.
            if self.max_lines > 1:
                indent = self.subsequent_indent
            else:
                indent = self.initial_indent
            if len(indent) + len(self.placeholder.lstrip()) > self.width:
                raise ValueError("placeholder too large for max width")

        # Arrange in reverse order so items can be efficiently popped
        # from a stack of chunks.
        chunks.reverse()

        while chunks:

            # Chunks making up the line under construction, and their
            # total length so far.
            cur_line = []
            cur_len = 0

            # Static prefix for this line: first line vs. the rest.
            if lines:
                indent = self.subsequent_indent
            else:
                indent = self.initial_indent

            # Maximum content width for this line.
            width = self.width - len(indent)

            # First chunk on line is whitespace -- drop it, unless this
            # is the very beginning of the text (no lines started yet).
            if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                del chunks[-1]

            while chunks:
                chunk_len = len(chunks[-1])

                # Can at least squeeze this chunk onto the current line.
                if cur_len + chunk_len <= width:
                    cur_line.append(chunks.pop())
                    cur_len += chunk_len

                # Nope, this line is full.
                else:
                    break

            # The current line is full, and the next chunk is too big
            # to fit on *any* line (not just this one).
            if chunks and len(chunks[-1]) > width:
                self._handle_long_word(chunks, cur_line, cur_len, width)
                cur_len = sum(map(len, cur_line))

            # If the last chunk on this line is all whitespace, drop it.
            if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
                cur_len -= len(cur_line[-1])
                del cur_line[-1]

            if cur_line:
                if (self.max_lines is None or
                    len(lines) + 1 < self.max_lines or
                    (not chunks or
                     self.drop_whitespace and
                     len(chunks) == 1 and
                     not chunks[0].strip()) and cur_len <= width):
                    # Convert current line back to a string and store it
                    # in the list of all lines (return value).
                    lines.append(indent + ''.join(cur_line))
                else:
                    # Truncating: shed trailing chunks until the
                    # placeholder fits on this line.
                    while cur_line:
                        if (cur_line[-1].strip() and
                                cur_len + len(self.placeholder) <= width):
                            cur_line.append(self.placeholder)
                            lines.append(indent + ''.join(cur_line))
                            break
                        cur_len -= len(cur_line[-1])
                        del cur_line[-1]
                    else:
                        # Nothing fits; try appending the placeholder to
                        # the previous line instead.
                        if lines:
                            prev_line = lines[-1].rstrip()
                            if (len(prev_line) + len(self.placeholder) <=
                                    self.width):
                                lines[-1] = prev_line + self.placeholder
                                break
                        lines.append(indent + self.placeholder.lstrip())
                    break

        return lines

    def _split_chunks(self, text):
        # Normalize whitespace first, then chop into wrappable chunks.
        text = self._munge_whitespace(text)
        return self._split(text)

    # -- Public interface ----------------------------------------------

    def wrap(self, text):
        """wrap(text : string) -> [string]

        Reformat the single paragraph in 'text' so it fits in lines of
        no more than 'self.width' columns, and return a list of wrapped
        lines.  Tabs in 'text' are expanded with string.expandtabs(),
        and all other whitespace characters (including newline) are
        converted to space.
        """
        chunks = self._split_chunks(text)
        if self.fix_sentence_endings:
            self._fix_sentence_endings(chunks)
        return self._wrap_chunks(chunks)

    def fill(self, text):
        """fill(text : string) -> string

        Reformat the single paragraph in 'text' to fit in lines of no
        more than 'self.width' columns, and return a new string
        containing the entire wrapped paragraph.
        """
        return "\n".join(self.wrap(text))
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
# -- Convenience interface ---------------------------------------------
|
| 375 |
+
|
| 376 |
+
def wrap(text, width=70, **kwargs):
    """Wrap a single paragraph of text, returning a list of wrapped lines.

    Reformat the single paragraph in 'text' so it fits in lines of no
    more than 'width' columns, and return a list of wrapped lines.  By
    default, tabs in 'text' are expanded with string.expandtabs(), and
    all other whitespace characters (including newline) are converted
    to space.  See the TextWrapper class for keyword args that
    customize wrapping behaviour.
    """
    wrapper = TextWrapper(width=width, **kwargs)
    return wrapper.wrap(text)
|
| 388 |
+
|
| 389 |
+
def fill(text, width=70, **kwargs):
    """Fill a single paragraph of text, returning a new string.

    Reformat the single paragraph in 'text' to fit in lines of no more
    than 'width' columns, and return a new string containing the
    entire wrapped paragraph.  As with wrap(), tabs are expanded and
    other whitespace characters converted to space.  See the
    TextWrapper class for keyword args that customize wrapping
    behaviour.
    """
    wrapper = TextWrapper(width=width, **kwargs)
    return wrapper.fill(text)
|
| 400 |
+
|
| 401 |
+
def shorten(text, width, **kwargs):
    """Collapse and truncate the given text to fit in the given width.

    The text first has its whitespace collapsed.  If it then fits in
    the *width*, it is returned as is.  Otherwise, as many words as
    possible are joined and then the placeholder is appended::

        >>> textwrap.shorten("Hello  world!", width=12)
        'Hello world!'
        >>> textwrap.shorten("Hello  world!", width=11)
        'Hello [...]'
    """
    wrapper = TextWrapper(width=width, max_lines=1, **kwargs)
    collapsed = ' '.join(text.strip().split())
    return wrapper.fill(collapsed)
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
# -- Loosely related functionality -------------------------------------
|
| 418 |
+
|
| 419 |
+
_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)

def dedent(text):
    """Remove any common leading whitespace from every line in `text`.

    This can be used to make triple-quoted strings line up with the
    left edge of the display, while still presenting them in the
    source code in indented form.

    Note that tabs and spaces are both treated as whitespace, but they
    are not equal: the lines "  hello" and "\\thello" are considered
    to have no common leading whitespace.

    Entirely blank lines are normalized to a newline character.
    """
    # Blank lines must not influence the margin computation.
    text = _whitespace_only_re.sub('', text)
    indents = _leading_whitespace_re.findall(text)

    # Find the longest leading run of spaces/tabs common to all lines.
    margin = None
    for indent in indents:
        if margin is None:
            margin = indent

        # Current line more deeply indented than the winner so far:
        # no change (previous winner is still on top).
        elif indent.startswith(margin):
            pass

        # Current line consistent with and no deeper than the winner:
        # it is the new winner.
        elif margin.startswith(indent):
            margin = indent

        # Inconsistent mix (e.g. tabs vs. spaces): keep only the
        # character-wise common prefix.
        else:
            for i, (x, y) in enumerate(zip(margin, indent)):
                if x != y:
                    margin = margin[:i]
                    break

    if margin:
        text = re.sub(r'(?m)^' + margin, '', text)
    return text
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def indent(text, prefix, predicate=None):
    """Adds 'prefix' to the beginning of selected lines in 'text'.

    If 'predicate' is provided, 'prefix' will only be added to the
    lines where 'predicate(line)' is True.  If 'predicate' is not
    provided, it will default to adding 'prefix' to all non-empty
    lines that do not consist solely of whitespace characters.
    """
    if predicate is None:
        # Default: prefix every line containing non-whitespace.
        def predicate(line):
            return line.strip()

    return ''.join(prefix + line if predicate(line) else line
                   for line in text.splitlines(True))
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
if __name__ == "__main__":
    # Tiny smoke test exercising dedent() on mixed-indent input.
    print(dedent("Hello there.\n  This is indented."))
|
evalkit_cambrian/lib/python3.10/this.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# "The Zen of Python", stored ROT13-encoded so reading the source is
# itself a small puzzle; decoded and printed on import.
s = """Gur Mra bs Clguba, ol Gvz Crgref

Ornhgvshy vf orggre guna htyl.
Rkcyvpvg vf orggre guna vzcyvpvg.
Fvzcyr vf orggre guna pbzcyrk.
Pbzcyrk vf orggre guna pbzcyvpngrq.
Syng vf orggre guna arfgrq.
Fcnefr vf orggre guna qrafr.
Ernqnovyvgl pbhagf.
Fcrpvny pnfrf nera'g fcrpvny rabhtu gb oernx gur ehyrf.
Nygubhtu cenpgvpnyvgl orngf chevgl.
Reebef fubhyq arire cnff fvyragyl.
Hayrff rkcyvpvgyl fvyraprq.
Va gur snpr bs nzovthvgl, ershfr gur grzcgngvba gb thrff.
Gurer fubhyq or bar-- naq cersrenoyl bayl bar --boivbhf jnl gb qb vg.
Nygubhtu gung jnl znl abg or boivbhf ng svefg hayrff lbh'er Qhgpu.
Abj vf orggre guna arire.
Nygubhtu arire vf bsgra orggre guna *evtug* abj.
Vs gur vzcyrzragngvba vf uneq gb rkcynva, vg'f n onq vqrn.
Vs gur vzcyrzragngvba vf rnfl gb rkcynva, vg znl or n tbbq vqrn.
Anzrfcnprf ner bar ubaxvat terng vqrn -- yrg'f qb zber bs gubfr!"""

# ROT13 decoding table: each ASCII letter maps 13 places onward within
# its own case; all other characters pass through unchanged.
d = {chr(base + i): chr(base + (i + 13) % 26)
     for base in (65, 97)
     for i in range(26)}

print("".join([d.get(c, c) for c in s]))
|
evalkit_cambrian/lib/python3.10/token.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Token constants."""
|
| 2 |
+
# Auto-generated by Tools/scripts/generate_token.py
|
| 3 |
+
|
| 4 |
+
__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
|
| 5 |
+
|
| 6 |
+
ENDMARKER = 0
|
| 7 |
+
NAME = 1
|
| 8 |
+
NUMBER = 2
|
| 9 |
+
STRING = 3
|
| 10 |
+
NEWLINE = 4
|
| 11 |
+
INDENT = 5
|
| 12 |
+
DEDENT = 6
|
| 13 |
+
LPAR = 7
|
| 14 |
+
RPAR = 8
|
| 15 |
+
LSQB = 9
|
| 16 |
+
RSQB = 10
|
| 17 |
+
COLON = 11
|
| 18 |
+
COMMA = 12
|
| 19 |
+
SEMI = 13
|
| 20 |
+
PLUS = 14
|
| 21 |
+
MINUS = 15
|
| 22 |
+
STAR = 16
|
| 23 |
+
SLASH = 17
|
| 24 |
+
VBAR = 18
|
| 25 |
+
AMPER = 19
|
| 26 |
+
LESS = 20
|
| 27 |
+
GREATER = 21
|
| 28 |
+
EQUAL = 22
|
| 29 |
+
DOT = 23
|
| 30 |
+
PERCENT = 24
|
| 31 |
+
LBRACE = 25
|
| 32 |
+
RBRACE = 26
|
| 33 |
+
EQEQUAL = 27
|
| 34 |
+
NOTEQUAL = 28
|
| 35 |
+
LESSEQUAL = 29
|
| 36 |
+
GREATEREQUAL = 30
|
| 37 |
+
TILDE = 31
|
| 38 |
+
CIRCUMFLEX = 32
|
| 39 |
+
LEFTSHIFT = 33
|
| 40 |
+
RIGHTSHIFT = 34
|
| 41 |
+
DOUBLESTAR = 35
|
| 42 |
+
PLUSEQUAL = 36
|
| 43 |
+
MINEQUAL = 37
|
| 44 |
+
STAREQUAL = 38
|
| 45 |
+
SLASHEQUAL = 39
|
| 46 |
+
PERCENTEQUAL = 40
|
| 47 |
+
AMPEREQUAL = 41
|
| 48 |
+
VBAREQUAL = 42
|
| 49 |
+
CIRCUMFLEXEQUAL = 43
|
| 50 |
+
LEFTSHIFTEQUAL = 44
|
| 51 |
+
RIGHTSHIFTEQUAL = 45
|
| 52 |
+
DOUBLESTAREQUAL = 46
|
| 53 |
+
DOUBLESLASH = 47
|
| 54 |
+
DOUBLESLASHEQUAL = 48
|
| 55 |
+
AT = 49
|
| 56 |
+
ATEQUAL = 50
|
| 57 |
+
RARROW = 51
|
| 58 |
+
ELLIPSIS = 52
|
| 59 |
+
COLONEQUAL = 53
|
| 60 |
+
OP = 54
|
| 61 |
+
AWAIT = 55
|
| 62 |
+
ASYNC = 56
|
| 63 |
+
TYPE_IGNORE = 57
|
| 64 |
+
TYPE_COMMENT = 58
|
| 65 |
+
SOFT_KEYWORD = 59
|
| 66 |
+
# These aren't used by the C tokenizer but are needed for tokenize.py
|
| 67 |
+
ERRORTOKEN = 60
|
| 68 |
+
COMMENT = 61
|
| 69 |
+
NL = 62
|
| 70 |
+
ENCODING = 63
|
| 71 |
+
N_TOKENS = 64
|
| 72 |
+
# Special definitions for cooperation with parser
|
| 73 |
+
NT_OFFSET = 256
|
| 74 |
+
|
| 75 |
+
tok_name = {value: name
|
| 76 |
+
for name, value in globals().items()
|
| 77 |
+
if isinstance(value, int) and not name.startswith('_')}
|
| 78 |
+
__all__.extend(tok_name.values())
|
| 79 |
+
|
| 80 |
+
EXACT_TOKEN_TYPES = {
|
| 81 |
+
'!=': NOTEQUAL,
|
| 82 |
+
'%': PERCENT,
|
| 83 |
+
'%=': PERCENTEQUAL,
|
| 84 |
+
'&': AMPER,
|
| 85 |
+
'&=': AMPEREQUAL,
|
| 86 |
+
'(': LPAR,
|
| 87 |
+
')': RPAR,
|
| 88 |
+
'*': STAR,
|
| 89 |
+
'**': DOUBLESTAR,
|
| 90 |
+
'**=': DOUBLESTAREQUAL,
|
| 91 |
+
'*=': STAREQUAL,
|
| 92 |
+
'+': PLUS,
|
| 93 |
+
'+=': PLUSEQUAL,
|
| 94 |
+
',': COMMA,
|
| 95 |
+
'-': MINUS,
|
| 96 |
+
'-=': MINEQUAL,
|
| 97 |
+
'->': RARROW,
|
| 98 |
+
'.': DOT,
|
| 99 |
+
'...': ELLIPSIS,
|
| 100 |
+
'/': SLASH,
|
| 101 |
+
'//': DOUBLESLASH,
|
| 102 |
+
'//=': DOUBLESLASHEQUAL,
|
| 103 |
+
'/=': SLASHEQUAL,
|
| 104 |
+
':': COLON,
|
| 105 |
+
':=': COLONEQUAL,
|
| 106 |
+
';': SEMI,
|
| 107 |
+
'<': LESS,
|
| 108 |
+
'<<': LEFTSHIFT,
|
| 109 |
+
'<<=': LEFTSHIFTEQUAL,
|
| 110 |
+
'<=': LESSEQUAL,
|
| 111 |
+
'=': EQUAL,
|
| 112 |
+
'==': EQEQUAL,
|
| 113 |
+
'>': GREATER,
|
| 114 |
+
'>=': GREATEREQUAL,
|
| 115 |
+
'>>': RIGHTSHIFT,
|
| 116 |
+
'>>=': RIGHTSHIFTEQUAL,
|
| 117 |
+
'@': AT,
|
| 118 |
+
'@=': ATEQUAL,
|
| 119 |
+
'[': LSQB,
|
| 120 |
+
']': RSQB,
|
| 121 |
+
'^': CIRCUMFLEX,
|
| 122 |
+
'^=': CIRCUMFLEXEQUAL,
|
| 123 |
+
'{': LBRACE,
|
| 124 |
+
'|': VBAR,
|
| 125 |
+
'|=': VBAREQUAL,
|
| 126 |
+
'}': RBRACE,
|
| 127 |
+
'~': TILDE,
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
def ISTERMINAL(x):
|
| 131 |
+
return x < NT_OFFSET
|
| 132 |
+
|
| 133 |
+
def ISNONTERMINAL(x):
|
| 134 |
+
return x >= NT_OFFSET
|
| 135 |
+
|
| 136 |
+
def ISEOF(x):
|
| 137 |
+
return x == ENDMARKER
|
evalkit_cambrian/lib/python3.10/turtle.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|