Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +19 -0
- evalkit_cambrian/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_adv_train.so.8 +3 -0
- evalkit_tf446/lib/libasan.so +3 -0
- evalkit_tf446/lib/libasan.so.6 +3 -0
- evalkit_tf446/lib/libatomic.so.1.2.0 +3 -0
- evalkit_tf446/lib/libform.a +3 -0
- evalkit_tf446/lib/libgcc_s.so.1 +3 -0
- evalkit_tf446/lib/libgomp.so +3 -0
- evalkit_tf446/lib/liblsan.so +3 -0
- evalkit_tf446/lib/libncurses++.a +3 -0
- evalkit_tf446/lib/libncurses.a +3 -0
- evalkit_tf446/lib/libncursesw.so.6 +3 -0
- evalkit_tf446/lib/libsqlite3.so.0 +3 -0
- evalkit_tf446/lib/libsqlite3.so.0.8.6 +3 -0
- evalkit_tf446/lib/libstdc++.so +3 -0
- evalkit_tf446/lib/libtinfo.so +3 -0
- evalkit_tf446/lib/libtinfow.so +3 -0
- evalkit_tf446/lib/libtinfow.so.6.4 +3 -0
- evalkit_tf446/lib/libz.so.1 +3 -0
- evalkit_tf446/lib/python3.10/_aix_support.py +89 -0
- evalkit_tf446/lib/python3.10/_compression.py +162 -0
- evalkit_tf446/lib/python3.10/_osx_support.py +574 -0
- evalkit_tf446/lib/python3.10/_py_abc.py +147 -0
- evalkit_tf446/lib/python3.10/_sitebuiltins.py +103 -0
- evalkit_tf446/lib/python3.10/_strptime.py +579 -0
- evalkit_tf446/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py +986 -0
- evalkit_tf446/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py.orig +986 -0
- evalkit_tf446/lib/python3.10/_sysconfigdata_x86_64_conda_cos6_linux_gnu.py +986 -0
- evalkit_tf446/lib/python3.10/abc.py +188 -0
- evalkit_tf446/lib/python3.10/argparse.py +0 -0
- evalkit_tf446/lib/python3.10/ast.py +1709 -0
- evalkit_tf446/lib/python3.10/binhex.py +502 -0
- evalkit_tf446/lib/python3.10/bz2.py +344 -0
- evalkit_tf446/lib/python3.10/cProfile.py +191 -0
- evalkit_tf446/lib/python3.10/cgitb.py +321 -0
- evalkit_tf446/lib/python3.10/chunk.py +169 -0
- evalkit_tf446/lib/python3.10/cmd.py +401 -0
- evalkit_tf446/lib/python3.10/code.py +315 -0
- evalkit_tf446/lib/python3.10/codecs.py +1127 -0
- evalkit_tf446/lib/python3.10/codeop.py +153 -0
- evalkit_tf446/lib/python3.10/compileall.py +463 -0
- evalkit_tf446/lib/python3.10/contextvars.py +4 -0
- evalkit_tf446/lib/python3.10/copyreg.py +219 -0
- evalkit_tf446/lib/python3.10/decimal.py +11 -0
- evalkit_tf446/lib/python3.10/doctest.py +0 -0
- evalkit_tf446/lib/python3.10/enum.py +1053 -0
- evalkit_tf446/lib/python3.10/fileinput.py +462 -0
- evalkit_tf446/lib/python3.10/fnmatch.py +199 -0
- evalkit_tf446/lib/python3.10/fractions.py +748 -0
- evalkit_tf446/lib/python3.10/functools.py +992 -0
.gitattributes
CHANGED
|
@@ -2599,3 +2599,22 @@ evalkit_tf446/lib/libncurses.so.6 filter=lfs diff=lfs merge=lfs -text
|
|
| 2599 |
evalkit_tf446/lib/libtsan.so.0.0.0 filter=lfs diff=lfs merge=lfs -text
|
| 2600 |
evalkit_tf446/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
| 2601 |
evalkit_tf446/lib/libubsan.so.1.0.0 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2599 |
evalkit_tf446/lib/libtsan.so.0.0.0 filter=lfs diff=lfs merge=lfs -text
|
| 2600 |
evalkit_tf446/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
| 2601 |
evalkit_tf446/lib/libubsan.so.1.0.0 filter=lfs diff=lfs merge=lfs -text
|
| 2602 |
+
evalkit_tf446/lib/libgomp.so filter=lfs diff=lfs merge=lfs -text
|
| 2603 |
+
evalkit_tf446/lib/libasan.so.6 filter=lfs diff=lfs merge=lfs -text
|
| 2604 |
+
evalkit_tf446/lib/libncursesw.so.6 filter=lfs diff=lfs merge=lfs -text
|
| 2605 |
+
evalkit_cambrian/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_adv_train.so.8 filter=lfs diff=lfs merge=lfs -text
|
| 2606 |
+
evalkit_tf446/lib/libform.a filter=lfs diff=lfs merge=lfs -text
|
| 2607 |
+
evalkit_tf446/lib/liblsan.so filter=lfs diff=lfs merge=lfs -text
|
| 2608 |
+
evalkit_tf446/lib/libtinfow.so filter=lfs diff=lfs merge=lfs -text
|
| 2609 |
+
evalkit_tf446/lib/libgcc_s.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 2610 |
+
evalkit_tf446/lib/libz.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 2611 |
+
evalkit_tf446/lib/libatomic.so.1.2.0 filter=lfs diff=lfs merge=lfs -text
|
| 2612 |
+
evalkit_tf446/lib/libncurses.a filter=lfs diff=lfs merge=lfs -text
|
| 2613 |
+
evalkit_tf446/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
| 2614 |
+
infer_4_30_0/lib/python3.10/site-packages/torchaudio/lib/libtorchaudio.so filter=lfs diff=lfs merge=lfs -text
|
| 2615 |
+
evalkit_tf446/lib/libsqlite3.so.0 filter=lfs diff=lfs merge=lfs -text
|
| 2616 |
+
evalkit_tf446/lib/libsqlite3.so.0.8.6 filter=lfs diff=lfs merge=lfs -text
|
| 2617 |
+
evalkit_tf446/lib/libstdc++.so filter=lfs diff=lfs merge=lfs -text
|
| 2618 |
+
evalkit_tf446/lib/libtinfo.so filter=lfs diff=lfs merge=lfs -text
|
| 2619 |
+
evalkit_tf446/lib/libncurses++.a filter=lfs diff=lfs merge=lfs -text
|
| 2620 |
+
evalkit_tf446/lib/libasan.so filter=lfs diff=lfs merge=lfs -text
|
evalkit_cambrian/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_adv_train.so.8
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2e8b8107b64d4ab6a73ce184d301b71e14c2b1bb469ef5734a1baa3a100847d4
|
| 3 |
+
size 121126456
|
evalkit_tf446/lib/libasan.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2a8a7995a4d84a8817af8d1604bef621e99d0622df4eda14f6fe5245735a952e
|
| 3 |
+
size 7575272
|
evalkit_tf446/lib/libasan.so.6
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2a8a7995a4d84a8817af8d1604bef621e99d0622df4eda14f6fe5245735a952e
|
| 3 |
+
size 7575272
|
evalkit_tf446/lib/libatomic.so.1.2.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2f1a92c18f01c13c9a89908fb86a7309ae5b89a882db9914114957bc4b6fed92
|
| 3 |
+
size 143648
|
evalkit_tf446/lib/libform.a
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e38c05b95a8311e56f3f585bf38e66c9af44a8d6085d39648a770d611a6ebeeb
|
| 3 |
+
size 185212
|
evalkit_tf446/lib/libgcc_s.so.1
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d912bad5e511194c15b506fe7eafe4454bc2dc4073a0fdebc60e86af59a0f2bc
|
| 3 |
+
size 475272
|
evalkit_tf446/lib/libgomp.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e3b68c5f37afb7b70bd12273b69706ab33a397714e8336910f0e47f8f1cf6854
|
| 3 |
+
size 1265616
|
evalkit_tf446/lib/liblsan.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3b898a178573c3ec5feb7246182ac8ebc2664197e35bf08040e6ca2ee4719757
|
| 3 |
+
size 2691440
|
evalkit_tf446/lib/libncurses++.a
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:93b48c40f5d7b07e1a8c4bd9419df55c28e250cca1166be4aafd2fc7caf18823
|
| 3 |
+
size 187604
|
evalkit_tf446/lib/libncurses.a
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:873bc902fcba042e3e980451cae21218095aa52298dcce1a9ac7e415beecb1f0
|
| 3 |
+
size 544910
|
evalkit_tf446/lib/libncursesw.so.6
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7fa4e5e93804d78660b0eef727cdb4211209e1742e4ad3669348022668d90962
|
| 3 |
+
size 271304
|
evalkit_tf446/lib/libsqlite3.so.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:71932eb5bf89092fbd2c900601fc9f24aa184d65038aaec2445fd11b1d923327
|
| 3 |
+
size 1543808
|
evalkit_tf446/lib/libsqlite3.so.0.8.6
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:71932eb5bf89092fbd2c900601fc9f24aa184d65038aaec2445fd11b1d923327
|
| 3 |
+
size 1543808
|
evalkit_tf446/lib/libstdc++.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:4f045231ff3a95c2fbfde450575f0ef45d23e95be15193c8729b521fc363ece4
|
| 3 |
+
size 17981480
|
evalkit_tf446/lib/libtinfo.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d3a95532c42fe7aecfbc6054688b9d474f9804e78646e4ead5f4b99e35bae9a2
|
| 3 |
+
size 287080
|
evalkit_tf446/lib/libtinfow.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d3a95532c42fe7aecfbc6054688b9d474f9804e78646e4ead5f4b99e35bae9a2
|
| 3 |
+
size 287080
|
evalkit_tf446/lib/libtinfow.so.6.4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d3a95532c42fe7aecfbc6054688b9d474f9804e78646e4ead5f4b99e35bae9a2
|
| 3 |
+
size 287080
|
evalkit_tf446/lib/libz.so.1
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0b0e682a9dc7fd4895a6783288f851b793dc89633f28714027974fa4d66f3914
|
| 3 |
+
size 124744
|
evalkit_tf446/lib/python3.10/_aix_support.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Shared AIX support functions."""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
import sysconfig
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
import subprocess
|
| 8 |
+
except ImportError: # pragma: no cover
|
| 9 |
+
# _aix_support is used in distutils by setup.py to build C extensions,
|
| 10 |
+
# before subprocess dependencies like _posixsubprocess are available.
|
| 11 |
+
import _bootsubprocess as subprocess
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _aix_tag(vrtl, bd):
|
| 15 |
+
# type: (List[int], int) -> str
|
| 16 |
+
# Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
|
| 17 |
+
_sz = 32 if sys.maxsize == (2**31-1) else 64
|
| 18 |
+
# vrtl[version, release, technology_level]
|
| 19 |
+
return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(vrtl[0], vrtl[1], vrtl[2], bd, _sz)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# extract version, release and technology level from a VRMF string
|
| 23 |
+
def _aix_vrtl(vrmf):
|
| 24 |
+
# type: (str) -> List[int]
|
| 25 |
+
v, r, tl = vrmf.split(".")[:3]
|
| 26 |
+
return [int(v[-1]), int(r), int(tl)]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _aix_bosmp64():
|
| 30 |
+
# type: () -> Tuple[str, int]
|
| 31 |
+
"""
|
| 32 |
+
Return a Tuple[str, int] e.g., ['7.1.4.34', 1806]
|
| 33 |
+
The fileset bos.mp64 is the AIX kernel. It's VRMF and builddate
|
| 34 |
+
reflect the current ABI levels of the runtime environment.
|
| 35 |
+
"""
|
| 36 |
+
# We expect all AIX systems to have lslpp installed in this location
|
| 37 |
+
out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.mp64"])
|
| 38 |
+
out = out.decode("utf-8")
|
| 39 |
+
out = out.strip().split(":") # type: ignore
|
| 40 |
+
# Use str() and int() to help mypy see types
|
| 41 |
+
return (str(out[2]), int(out[-1]))
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def aix_platform():
|
| 45 |
+
# type: () -> str
|
| 46 |
+
"""
|
| 47 |
+
AIX filesets are identified by four decimal values: V.R.M.F.
|
| 48 |
+
V (version) and R (release) can be retreived using ``uname``
|
| 49 |
+
Since 2007, starting with AIX 5.3 TL7, the M value has been
|
| 50 |
+
included with the fileset bos.mp64 and represents the Technology
|
| 51 |
+
Level (TL) of AIX. The F (Fix) value also increases, but is not
|
| 52 |
+
relevant for comparing releases and binary compatibility.
|
| 53 |
+
For binary compatibility the so-called builddate is needed.
|
| 54 |
+
Again, the builddate of an AIX release is associated with bos.mp64.
|
| 55 |
+
AIX ABI compatibility is described as guaranteed at: https://www.ibm.com/\
|
| 56 |
+
support/knowledgecenter/en/ssw_aix_72/install/binary_compatability.html
|
| 57 |
+
|
| 58 |
+
For pep425 purposes the AIX platform tag becomes:
|
| 59 |
+
"aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bitsize)
|
| 60 |
+
e.g., "aix-6107-1415-32" for AIX 6.1 TL7 bd 1415, 32-bit
|
| 61 |
+
and, "aix-6107-1415-64" for AIX 6.1 TL7 bd 1415, 64-bit
|
| 62 |
+
"""
|
| 63 |
+
vrmf, bd = _aix_bosmp64()
|
| 64 |
+
return _aix_tag(_aix_vrtl(vrmf), bd)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# extract vrtl from the BUILD_GNU_TYPE as an int
|
| 68 |
+
def _aix_bgt():
|
| 69 |
+
# type: () -> List[int]
|
| 70 |
+
gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
|
| 71 |
+
if not gnu_type:
|
| 72 |
+
raise ValueError("BUILD_GNU_TYPE is not defined")
|
| 73 |
+
return _aix_vrtl(vrmf=gnu_type)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def aix_buildtag():
|
| 77 |
+
# type: () -> str
|
| 78 |
+
"""
|
| 79 |
+
Return the platform_tag of the system Python was built on.
|
| 80 |
+
"""
|
| 81 |
+
# AIX_BUILDDATE is defined by configure with:
|
| 82 |
+
# lslpp -Lcq bos.mp64 | awk -F: '{ print $NF }'
|
| 83 |
+
build_date = sysconfig.get_config_var("AIX_BUILDDATE")
|
| 84 |
+
try:
|
| 85 |
+
build_date = int(build_date)
|
| 86 |
+
except (ValueError, TypeError):
|
| 87 |
+
raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
|
| 88 |
+
f"{build_date!r}")
|
| 89 |
+
return _aix_tag(_aix_bgt(), build_date)
|
evalkit_tf446/lib/python3.10/_compression.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Internal classes used by the gzip, lzma and bz2 modules"""
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE # Compressed data read chunk size
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class BaseStream(io.BufferedIOBase):
|
| 10 |
+
"""Mode-checking helper functions."""
|
| 11 |
+
|
| 12 |
+
def _check_not_closed(self):
|
| 13 |
+
if self.closed:
|
| 14 |
+
raise ValueError("I/O operation on closed file")
|
| 15 |
+
|
| 16 |
+
def _check_can_read(self):
|
| 17 |
+
if not self.readable():
|
| 18 |
+
raise io.UnsupportedOperation("File not open for reading")
|
| 19 |
+
|
| 20 |
+
def _check_can_write(self):
|
| 21 |
+
if not self.writable():
|
| 22 |
+
raise io.UnsupportedOperation("File not open for writing")
|
| 23 |
+
|
| 24 |
+
def _check_can_seek(self):
|
| 25 |
+
if not self.readable():
|
| 26 |
+
raise io.UnsupportedOperation("Seeking is only supported "
|
| 27 |
+
"on files open for reading")
|
| 28 |
+
if not self.seekable():
|
| 29 |
+
raise io.UnsupportedOperation("The underlying file object "
|
| 30 |
+
"does not support seeking")
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class DecompressReader(io.RawIOBase):
|
| 34 |
+
"""Adapts the decompressor API to a RawIOBase reader API"""
|
| 35 |
+
|
| 36 |
+
def readable(self):
|
| 37 |
+
return True
|
| 38 |
+
|
| 39 |
+
def __init__(self, fp, decomp_factory, trailing_error=(), **decomp_args):
|
| 40 |
+
self._fp = fp
|
| 41 |
+
self._eof = False
|
| 42 |
+
self._pos = 0 # Current offset in decompressed stream
|
| 43 |
+
|
| 44 |
+
# Set to size of decompressed stream once it is known, for SEEK_END
|
| 45 |
+
self._size = -1
|
| 46 |
+
|
| 47 |
+
# Save the decompressor factory and arguments.
|
| 48 |
+
# If the file contains multiple compressed streams, each
|
| 49 |
+
# stream will need a separate decompressor object. A new decompressor
|
| 50 |
+
# object is also needed when implementing a backwards seek().
|
| 51 |
+
self._decomp_factory = decomp_factory
|
| 52 |
+
self._decomp_args = decomp_args
|
| 53 |
+
self._decompressor = self._decomp_factory(**self._decomp_args)
|
| 54 |
+
|
| 55 |
+
# Exception class to catch from decompressor signifying invalid
|
| 56 |
+
# trailing data to ignore
|
| 57 |
+
self._trailing_error = trailing_error
|
| 58 |
+
|
| 59 |
+
def close(self):
|
| 60 |
+
self._decompressor = None
|
| 61 |
+
return super().close()
|
| 62 |
+
|
| 63 |
+
def seekable(self):
|
| 64 |
+
return self._fp.seekable()
|
| 65 |
+
|
| 66 |
+
def readinto(self, b):
|
| 67 |
+
with memoryview(b) as view, view.cast("B") as byte_view:
|
| 68 |
+
data = self.read(len(byte_view))
|
| 69 |
+
byte_view[:len(data)] = data
|
| 70 |
+
return len(data)
|
| 71 |
+
|
| 72 |
+
def read(self, size=-1):
|
| 73 |
+
if size < 0:
|
| 74 |
+
return self.readall()
|
| 75 |
+
|
| 76 |
+
if not size or self._eof:
|
| 77 |
+
return b""
|
| 78 |
+
data = None # Default if EOF is encountered
|
| 79 |
+
# Depending on the input data, our call to the decompressor may not
|
| 80 |
+
# return any data. In this case, try again after reading another block.
|
| 81 |
+
while True:
|
| 82 |
+
if self._decompressor.eof:
|
| 83 |
+
rawblock = (self._decompressor.unused_data or
|
| 84 |
+
self._fp.read(BUFFER_SIZE))
|
| 85 |
+
if not rawblock:
|
| 86 |
+
break
|
| 87 |
+
# Continue to next stream.
|
| 88 |
+
self._decompressor = self._decomp_factory(
|
| 89 |
+
**self._decomp_args)
|
| 90 |
+
try:
|
| 91 |
+
data = self._decompressor.decompress(rawblock, size)
|
| 92 |
+
except self._trailing_error:
|
| 93 |
+
# Trailing data isn't a valid compressed stream; ignore it.
|
| 94 |
+
break
|
| 95 |
+
else:
|
| 96 |
+
if self._decompressor.needs_input:
|
| 97 |
+
rawblock = self._fp.read(BUFFER_SIZE)
|
| 98 |
+
if not rawblock:
|
| 99 |
+
raise EOFError("Compressed file ended before the "
|
| 100 |
+
"end-of-stream marker was reached")
|
| 101 |
+
else:
|
| 102 |
+
rawblock = b""
|
| 103 |
+
data = self._decompressor.decompress(rawblock, size)
|
| 104 |
+
if data:
|
| 105 |
+
break
|
| 106 |
+
if not data:
|
| 107 |
+
self._eof = True
|
| 108 |
+
self._size = self._pos
|
| 109 |
+
return b""
|
| 110 |
+
self._pos += len(data)
|
| 111 |
+
return data
|
| 112 |
+
|
| 113 |
+
def readall(self):
|
| 114 |
+
chunks = []
|
| 115 |
+
# sys.maxsize means the max length of output buffer is unlimited,
|
| 116 |
+
# so that the whole input buffer can be decompressed within one
|
| 117 |
+
# .decompress() call.
|
| 118 |
+
while data := self.read(sys.maxsize):
|
| 119 |
+
chunks.append(data)
|
| 120 |
+
|
| 121 |
+
return b"".join(chunks)
|
| 122 |
+
|
| 123 |
+
# Rewind the file to the beginning of the data stream.
|
| 124 |
+
def _rewind(self):
|
| 125 |
+
self._fp.seek(0)
|
| 126 |
+
self._eof = False
|
| 127 |
+
self._pos = 0
|
| 128 |
+
self._decompressor = self._decomp_factory(**self._decomp_args)
|
| 129 |
+
|
| 130 |
+
def seek(self, offset, whence=io.SEEK_SET):
|
| 131 |
+
# Recalculate offset as an absolute file position.
|
| 132 |
+
if whence == io.SEEK_SET:
|
| 133 |
+
pass
|
| 134 |
+
elif whence == io.SEEK_CUR:
|
| 135 |
+
offset = self._pos + offset
|
| 136 |
+
elif whence == io.SEEK_END:
|
| 137 |
+
# Seeking relative to EOF - we need to know the file's size.
|
| 138 |
+
if self._size < 0:
|
| 139 |
+
while self.read(io.DEFAULT_BUFFER_SIZE):
|
| 140 |
+
pass
|
| 141 |
+
offset = self._size + offset
|
| 142 |
+
else:
|
| 143 |
+
raise ValueError("Invalid value for whence: {}".format(whence))
|
| 144 |
+
|
| 145 |
+
# Make it so that offset is the number of bytes to skip forward.
|
| 146 |
+
if offset < self._pos:
|
| 147 |
+
self._rewind()
|
| 148 |
+
else:
|
| 149 |
+
offset -= self._pos
|
| 150 |
+
|
| 151 |
+
# Read and discard data until we reach the desired position.
|
| 152 |
+
while offset > 0:
|
| 153 |
+
data = self.read(min(io.DEFAULT_BUFFER_SIZE, offset))
|
| 154 |
+
if not data:
|
| 155 |
+
break
|
| 156 |
+
offset -= len(data)
|
| 157 |
+
|
| 158 |
+
return self._pos
|
| 159 |
+
|
| 160 |
+
def tell(self):
|
| 161 |
+
"""Return the current file position."""
|
| 162 |
+
return self._pos
|
evalkit_tf446/lib/python3.10/_osx_support.py
ADDED
|
@@ -0,0 +1,574 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Shared OS X support functions."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
'compiler_fixup',
|
| 9 |
+
'customize_config_vars',
|
| 10 |
+
'customize_compiler',
|
| 11 |
+
'get_platform_osx',
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
# configuration variables that may contain universal build flags,
|
| 15 |
+
# like "-arch" or "-isdkroot", that may need customization for
|
| 16 |
+
# the user environment
|
| 17 |
+
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
|
| 18 |
+
'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
|
| 19 |
+
'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
|
| 20 |
+
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
|
| 21 |
+
|
| 22 |
+
# configuration variables that may contain compiler calls
|
| 23 |
+
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
|
| 24 |
+
|
| 25 |
+
# prefix added to original configuration variable names
|
| 26 |
+
_INITPRE = '_OSX_SUPPORT_INITIAL_'
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _find_executable(executable, path=None):
|
| 30 |
+
"""Tries to find 'executable' in the directories listed in 'path'.
|
| 31 |
+
|
| 32 |
+
A string listing directories separated by 'os.pathsep'; defaults to
|
| 33 |
+
os.environ['PATH']. Returns the complete filename or None if not found.
|
| 34 |
+
"""
|
| 35 |
+
if path is None:
|
| 36 |
+
path = os.environ['PATH']
|
| 37 |
+
|
| 38 |
+
paths = path.split(os.pathsep)
|
| 39 |
+
base, ext = os.path.splitext(executable)
|
| 40 |
+
|
| 41 |
+
if (sys.platform == 'win32') and (ext != '.exe'):
|
| 42 |
+
executable = executable + '.exe'
|
| 43 |
+
|
| 44 |
+
if not os.path.isfile(executable):
|
| 45 |
+
for p in paths:
|
| 46 |
+
f = os.path.join(p, executable)
|
| 47 |
+
if os.path.isfile(f):
|
| 48 |
+
# the file exists, we have a shot at spawn working
|
| 49 |
+
return f
|
| 50 |
+
return None
|
| 51 |
+
else:
|
| 52 |
+
return executable
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _read_output(commandstring, capture_stderr=False):
|
| 56 |
+
"""Output from successful command execution or None"""
|
| 57 |
+
# Similar to os.popen(commandstring, "r").read(),
|
| 58 |
+
# but without actually using os.popen because that
|
| 59 |
+
# function is not usable during python bootstrap.
|
| 60 |
+
# tempfile is also not available then.
|
| 61 |
+
import contextlib
|
| 62 |
+
try:
|
| 63 |
+
import tempfile
|
| 64 |
+
fp = tempfile.NamedTemporaryFile()
|
| 65 |
+
except ImportError:
|
| 66 |
+
fp = open("/tmp/_osx_support.%s"%(
|
| 67 |
+
os.getpid(),), "w+b")
|
| 68 |
+
|
| 69 |
+
with contextlib.closing(fp) as fp:
|
| 70 |
+
if capture_stderr:
|
| 71 |
+
cmd = "%s >'%s' 2>&1" % (commandstring, fp.name)
|
| 72 |
+
else:
|
| 73 |
+
cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
|
| 74 |
+
return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _find_build_tool(toolname):
|
| 78 |
+
"""Find a build tool on current path or using xcrun"""
|
| 79 |
+
return (_find_executable(toolname)
|
| 80 |
+
or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
|
| 81 |
+
or ''
|
| 82 |
+
)
|
| 83 |
+
|
| 84 |
+
_SYSTEM_VERSION = None
|
| 85 |
+
|
| 86 |
+
def _get_system_version():
|
| 87 |
+
"""Return the OS X system version as a string"""
|
| 88 |
+
# Reading this plist is a documented way to get the system
|
| 89 |
+
# version (see the documentation for the Gestalt Manager)
|
| 90 |
+
# We avoid using platform.mac_ver to avoid possible bootstrap issues during
|
| 91 |
+
# the build of Python itself (distutils is used to build standard library
|
| 92 |
+
# extensions).
|
| 93 |
+
|
| 94 |
+
global _SYSTEM_VERSION
|
| 95 |
+
|
| 96 |
+
if _SYSTEM_VERSION is None:
|
| 97 |
+
_SYSTEM_VERSION = ''
|
| 98 |
+
try:
|
| 99 |
+
f = open('/System/Library/CoreServices/SystemVersion.plist', encoding="utf-8")
|
| 100 |
+
except OSError:
|
| 101 |
+
# We're on a plain darwin box, fall back to the default
|
| 102 |
+
# behaviour.
|
| 103 |
+
pass
|
| 104 |
+
else:
|
| 105 |
+
try:
|
| 106 |
+
m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
|
| 107 |
+
r'<string>(.*?)</string>', f.read())
|
| 108 |
+
finally:
|
| 109 |
+
f.close()
|
| 110 |
+
if m is not None:
|
| 111 |
+
_SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
|
| 112 |
+
# else: fall back to the default behaviour
|
| 113 |
+
|
| 114 |
+
return _SYSTEM_VERSION
|
| 115 |
+
|
| 116 |
+
_SYSTEM_VERSION_TUPLE = None
|
| 117 |
+
def _get_system_version_tuple():
|
| 118 |
+
"""
|
| 119 |
+
Return the macOS system version as a tuple
|
| 120 |
+
|
| 121 |
+
The return value is safe to use to compare
|
| 122 |
+
two version numbers.
|
| 123 |
+
"""
|
| 124 |
+
global _SYSTEM_VERSION_TUPLE
|
| 125 |
+
if _SYSTEM_VERSION_TUPLE is None:
|
| 126 |
+
osx_version = _get_system_version()
|
| 127 |
+
if osx_version:
|
| 128 |
+
try:
|
| 129 |
+
_SYSTEM_VERSION_TUPLE = tuple(int(i) for i in osx_version.split('.'))
|
| 130 |
+
except ValueError:
|
| 131 |
+
_SYSTEM_VERSION_TUPLE = ()
|
| 132 |
+
|
| 133 |
+
return _SYSTEM_VERSION_TUPLE
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def _remove_original_values(_config_vars):
|
| 137 |
+
"""Remove original unmodified values for testing"""
|
| 138 |
+
# This is needed for higher-level cross-platform tests of get_platform.
|
| 139 |
+
for k in list(_config_vars):
|
| 140 |
+
if k.startswith(_INITPRE):
|
| 141 |
+
del _config_vars[k]
|
| 142 |
+
|
| 143 |
+
def _save_modified_value(_config_vars, cv, newvalue):
|
| 144 |
+
"""Save modified and original unmodified value of configuration var"""
|
| 145 |
+
|
| 146 |
+
oldvalue = _config_vars.get(cv, '')
|
| 147 |
+
if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
|
| 148 |
+
_config_vars[_INITPRE + cv] = oldvalue
|
| 149 |
+
_config_vars[cv] = newvalue
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
_cache_default_sysroot = None
|
| 153 |
+
def _default_sysroot(cc):
|
| 154 |
+
""" Returns the root of the default SDK for this system, or '/' """
|
| 155 |
+
global _cache_default_sysroot
|
| 156 |
+
|
| 157 |
+
if _cache_default_sysroot is not None:
|
| 158 |
+
return _cache_default_sysroot
|
| 159 |
+
|
| 160 |
+
contents = _read_output('%s -c -E -v - </dev/null' % (cc,), True)
|
| 161 |
+
in_incdirs = False
|
| 162 |
+
for line in contents.splitlines():
|
| 163 |
+
if line.startswith("#include <...>"):
|
| 164 |
+
in_incdirs = True
|
| 165 |
+
elif line.startswith("End of search list"):
|
| 166 |
+
in_incdirs = False
|
| 167 |
+
elif in_incdirs:
|
| 168 |
+
line = line.strip()
|
| 169 |
+
if line == '/usr/include':
|
| 170 |
+
_cache_default_sysroot = '/'
|
| 171 |
+
elif line.endswith(".sdk/usr/include"):
|
| 172 |
+
_cache_default_sysroot = line[:-12]
|
| 173 |
+
if _cache_default_sysroot is None:
|
| 174 |
+
_cache_default_sysroot = '/'
|
| 175 |
+
|
| 176 |
+
return _cache_default_sysroot
|
| 177 |
+
|
| 178 |
+
def _supports_universal_builds():
|
| 179 |
+
"""Returns True if universal builds are supported on this system"""
|
| 180 |
+
# As an approximation, we assume that if we are running on 10.4 or above,
|
| 181 |
+
# then we are running with an Xcode environment that supports universal
|
| 182 |
+
# builds, in particular -isysroot and -arch arguments to the compiler. This
|
| 183 |
+
# is in support of allowing 10.4 universal builds to run on 10.3.x systems.
|
| 184 |
+
|
| 185 |
+
osx_version = _get_system_version_tuple()
|
| 186 |
+
return bool(osx_version >= (10, 4)) if osx_version else False
|
| 187 |
+
|
| 188 |
+
def _supports_arm64_builds():
|
| 189 |
+
"""Returns True if arm64 builds are supported on this system"""
|
| 190 |
+
# There are two sets of systems supporting macOS/arm64 builds:
|
| 191 |
+
# 1. macOS 11 and later, unconditionally
|
| 192 |
+
# 2. macOS 10.15 with Xcode 12.2 or later
|
| 193 |
+
# For now the second category is ignored.
|
| 194 |
+
osx_version = _get_system_version_tuple()
|
| 195 |
+
return osx_version >= (11, 0) if osx_version else False
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _find_appropriate_compiler(_config_vars):
|
| 199 |
+
"""Find appropriate C compiler for extension module builds"""
|
| 200 |
+
|
| 201 |
+
# Issue #13590:
|
| 202 |
+
# The OSX location for the compiler varies between OSX
|
| 203 |
+
# (or rather Xcode) releases. With older releases (up-to 10.5)
|
| 204 |
+
# the compiler is in /usr/bin, with newer releases the compiler
|
| 205 |
+
# can only be found inside Xcode.app if the "Command Line Tools"
|
| 206 |
+
# are not installed.
|
| 207 |
+
#
|
| 208 |
+
# Furthermore, the compiler that can be used varies between
|
| 209 |
+
# Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
|
| 210 |
+
# as the compiler, after that 'clang' should be used because
|
| 211 |
+
# gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
|
| 212 |
+
# miscompiles Python.
|
| 213 |
+
|
| 214 |
+
# skip checks if the compiler was overridden with a CC env variable
|
| 215 |
+
if 'CC' in os.environ:
|
| 216 |
+
return _config_vars
|
| 217 |
+
|
| 218 |
+
# The CC config var might contain additional arguments.
|
| 219 |
+
# Ignore them while searching.
|
| 220 |
+
cc = oldcc = _config_vars['CC'].split()[0]
|
| 221 |
+
if not _find_executable(cc):
|
| 222 |
+
# Compiler is not found on the shell search PATH.
|
| 223 |
+
# Now search for clang, first on PATH (if the Command LIne
|
| 224 |
+
# Tools have been installed in / or if the user has provided
|
| 225 |
+
# another location via CC). If not found, try using xcrun
|
| 226 |
+
# to find an uninstalled clang (within a selected Xcode).
|
| 227 |
+
|
| 228 |
+
# NOTE: Cannot use subprocess here because of bootstrap
|
| 229 |
+
# issues when building Python itself (and os.popen is
|
| 230 |
+
# implemented on top of subprocess and is therefore not
|
| 231 |
+
# usable as well)
|
| 232 |
+
|
| 233 |
+
cc = _find_build_tool('clang')
|
| 234 |
+
|
| 235 |
+
elif os.path.basename(cc).startswith('gcc'):
|
| 236 |
+
# Compiler is GCC, check if it is LLVM-GCC
|
| 237 |
+
data = _read_output("'%s' --version"
|
| 238 |
+
% (cc.replace("'", "'\"'\"'"),))
|
| 239 |
+
if data and 'llvm-gcc' in data:
|
| 240 |
+
# Found LLVM-GCC, fall back to clang
|
| 241 |
+
cc = _find_build_tool('clang')
|
| 242 |
+
|
| 243 |
+
if not cc:
|
| 244 |
+
raise SystemError(
|
| 245 |
+
"Cannot locate working compiler")
|
| 246 |
+
|
| 247 |
+
if cc != oldcc:
|
| 248 |
+
# Found a replacement compiler.
|
| 249 |
+
# Modify config vars using new compiler, if not already explicitly
|
| 250 |
+
# overridden by an env variable, preserving additional arguments.
|
| 251 |
+
for cv in _COMPILER_CONFIG_VARS:
|
| 252 |
+
if cv in _config_vars and cv not in os.environ:
|
| 253 |
+
cv_split = _config_vars[cv].split()
|
| 254 |
+
cv_split[0] = cc if cv != 'CXX' else cc + '++'
|
| 255 |
+
_save_modified_value(_config_vars, cv, ' '.join(cv_split))
|
| 256 |
+
|
| 257 |
+
return _config_vars
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def _remove_universal_flags(_config_vars):
|
| 261 |
+
"""Remove all universal build arguments from config vars"""
|
| 262 |
+
|
| 263 |
+
for cv in _UNIVERSAL_CONFIG_VARS:
|
| 264 |
+
# Do not alter a config var explicitly overridden by env var
|
| 265 |
+
if cv in _config_vars and cv not in os.environ:
|
| 266 |
+
flags = _config_vars[cv]
|
| 267 |
+
flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
|
| 268 |
+
flags = re.sub(r'-isysroot\s*\S+', ' ', flags)
|
| 269 |
+
_save_modified_value(_config_vars, cv, flags)
|
| 270 |
+
|
| 271 |
+
return _config_vars
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def _remove_unsupported_archs(_config_vars):
|
| 275 |
+
"""Remove any unsupported archs from config vars"""
|
| 276 |
+
# Different Xcode releases support different sets for '-arch'
|
| 277 |
+
# flags. In particular, Xcode 4.x no longer supports the
|
| 278 |
+
# PPC architectures.
|
| 279 |
+
#
|
| 280 |
+
# This code automatically removes '-arch ppc' and '-arch ppc64'
|
| 281 |
+
# when these are not supported. That makes it possible to
|
| 282 |
+
# build extensions on OSX 10.7 and later with the prebuilt
|
| 283 |
+
# 32-bit installer on the python.org website.
|
| 284 |
+
|
| 285 |
+
# skip checks if the compiler was overridden with a CC env variable
|
| 286 |
+
if 'CC' in os.environ:
|
| 287 |
+
return _config_vars
|
| 288 |
+
|
| 289 |
+
if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
|
| 290 |
+
# NOTE: Cannot use subprocess here because of bootstrap
|
| 291 |
+
# issues when building Python itself
|
| 292 |
+
status = os.system(
|
| 293 |
+
"""echo 'int main{};' | """
|
| 294 |
+
"""'%s' -c -arch ppc -x c -o /dev/null /dev/null 2>/dev/null"""
|
| 295 |
+
%(_config_vars['CC'].replace("'", "'\"'\"'"),))
|
| 296 |
+
if status:
|
| 297 |
+
# The compile failed for some reason. Because of differences
|
| 298 |
+
# across Xcode and compiler versions, there is no reliable way
|
| 299 |
+
# to be sure why it failed. Assume here it was due to lack of
|
| 300 |
+
# PPC support and remove the related '-arch' flags from each
|
| 301 |
+
# config variables not explicitly overridden by an environment
|
| 302 |
+
# variable. If the error was for some other reason, we hope the
|
| 303 |
+
# failure will show up again when trying to compile an extension
|
| 304 |
+
# module.
|
| 305 |
+
for cv in _UNIVERSAL_CONFIG_VARS:
|
| 306 |
+
if cv in _config_vars and cv not in os.environ:
|
| 307 |
+
flags = _config_vars[cv]
|
| 308 |
+
flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
|
| 309 |
+
_save_modified_value(_config_vars, cv, flags)
|
| 310 |
+
|
| 311 |
+
return _config_vars
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def _override_all_archs(_config_vars):
|
| 315 |
+
"""Allow override of all archs with ARCHFLAGS env var"""
|
| 316 |
+
# NOTE: This name was introduced by Apple in OSX 10.5 and
|
| 317 |
+
# is used by several scripting languages distributed with
|
| 318 |
+
# that OS release.
|
| 319 |
+
if 'ARCHFLAGS' in os.environ:
|
| 320 |
+
arch = os.environ['ARCHFLAGS']
|
| 321 |
+
for cv in _UNIVERSAL_CONFIG_VARS:
|
| 322 |
+
if cv in _config_vars and '-arch' in _config_vars[cv]:
|
| 323 |
+
flags = _config_vars[cv]
|
| 324 |
+
flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
|
| 325 |
+
flags = flags + ' ' + arch
|
| 326 |
+
_save_modified_value(_config_vars, cv, flags)
|
| 327 |
+
|
| 328 |
+
return _config_vars
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def _check_for_unavailable_sdk(_config_vars):
|
| 332 |
+
"""Remove references to any SDKs not available"""
|
| 333 |
+
# If we're on OSX 10.5 or later and the user tries to
|
| 334 |
+
# compile an extension using an SDK that is not present
|
| 335 |
+
# on the current machine it is better to not use an SDK
|
| 336 |
+
# than to fail. This is particularly important with
|
| 337 |
+
# the standalone Command Line Tools alternative to a
|
| 338 |
+
# full-blown Xcode install since the CLT packages do not
|
| 339 |
+
# provide SDKs. If the SDK is not present, it is assumed
|
| 340 |
+
# that the header files and dev libs have been installed
|
| 341 |
+
# to /usr and /System/Library by either a standalone CLT
|
| 342 |
+
# package or the CLT component within Xcode.
|
| 343 |
+
cflags = _config_vars.get('CFLAGS', '')
|
| 344 |
+
m = re.search(r'-isysroot\s*(\S+)', cflags)
|
| 345 |
+
if m is not None:
|
| 346 |
+
sdk = m.group(1)
|
| 347 |
+
if not os.path.exists(sdk):
|
| 348 |
+
for cv in _UNIVERSAL_CONFIG_VARS:
|
| 349 |
+
# Do not alter a config var explicitly overridden by env var
|
| 350 |
+
if cv in _config_vars and cv not in os.environ:
|
| 351 |
+
flags = _config_vars[cv]
|
| 352 |
+
flags = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ', flags)
|
| 353 |
+
_save_modified_value(_config_vars, cv, flags)
|
| 354 |
+
|
| 355 |
+
return _config_vars
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def compiler_fixup(compiler_so, cc_args):
|
| 359 |
+
"""
|
| 360 |
+
This function will strip '-isysroot PATH' and '-arch ARCH' from the
|
| 361 |
+
compile flags if the user has specified one them in extra_compile_flags.
|
| 362 |
+
|
| 363 |
+
This is needed because '-arch ARCH' adds another architecture to the
|
| 364 |
+
build, without a way to remove an architecture. Furthermore GCC will
|
| 365 |
+
barf if multiple '-isysroot' arguments are present.
|
| 366 |
+
"""
|
| 367 |
+
stripArch = stripSysroot = False
|
| 368 |
+
|
| 369 |
+
compiler_so = list(compiler_so)
|
| 370 |
+
|
| 371 |
+
if not _supports_universal_builds():
|
| 372 |
+
# OSX before 10.4.0, these don't support -arch and -isysroot at
|
| 373 |
+
# all.
|
| 374 |
+
stripArch = stripSysroot = True
|
| 375 |
+
else:
|
| 376 |
+
stripArch = '-arch' in cc_args
|
| 377 |
+
stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))
|
| 378 |
+
|
| 379 |
+
if stripArch or 'ARCHFLAGS' in os.environ:
|
| 380 |
+
while True:
|
| 381 |
+
try:
|
| 382 |
+
index = compiler_so.index('-arch')
|
| 383 |
+
# Strip this argument and the next one:
|
| 384 |
+
del compiler_so[index:index+2]
|
| 385 |
+
except ValueError:
|
| 386 |
+
break
|
| 387 |
+
|
| 388 |
+
elif not _supports_arm64_builds():
|
| 389 |
+
# Look for "-arch arm64" and drop that
|
| 390 |
+
for idx in reversed(range(len(compiler_so))):
|
| 391 |
+
if compiler_so[idx] == '-arch' and compiler_so[idx+1] == "arm64":
|
| 392 |
+
del compiler_so[idx:idx+2]
|
| 393 |
+
|
| 394 |
+
if 'ARCHFLAGS' in os.environ and not stripArch:
|
| 395 |
+
# User specified different -arch flags in the environ,
|
| 396 |
+
# see also distutils.sysconfig
|
| 397 |
+
compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
|
| 398 |
+
|
| 399 |
+
if stripSysroot:
|
| 400 |
+
while True:
|
| 401 |
+
indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
|
| 402 |
+
if not indices:
|
| 403 |
+
break
|
| 404 |
+
index = indices[0]
|
| 405 |
+
if compiler_so[index] == '-isysroot':
|
| 406 |
+
# Strip this argument and the next one:
|
| 407 |
+
del compiler_so[index:index+2]
|
| 408 |
+
else:
|
| 409 |
+
# It's '-isysroot/some/path' in one arg
|
| 410 |
+
del compiler_so[index:index+1]
|
| 411 |
+
|
| 412 |
+
# Check if the SDK that is used during compilation actually exists,
|
| 413 |
+
# the universal build requires the usage of a universal SDK and not all
|
| 414 |
+
# users have that installed by default.
|
| 415 |
+
sysroot = None
|
| 416 |
+
argvar = cc_args
|
| 417 |
+
indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
|
| 418 |
+
if not indices:
|
| 419 |
+
argvar = compiler_so
|
| 420 |
+
indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
|
| 421 |
+
|
| 422 |
+
for idx in indices:
|
| 423 |
+
if argvar[idx] == '-isysroot':
|
| 424 |
+
sysroot = argvar[idx+1]
|
| 425 |
+
break
|
| 426 |
+
else:
|
| 427 |
+
sysroot = argvar[idx][len('-isysroot'):]
|
| 428 |
+
break
|
| 429 |
+
|
| 430 |
+
if sysroot and not os.path.isdir(sysroot):
|
| 431 |
+
sys.stderr.write(f"Compiling with an SDK that doesn't seem to exist: {sysroot}\n")
|
| 432 |
+
sys.stderr.write("Please check your Xcode installation\n")
|
| 433 |
+
sys.stderr.flush()
|
| 434 |
+
|
| 435 |
+
return compiler_so
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
def customize_config_vars(_config_vars):
|
| 439 |
+
"""Customize Python build configuration variables.
|
| 440 |
+
|
| 441 |
+
Called internally from sysconfig with a mutable mapping
|
| 442 |
+
containing name/value pairs parsed from the configured
|
| 443 |
+
makefile used to build this interpreter. Returns
|
| 444 |
+
the mapping updated as needed to reflect the environment
|
| 445 |
+
in which the interpreter is running; in the case of
|
| 446 |
+
a Python from a binary installer, the installed
|
| 447 |
+
environment may be very different from the build
|
| 448 |
+
environment, i.e. different OS levels, different
|
| 449 |
+
built tools, different available CPU architectures.
|
| 450 |
+
|
| 451 |
+
This customization is performed whenever
|
| 452 |
+
distutils.sysconfig.get_config_vars() is first
|
| 453 |
+
called. It may be used in environments where no
|
| 454 |
+
compilers are present, i.e. when installing pure
|
| 455 |
+
Python dists. Customization of compiler paths
|
| 456 |
+
and detection of unavailable archs is deferred
|
| 457 |
+
until the first extension module build is
|
| 458 |
+
requested (in distutils.sysconfig.customize_compiler).
|
| 459 |
+
|
| 460 |
+
Currently called from distutils.sysconfig
|
| 461 |
+
"""
|
| 462 |
+
|
| 463 |
+
if not _supports_universal_builds():
|
| 464 |
+
# On Mac OS X before 10.4, check if -arch and -isysroot
|
| 465 |
+
# are in CFLAGS or LDFLAGS and remove them if they are.
|
| 466 |
+
# This is needed when building extensions on a 10.3 system
|
| 467 |
+
# using a universal build of python.
|
| 468 |
+
_remove_universal_flags(_config_vars)
|
| 469 |
+
|
| 470 |
+
# Allow user to override all archs with ARCHFLAGS env var
|
| 471 |
+
_override_all_archs(_config_vars)
|
| 472 |
+
|
| 473 |
+
# Remove references to sdks that are not found
|
| 474 |
+
_check_for_unavailable_sdk(_config_vars)
|
| 475 |
+
|
| 476 |
+
return _config_vars
|
| 477 |
+
|
| 478 |
+
|
| 479 |
+
def customize_compiler(_config_vars):
|
| 480 |
+
"""Customize compiler path and configuration variables.
|
| 481 |
+
|
| 482 |
+
This customization is performed when the first
|
| 483 |
+
extension module build is requested
|
| 484 |
+
in distutils.sysconfig.customize_compiler.
|
| 485 |
+
"""
|
| 486 |
+
|
| 487 |
+
# Find a compiler to use for extension module builds
|
| 488 |
+
_find_appropriate_compiler(_config_vars)
|
| 489 |
+
|
| 490 |
+
# Remove ppc arch flags if not supported here
|
| 491 |
+
_remove_unsupported_archs(_config_vars)
|
| 492 |
+
|
| 493 |
+
# Allow user to override all archs with ARCHFLAGS env var
|
| 494 |
+
_override_all_archs(_config_vars)
|
| 495 |
+
|
| 496 |
+
return _config_vars
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
def get_platform_osx(_config_vars, osname, release, machine):
|
| 500 |
+
"""Filter values for get_platform()"""
|
| 501 |
+
# called from get_platform() in sysconfig and distutils.util
|
| 502 |
+
#
|
| 503 |
+
# For our purposes, we'll assume that the system version from
|
| 504 |
+
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
|
| 505 |
+
# to. This makes the compatibility story a bit more sane because the
|
| 506 |
+
# machine is going to compile and link as if it were
|
| 507 |
+
# MACOSX_DEPLOYMENT_TARGET.
|
| 508 |
+
|
| 509 |
+
macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
|
| 510 |
+
macrelease = _get_system_version() or macver
|
| 511 |
+
macver = macver or macrelease
|
| 512 |
+
|
| 513 |
+
if macver:
|
| 514 |
+
release = macver
|
| 515 |
+
osname = "macosx"
|
| 516 |
+
|
| 517 |
+
# Use the original CFLAGS value, if available, so that we
|
| 518 |
+
# return the same machine type for the platform string.
|
| 519 |
+
# Otherwise, distutils may consider this a cross-compiling
|
| 520 |
+
# case and disallow installs.
|
| 521 |
+
cflags = _config_vars.get(_INITPRE+'CFLAGS',
|
| 522 |
+
_config_vars.get('CFLAGS', ''))
|
| 523 |
+
if macrelease:
|
| 524 |
+
try:
|
| 525 |
+
macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
|
| 526 |
+
except ValueError:
|
| 527 |
+
macrelease = (10, 3)
|
| 528 |
+
else:
|
| 529 |
+
# assume no universal support
|
| 530 |
+
macrelease = (10, 3)
|
| 531 |
+
|
| 532 |
+
if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
|
| 533 |
+
# The universal build will build fat binaries, but not on
|
| 534 |
+
# systems before 10.4
|
| 535 |
+
|
| 536 |
+
machine = 'fat'
|
| 537 |
+
|
| 538 |
+
archs = re.findall(r'-arch\s+(\S+)', cflags)
|
| 539 |
+
archs = tuple(sorted(set(archs)))
|
| 540 |
+
|
| 541 |
+
if len(archs) == 1:
|
| 542 |
+
machine = archs[0]
|
| 543 |
+
elif archs == ('arm64', 'x86_64'):
|
| 544 |
+
machine = 'universal2'
|
| 545 |
+
elif archs == ('i386', 'ppc'):
|
| 546 |
+
machine = 'fat'
|
| 547 |
+
elif archs == ('i386', 'x86_64'):
|
| 548 |
+
machine = 'intel'
|
| 549 |
+
elif archs == ('i386', 'ppc', 'x86_64'):
|
| 550 |
+
machine = 'fat3'
|
| 551 |
+
elif archs == ('ppc64', 'x86_64'):
|
| 552 |
+
machine = 'fat64'
|
| 553 |
+
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
|
| 554 |
+
machine = 'universal'
|
| 555 |
+
else:
|
| 556 |
+
raise ValueError(
|
| 557 |
+
"Don't know machine value for archs=%r" % (archs,))
|
| 558 |
+
|
| 559 |
+
elif machine == 'i386':
|
| 560 |
+
# On OSX the machine type returned by uname is always the
|
| 561 |
+
# 32-bit variant, even if the executable architecture is
|
| 562 |
+
# the 64-bit variant
|
| 563 |
+
if sys.maxsize >= 2**32:
|
| 564 |
+
machine = 'x86_64'
|
| 565 |
+
|
| 566 |
+
elif machine in ('PowerPC', 'Power_Macintosh'):
|
| 567 |
+
# Pick a sane name for the PPC architecture.
|
| 568 |
+
# See 'i386' case
|
| 569 |
+
if sys.maxsize >= 2**32:
|
| 570 |
+
machine = 'ppc64'
|
| 571 |
+
else:
|
| 572 |
+
machine = 'ppc'
|
| 573 |
+
|
| 574 |
+
return (osname, release, machine)
|
evalkit_tf446/lib/python3.10/_py_abc.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from _weakrefset import WeakSet
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def get_cache_token():
|
| 5 |
+
"""Returns the current ABC cache token.
|
| 6 |
+
|
| 7 |
+
The token is an opaque object (supporting equality testing) identifying the
|
| 8 |
+
current version of the ABC cache for virtual subclasses. The token changes
|
| 9 |
+
with every call to ``register()`` on any ABC.
|
| 10 |
+
"""
|
| 11 |
+
return ABCMeta._abc_invalidation_counter
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class ABCMeta(type):
|
| 15 |
+
"""Metaclass for defining Abstract Base Classes (ABCs).
|
| 16 |
+
|
| 17 |
+
Use this metaclass to create an ABC. An ABC can be subclassed
|
| 18 |
+
directly, and then acts as a mix-in class. You can also register
|
| 19 |
+
unrelated concrete classes (even built-in classes) and unrelated
|
| 20 |
+
ABCs as 'virtual subclasses' -- these and their descendants will
|
| 21 |
+
be considered subclasses of the registering ABC by the built-in
|
| 22 |
+
issubclass() function, but the registering ABC won't show up in
|
| 23 |
+
their MRO (Method Resolution Order) nor will method
|
| 24 |
+
implementations defined by the registering ABC be callable (not
|
| 25 |
+
even via super()).
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
# A global counter that is incremented each time a class is
|
| 29 |
+
# registered as a virtual subclass of anything. It forces the
|
| 30 |
+
# negative cache to be cleared before its next use.
|
| 31 |
+
# Note: this counter is private. Use `abc.get_cache_token()` for
|
| 32 |
+
# external code.
|
| 33 |
+
_abc_invalidation_counter = 0
|
| 34 |
+
|
| 35 |
+
def __new__(mcls, name, bases, namespace, /, **kwargs):
|
| 36 |
+
cls = super().__new__(mcls, name, bases, namespace, **kwargs)
|
| 37 |
+
# Compute set of abstract method names
|
| 38 |
+
abstracts = {name
|
| 39 |
+
for name, value in namespace.items()
|
| 40 |
+
if getattr(value, "__isabstractmethod__", False)}
|
| 41 |
+
for base in bases:
|
| 42 |
+
for name in getattr(base, "__abstractmethods__", set()):
|
| 43 |
+
value = getattr(cls, name, None)
|
| 44 |
+
if getattr(value, "__isabstractmethod__", False):
|
| 45 |
+
abstracts.add(name)
|
| 46 |
+
cls.__abstractmethods__ = frozenset(abstracts)
|
| 47 |
+
# Set up inheritance registry
|
| 48 |
+
cls._abc_registry = WeakSet()
|
| 49 |
+
cls._abc_cache = WeakSet()
|
| 50 |
+
cls._abc_negative_cache = WeakSet()
|
| 51 |
+
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
|
| 52 |
+
return cls
|
| 53 |
+
|
| 54 |
+
def register(cls, subclass):
|
| 55 |
+
"""Register a virtual subclass of an ABC.
|
| 56 |
+
|
| 57 |
+
Returns the subclass, to allow usage as a class decorator.
|
| 58 |
+
"""
|
| 59 |
+
if not isinstance(subclass, type):
|
| 60 |
+
raise TypeError("Can only register classes")
|
| 61 |
+
if issubclass(subclass, cls):
|
| 62 |
+
return subclass # Already a subclass
|
| 63 |
+
# Subtle: test for cycles *after* testing for "already a subclass";
|
| 64 |
+
# this means we allow X.register(X) and interpret it as a no-op.
|
| 65 |
+
if issubclass(cls, subclass):
|
| 66 |
+
# This would create a cycle, which is bad for the algorithm below
|
| 67 |
+
raise RuntimeError("Refusing to create an inheritance cycle")
|
| 68 |
+
cls._abc_registry.add(subclass)
|
| 69 |
+
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
|
| 70 |
+
return subclass
|
| 71 |
+
|
| 72 |
+
def _dump_registry(cls, file=None):
|
| 73 |
+
"""Debug helper to print the ABC registry."""
|
| 74 |
+
print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
|
| 75 |
+
print(f"Inv. counter: {get_cache_token()}", file=file)
|
| 76 |
+
for name in cls.__dict__:
|
| 77 |
+
if name.startswith("_abc_"):
|
| 78 |
+
value = getattr(cls, name)
|
| 79 |
+
if isinstance(value, WeakSet):
|
| 80 |
+
value = set(value)
|
| 81 |
+
print(f"{name}: {value!r}", file=file)
|
| 82 |
+
|
| 83 |
+
def _abc_registry_clear(cls):
|
| 84 |
+
"""Clear the registry (for debugging or testing)."""
|
| 85 |
+
cls._abc_registry.clear()
|
| 86 |
+
|
| 87 |
+
def _abc_caches_clear(cls):
|
| 88 |
+
"""Clear the caches (for debugging or testing)."""
|
| 89 |
+
cls._abc_cache.clear()
|
| 90 |
+
cls._abc_negative_cache.clear()
|
| 91 |
+
|
| 92 |
+
def __instancecheck__(cls, instance):
|
| 93 |
+
"""Override for isinstance(instance, cls)."""
|
| 94 |
+
# Inline the cache checking
|
| 95 |
+
subclass = instance.__class__
|
| 96 |
+
if subclass in cls._abc_cache:
|
| 97 |
+
return True
|
| 98 |
+
subtype = type(instance)
|
| 99 |
+
if subtype is subclass:
|
| 100 |
+
if (cls._abc_negative_cache_version ==
|
| 101 |
+
ABCMeta._abc_invalidation_counter and
|
| 102 |
+
subclass in cls._abc_negative_cache):
|
| 103 |
+
return False
|
| 104 |
+
# Fall back to the subclass check.
|
| 105 |
+
return cls.__subclasscheck__(subclass)
|
| 106 |
+
return any(cls.__subclasscheck__(c) for c in (subclass, subtype))
|
| 107 |
+
|
| 108 |
+
def __subclasscheck__(cls, subclass):
|
| 109 |
+
"""Override for issubclass(subclass, cls)."""
|
| 110 |
+
if not isinstance(subclass, type):
|
| 111 |
+
raise TypeError('issubclass() arg 1 must be a class')
|
| 112 |
+
# Check cache
|
| 113 |
+
if subclass in cls._abc_cache:
|
| 114 |
+
return True
|
| 115 |
+
# Check negative cache; may have to invalidate
|
| 116 |
+
if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
|
| 117 |
+
# Invalidate the negative cache
|
| 118 |
+
cls._abc_negative_cache = WeakSet()
|
| 119 |
+
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
|
| 120 |
+
elif subclass in cls._abc_negative_cache:
|
| 121 |
+
return False
|
| 122 |
+
# Check the subclass hook
|
| 123 |
+
ok = cls.__subclasshook__(subclass)
|
| 124 |
+
if ok is not NotImplemented:
|
| 125 |
+
assert isinstance(ok, bool)
|
| 126 |
+
if ok:
|
| 127 |
+
cls._abc_cache.add(subclass)
|
| 128 |
+
else:
|
| 129 |
+
cls._abc_negative_cache.add(subclass)
|
| 130 |
+
return ok
|
| 131 |
+
# Check if it's a direct subclass
|
| 132 |
+
if cls in getattr(subclass, '__mro__', ()):
|
| 133 |
+
cls._abc_cache.add(subclass)
|
| 134 |
+
return True
|
| 135 |
+
# Check if it's a subclass of a registered class (recursive)
|
| 136 |
+
for rcls in cls._abc_registry:
|
| 137 |
+
if issubclass(subclass, rcls):
|
| 138 |
+
cls._abc_cache.add(subclass)
|
| 139 |
+
return True
|
| 140 |
+
# Check if it's a subclass of a subclass (recursive)
|
| 141 |
+
for scls in cls.__subclasses__():
|
| 142 |
+
if issubclass(subclass, scls):
|
| 143 |
+
cls._abc_cache.add(subclass)
|
| 144 |
+
return True
|
| 145 |
+
# No dice; update negative cache
|
| 146 |
+
cls._abc_negative_cache.add(subclass)
|
| 147 |
+
return False
|
evalkit_tf446/lib/python3.10/_sitebuiltins.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The objects used by the site module to add custom builtins.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Those objects are almost immortal and they keep a reference to their module
|
| 6 |
+
# globals. Defining them in the site module would keep too many references
|
| 7 |
+
# alive.
|
| 8 |
+
# Note this means this module should also avoid keep things alive in its
|
| 9 |
+
# globals.
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
|
| 13 |
+
class Quitter(object):
|
| 14 |
+
def __init__(self, name, eof):
|
| 15 |
+
self.name = name
|
| 16 |
+
self.eof = eof
|
| 17 |
+
def __repr__(self):
|
| 18 |
+
return 'Use %s() or %s to exit' % (self.name, self.eof)
|
| 19 |
+
def __call__(self, code=None):
|
| 20 |
+
# Shells like IDLE catch the SystemExit, but listen when their
|
| 21 |
+
# stdin wrapper is closed.
|
| 22 |
+
try:
|
| 23 |
+
sys.stdin.close()
|
| 24 |
+
except:
|
| 25 |
+
pass
|
| 26 |
+
raise SystemExit(code)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class _Printer(object):
|
| 30 |
+
"""interactive prompt objects for printing the license text, a list of
|
| 31 |
+
contributors and the copyright notice."""
|
| 32 |
+
|
| 33 |
+
MAXLINES = 23
|
| 34 |
+
|
| 35 |
+
def __init__(self, name, data, files=(), dirs=()):
|
| 36 |
+
import os
|
| 37 |
+
self.__name = name
|
| 38 |
+
self.__data = data
|
| 39 |
+
self.__lines = None
|
| 40 |
+
self.__filenames = [os.path.join(dir, filename)
|
| 41 |
+
for dir in dirs
|
| 42 |
+
for filename in files]
|
| 43 |
+
|
| 44 |
+
def __setup(self):
|
| 45 |
+
if self.__lines:
|
| 46 |
+
return
|
| 47 |
+
data = None
|
| 48 |
+
for filename in self.__filenames:
|
| 49 |
+
try:
|
| 50 |
+
with open(filename, encoding='utf-8') as fp:
|
| 51 |
+
data = fp.read()
|
| 52 |
+
break
|
| 53 |
+
except OSError:
|
| 54 |
+
pass
|
| 55 |
+
if not data:
|
| 56 |
+
data = self.__data
|
| 57 |
+
self.__lines = data.split('\n')
|
| 58 |
+
self.__linecnt = len(self.__lines)
|
| 59 |
+
|
| 60 |
+
def __repr__(self):
|
| 61 |
+
self.__setup()
|
| 62 |
+
if len(self.__lines) <= self.MAXLINES:
|
| 63 |
+
return "\n".join(self.__lines)
|
| 64 |
+
else:
|
| 65 |
+
return "Type %s() to see the full %s text" % ((self.__name,)*2)
|
| 66 |
+
|
| 67 |
+
def __call__(self):
|
| 68 |
+
self.__setup()
|
| 69 |
+
prompt = 'Hit Return for more, or q (and Return) to quit: '
|
| 70 |
+
lineno = 0
|
| 71 |
+
while 1:
|
| 72 |
+
try:
|
| 73 |
+
for i in range(lineno, lineno + self.MAXLINES):
|
| 74 |
+
print(self.__lines[i])
|
| 75 |
+
except IndexError:
|
| 76 |
+
break
|
| 77 |
+
else:
|
| 78 |
+
lineno += self.MAXLINES
|
| 79 |
+
key = None
|
| 80 |
+
while key is None:
|
| 81 |
+
key = input(prompt)
|
| 82 |
+
if key not in ('', 'q'):
|
| 83 |
+
key = None
|
| 84 |
+
if key == 'q':
|
| 85 |
+
break
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class _Helper(object):
|
| 89 |
+
"""Define the builtin 'help'.
|
| 90 |
+
|
| 91 |
+
This is a wrapper around pydoc.help that provides a helpful message
|
| 92 |
+
when 'help' is typed at the Python interactive prompt.
|
| 93 |
+
|
| 94 |
+
Calling help() at the Python prompt starts an interactive help session.
|
| 95 |
+
Calling help(thing) prints help for the python object 'thing'.
|
| 96 |
+
"""
|
| 97 |
+
|
| 98 |
+
def __repr__(self):
|
| 99 |
+
return "Type help() for interactive help, " \
|
| 100 |
+
"or help(object) for help about object."
|
| 101 |
+
def __call__(self, *args, **kwds):
|
| 102 |
+
import pydoc
|
| 103 |
+
return pydoc.help(*args, **kwds)
|
evalkit_tf446/lib/python3.10/_strptime.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Strptime-related classes and functions.
|
| 2 |
+
|
| 3 |
+
CLASSES:
|
| 4 |
+
LocaleTime -- Discovers and stores locale-specific time information
|
| 5 |
+
TimeRE -- Creates regexes for pattern matching a string of text containing
|
| 6 |
+
time information
|
| 7 |
+
|
| 8 |
+
FUNCTIONS:
|
| 9 |
+
_getlang -- Figure out what language is being used for the locale
|
| 10 |
+
strptime -- Calculates the time struct represented by the passed-in string
|
| 11 |
+
|
| 12 |
+
"""
|
| 13 |
+
import time
|
| 14 |
+
import locale
|
| 15 |
+
import calendar
|
| 16 |
+
from re import compile as re_compile
|
| 17 |
+
from re import IGNORECASE
|
| 18 |
+
from re import escape as re_escape
|
| 19 |
+
from datetime import (date as datetime_date,
|
| 20 |
+
timedelta as datetime_timedelta,
|
| 21 |
+
timezone as datetime_timezone)
|
| 22 |
+
from _thread import allocate_lock as _thread_allocate_lock
|
| 23 |
+
|
| 24 |
+
__all__ = []
|
| 25 |
+
|
| 26 |
+
def _getlang():
|
| 27 |
+
# Figure out what the current language is set to.
|
| 28 |
+
return locale.getlocale(locale.LC_TIME)
|
| 29 |
+
|
| 30 |
+
class LocaleTime(object):
    """Stores and handles locale-specific information related to time.

    ATTRIBUTES:
        f_weekday -- full weekday names (7-item list)
        a_weekday -- abbreviated weekday names (7-item list)
        f_month -- full month names (13-item list; dummy value in [0], which
                    is added by code)
        a_month -- abbreviated month names (13-item list, dummy value in
                    [0], which is added by code)
        am_pm -- AM/PM representation (2-item list)
        LC_date_time -- format string for date/time representation (string)
        LC_date -- format string for date representation (string)
        LC_time -- format string for time representation (string)
        timezone -- daylight- and non-daylight-savings timezone representation
                    (2-item list of sets)
        lang -- Language used by instance (2-item tuple)
    """

    def __init__(self):
        """Set all attributes.

        Order of methods called matters for dependency reasons.

        The locale language is set at the offset and then checked again before
        exiting.  This is to make sure that the attributes were not set with a
        mix of information from more than one locale.  This would most likely
        happen when using threads where one thread calls a locale-dependent
        function while another thread changes the locale while the function in
        the other thread is still running.  Proper coding would call for
        locks to prevent changing the locale while locale-dependent code is
        running.  The check here is done in case someone does not think about
        doing this.

        Only other possible issue is if someone changed the timezone and did
        not call tz.tzset .  That is an issue for the programmer, though,
        since changing the timezone is worthless without that call.

        """
        self.lang = _getlang()
        self.__calc_weekday()
        self.__calc_month()
        self.__calc_am_pm()
        # Timezone info must be calculated before the date/time formats so
        # that __calc_date_time can substitute %Z for timezone names.
        self.__calc_timezone()
        self.__calc_date_time()
        # Re-check locale and timezone to detect a concurrent change that
        # would have produced attributes from a mix of locales/timezones.
        if _getlang() != self.lang:
            raise ValueError("locale changed during initialization")
        if time.tzname != self.tzname or time.daylight != self.daylight:
            raise ValueError("timezone changed during initialization")

    def __calc_weekday(self):
        # Set self.a_weekday and self.f_weekday using the calendar
        # module.  Names are lowercased so matching is case-insensitive.
        a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
        f_weekday = [calendar.day_name[i].lower() for i in range(7)]
        self.a_weekday = a_weekday
        self.f_weekday = f_weekday

    def __calc_month(self):
        # Set self.f_month and self.a_month using the calendar module.
        # Index 0 is the empty-string dummy so month numbers map directly.
        a_month = [calendar.month_abbr[i].lower() for i in range(13)]
        f_month = [calendar.month_name[i].lower() for i in range(13)]
        self.a_month = a_month
        self.f_month = f_month

    def __calc_am_pm(self):
        # Set self.am_pm by using time.strftime().

        # The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
        # magical; just happened to have used it everywhere else where a
        # static date was needed.
        am_pm = []
        for hour in (1, 22):
            time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
            am_pm.append(time.strftime("%p", time_tuple).lower())
        self.am_pm = am_pm

    def __calc_date_time(self):
        # Set self.date_time, self.date, & self.time by using
        # time.strftime().

        # Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
        # overloaded numbers is minimized.  The order in which searches for
        # values within the format string is very important; it eliminates
        # possible ambiguity for what something represents.
        time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
        date_time = [None, None, None]
        date_time[0] = time.strftime("%c", time_tuple).lower()
        date_time[1] = time.strftime("%x", time_tuple).lower()
        date_time[2] = time.strftime("%X", time_tuple).lower()
        # Replace concrete rendered values with format directives, longest /
        # most specific first, to recover the locale's format strings.
        replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
                    (self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
                    (self.a_month[3], '%b'), (self.am_pm[1], '%p'),
                    ('1999', '%Y'), ('99', '%y'), ('22', '%H'),
                    ('44', '%M'), ('55', '%S'), ('76', '%j'),
                    ('17', '%d'), ('03', '%m'), ('3', '%m'),
                    # '3' needed for when no leading zero.
                    ('2', '%w'), ('10', '%I')]
        replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
                                                for tz in tz_values])
        for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
            current_format = date_time[offset]
            for old, new in replacement_pairs:
                # Must deal with possible lack of locale info
                # manifesting itself as the empty string (e.g., Swedish's
                # lack of AM/PM info) or a platform returning a tuple of empty
                # strings (e.g., MacOS 9 having timezone as ('','')).
                if old:
                    current_format = current_format.replace(old, new)
            # If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
            # 2005-01-03 occurs before the first Monday of the year.  Otherwise
            # %U is used.
            time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
            if '00' in time.strftime(directive, time_tuple):
                U_W = '%W'
            else:
                U_W = '%U'
            date_time[offset] = current_format.replace('11', U_W)
        self.LC_date_time = date_time[0]
        self.LC_date = date_time[1]
        self.LC_time = date_time[2]

    def __calc_timezone(self):
        # Set self.timezone by using time.tzname.
        # Do not worry about possibility of time.tzname[0] == time.tzname[1]
        # and time.daylight; handle that in strptime.
        try:
            time.tzset()
        except AttributeError:
            # tzset is not available on all platforms (e.g., Windows).
            pass
        self.tzname = time.tzname
        self.daylight = time.daylight
        no_saving = frozenset({"utc", "gmt", self.tzname[0].lower()})
        if self.daylight:
            has_saving = frozenset({self.tzname[1].lower()})
        else:
            has_saving = frozenset()
        self.timezone = (no_saving, has_saving)
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class TimeRE(dict):
    """Handle conversion from format directives to regexes.

    Maps each strptime directive character (e.g. 'd', 'Y') to a regex
    fragment with a same-named capture group.
    """

    def __init__(self, locale_time=None):
        """Create keys/values.

        Order of execution is important for dependency reasons.

        """
        if locale_time:
            self.locale_time = locale_time
        else:
            self.locale_time = LocaleTime()
        base = super()
        base.__init__({
            # The " [1-9]" part of the regex is to make %c from ANSI C work
            'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
            'f': r"(?P<f>[0-9]{1,6})",
            'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
            'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
            'G': r"(?P<G>\d\d\d\d)",
            'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
            'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
            'M': r"(?P<M>[0-5]\d|\d)",
            'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
            'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
            'w': r"(?P<w>[0-6])",
            'u': r"(?P<u>[1-7])",
            'V': r"(?P<V>5[0-3]|0[1-9]|[1-4]\d|\d)",
            # W is set below by using 'U'
            'y': r"(?P<y>\d\d)",
            #XXX: Does 'Y' need to worry about having less or more than
            #     4 digits?
            'Y': r"(?P<Y>\d\d\d\d)",
            # (?-i:Z) keeps the literal 'Z' case-sensitive even though the
            # full pattern is compiled with IGNORECASE.
            'z': r"(?P<z>[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|(?-i:Z))",
            'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
            'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
            'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
            'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
            'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
            'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
                                        for tz in tz_names),
                                'Z'),
            '%': '%'})
        base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
        base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
        base.__setitem__('x', self.pattern(self.locale_time.LC_date))
        base.__setitem__('X', self.pattern(self.locale_time.LC_time))

    def __seqToRE(self, to_convert, directive):
        """Convert a list to a regex string for matching a directive.

        Want possible matching values to be from longest to shortest.  This
        prevents the possibility of a match occurring for a value that also
        a substring of a larger value that should have matched (e.g., 'abc'
        matching when 'abcdef' should have been the match).

        """
        to_convert = sorted(to_convert, key=len, reverse=True)
        for value in to_convert:
            if value != '':
                break
        else:
            # All values were empty strings; no usable alternation.
            return ''
        regex = '|'.join(re_escape(stuff) for stuff in to_convert)
        regex = '(?P<%s>%s' % (directive, regex)
        return '%s)' % regex

    def pattern(self, format):
        """Return regex pattern for the format string.

        Need to make sure that any characters that might be interpreted as
        regex syntax are escaped.

        """
        processed_format = ''
        # The sub() call escapes all characters that might be misconstrued
        # as regex syntax.  Cannot use re.escape since we have to deal with
        # format directives (%m, etc.).
        regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
        format = regex_chars.sub(r"\\\1", format)
        # Any run of whitespace in the format matches any run in the input.
        whitespace_replacement = re_compile(r'\s+')
        format = whitespace_replacement.sub(r'\\s+', format)
        while '%' in format:
            directive_index = format.index('%')+1
            # Splice in the regex fragment for the directive character.
            processed_format = "%s%s%s" % (processed_format,
                                           format[:directive_index-1],
                                           self[format[directive_index]])
            format = format[directive_index+1:]
        return "%s%s" % (processed_format, format)

    def compile(self, format):
        """Return a compiled re object for the format string."""
        return re_compile(self.pattern(format), IGNORECASE)
| 264 |
+
|
| 265 |
+
# Module-level caches for compiled format regexes; shared across threads.
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}
|
| 271 |
+
|
| 272 |
+
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
|
| 273 |
+
"""Calculate the Julian day based on the year, week of the year, and day of
|
| 274 |
+
the week, with week_start_day representing whether the week of the year
|
| 275 |
+
assumes the week starts on Sunday or Monday (6 or 0)."""
|
| 276 |
+
first_weekday = datetime_date(year, 1, 1).weekday()
|
| 277 |
+
# If we are dealing with the %U directive (week starts on Sunday), it's
|
| 278 |
+
# easier to just shift the view to Sunday being the first day of the
|
| 279 |
+
# week.
|
| 280 |
+
if not week_starts_Mon:
|
| 281 |
+
first_weekday = (first_weekday + 1) % 7
|
| 282 |
+
day_of_week = (day_of_week + 1) % 7
|
| 283 |
+
# Need to watch out for a week 0 (when the first day of the year is not
|
| 284 |
+
# the same as that specified by %U or %W).
|
| 285 |
+
week_0_length = (7 - first_weekday) % 7
|
| 286 |
+
if week_of_year == 0:
|
| 287 |
+
return 1 + day_of_week - first_weekday
|
| 288 |
+
else:
|
| 289 |
+
days_to_week = week_0_length + (7 * (week_of_year - 1))
|
| 290 |
+
return 1 + days_to_week + day_of_week
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def _calc_julian_from_V(iso_year, iso_week, iso_weekday):
|
| 294 |
+
"""Calculate the Julian day based on the ISO 8601 year, week, and weekday.
|
| 295 |
+
ISO weeks start on Mondays, with week 01 being the week containing 4 Jan.
|
| 296 |
+
ISO week days range from 1 (Monday) to 7 (Sunday).
|
| 297 |
+
"""
|
| 298 |
+
correction = datetime_date(iso_year, 1, 4).isoweekday() + 3
|
| 299 |
+
ordinal = (iso_week * 7) + iso_weekday - correction
|
| 300 |
+
# ordinal may be negative or 0 now, which means the date is in the previous
|
| 301 |
+
# calendar year
|
| 302 |
+
if ordinal < 1:
|
| 303 |
+
ordinal += datetime_date(iso_year, 1, 1).toordinal()
|
| 304 |
+
iso_year -= 1
|
| 305 |
+
ordinal -= datetime_date(iso_year, 1, 1).toordinal()
|
| 306 |
+
return iso_year, ordinal
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a 2-tuple consisting of a time struct and an int containing
    the number of microseconds based on the input string and the
    format string."""

    for index, arg in enumerate([data_string, format]):
        if not isinstance(arg, str):
            msg = "strptime() argument {} must be str, not {}"
            raise TypeError(msg.format(index, type(arg)))

    global _TimeRE_cache, _regex_cache
    with _cache_lock:
        locale_time = _TimeRE_cache.locale_time
        # Rebuild the cache if the locale or timezone changed since it was
        # created; cached regexes would no longer match locale names.
        if (_getlang() != locale_time.lang or
            time.tzname != locale_time.tzname or
            time.daylight != locale_time.daylight):
            _TimeRE_cache = TimeRE()
            _regex_cache.clear()
            locale_time = _TimeRE_cache.locale_time
        if len(_regex_cache) > _CACHE_MAX_SIZE:
            _regex_cache.clear()
        format_regex = _regex_cache.get(format)
        if not format_regex:
            try:
                format_regex = _TimeRE_cache.compile(format)
            # KeyError raised when a bad format is found; can be specified as
            # \\, in which case it was a stray % but with a space after it
            except KeyError as err:
                bad_directive = err.args[0]
                if bad_directive == "\\":
                    bad_directive = "%"
                del err
                raise ValueError("'%s' is a bad directive in format '%s'" %
                                    (bad_directive, format)) from None
            # IndexError only occurs when the format string is "%"
            except IndexError:
                raise ValueError("stray %% in format '%s'" % format) from None
            _regex_cache[format] = format_regex
    found = format_regex.match(data_string)
    if not found:
        raise ValueError("time data %r does not match format %r" %
                         (data_string, format))
    if len(data_string) != found.end():
        raise ValueError("unconverted data remains: %s" %
                          data_string[found.end():])

    iso_year = year = None
    month = day = 1
    hour = minute = second = fraction = 0
    tz = -1
    gmtoff = None
    gmtoff_fraction = 0
    # Default to -1 to signify that values not known; not critical to have,
    # though
    iso_week = week_of_year = None
    week_of_year_start = None
    # weekday and julian defaulted to None so as to signal need to calculate
    # values
    weekday = julian = None
    found_dict = found.groupdict()
    for group_key in found_dict.keys():
        # Directives not explicitly handled below:
        #   c, x, X
        #      handled by making out of other directives
        #   U, W
        #      worthless without day of the week
        if group_key == 'y':
            year = int(found_dict['y'])
            # Open Group specification for strptime() states that a %y
            #value in the range of [00, 68] is in the century 2000, while
            #[69,99] is in the century 1900
            if year <= 68:
                year += 2000
            else:
                year += 1900
        elif group_key == 'Y':
            year = int(found_dict['Y'])
        elif group_key == 'G':
            iso_year = int(found_dict['G'])
        elif group_key == 'm':
            month = int(found_dict['m'])
        elif group_key == 'B':
            month = locale_time.f_month.index(found_dict['B'].lower())
        elif group_key == 'b':
            month = locale_time.a_month.index(found_dict['b'].lower())
        elif group_key == 'd':
            day = int(found_dict['d'])
        elif group_key == 'H':
            hour = int(found_dict['H'])
        elif group_key == 'I':
            hour = int(found_dict['I'])
            ampm = found_dict.get('p', '').lower()
            # If there was no AM/PM indicator, we'll treat this like AM
            if ampm in ('', locale_time.am_pm[0]):
                # We're in AM so the hour is correct unless we're
                # looking at 12 midnight.
                # 12 midnight == 12 AM == hour 0
                if hour == 12:
                    hour = 0
            elif ampm == locale_time.am_pm[1]:
                # We're in PM so we need to add 12 to the hour unless
                # we're looking at 12 noon.
                # 12 noon == 12 PM == hour 12
                if hour != 12:
                    hour += 12
        elif group_key == 'M':
            minute = int(found_dict['M'])
        elif group_key == 'S':
            second = int(found_dict['S'])
        elif group_key == 'f':
            s = found_dict['f']
            # Pad to always return microseconds.
            s += "0" * (6 - len(s))
            fraction = int(s)
        elif group_key == 'A':
            weekday = locale_time.f_weekday.index(found_dict['A'].lower())
        elif group_key == 'a':
            weekday = locale_time.a_weekday.index(found_dict['a'].lower())
        elif group_key == 'w':
            weekday = int(found_dict['w'])
            # %w counts Sunday as 0; normalize to Monday == 0.
            if weekday == 0:
                weekday = 6
            else:
                weekday -= 1
        elif group_key == 'u':
            # %u counts Monday as 1; normalize to Monday == 0.
            weekday = int(found_dict['u'])
            weekday -= 1
        elif group_key == 'j':
            julian = int(found_dict['j'])
        elif group_key in ('U', 'W'):
            week_of_year = int(found_dict[group_key])
            if group_key == 'U':
                # U starts week on Sunday.
                week_of_year_start = 6
            else:
                # W starts week on Monday.
                week_of_year_start = 0
        elif group_key == 'V':
            iso_week = int(found_dict['V'])
        elif group_key == 'z':
            z = found_dict['z']
            if z == 'Z':
                gmtoff = 0
            else:
                # Normalize "+HH:MM[:SS[.ffffff]]" to "+HHMM[SS[.ffffff]]",
                # rejecting inconsistent use of ':' separators.
                if z[3] == ':':
                    z = z[:3] + z[4:]
                    if len(z) > 5:
                        if z[5] != ':':
                            msg = f"Inconsistent use of : in {found_dict['z']}"
                            raise ValueError(msg)
                        z = z[:5] + z[6:]
                hours = int(z[1:3])
                minutes = int(z[3:5])
                seconds = int(z[5:7] or 0)
                gmtoff = (hours * 60 * 60) + (minutes * 60) + seconds
                gmtoff_remainder = z[8:]
                # Pad to always return microseconds.
                gmtoff_remainder_padding = "0" * (6 - len(gmtoff_remainder))
                gmtoff_fraction = int(gmtoff_remainder + gmtoff_remainder_padding)
                if z.startswith("-"):
                    gmtoff = -gmtoff
                    gmtoff_fraction = -gmtoff_fraction
        elif group_key == 'Z':
            # Since -1 is default value only need to worry about setting tz if
            # it can be something other than -1.
            found_zone = found_dict['Z'].lower()
            for value, tz_values in enumerate(locale_time.timezone):
                if found_zone in tz_values:
                    # Deal with bad locale setup where timezone names are the
                    # same and yet time.daylight is true; too ambiguous to
                    # be able to tell what timezone has daylight savings
                    if (time.tzname[0] == time.tzname[1] and
                       time.daylight and found_zone not in ("utc", "gmt")):
                        break
                    else:
                        tz = value
                        break
    # Deal with the cases where ambiguities arise
    # don't assume default values for ISO week/year
    if year is None and iso_year is not None:
        if iso_week is None or weekday is None:
            raise ValueError("ISO year directive '%G' must be used with "
                             "the ISO week directive '%V' and a weekday "
                             "directive ('%A', '%a', '%w', or '%u').")
        if julian is not None:
            raise ValueError("Day of the year directive '%j' is not "
                             "compatible with ISO year directive '%G'. "
                             "Use '%Y' instead.")
    elif week_of_year is None and iso_week is not None:
        if weekday is None:
            raise ValueError("ISO week directive '%V' must be used with "
                             "the ISO year directive '%G' and a weekday "
                             "directive ('%A', '%a', '%w', or '%u').")
        else:
            raise ValueError("ISO week directive '%V' is incompatible with "
                             "the year directive '%Y'. Use the ISO year '%G' "
                             "instead.")

    leap_year_fix = False
    if year is None and month == 2 and day == 29:
        year = 1904  # 1904 is first leap year of 20th century
        leap_year_fix = True
    elif year is None:
        year = 1900


    # If we know the week of the year and what day of that week, we can figure
    # out the Julian day of the year.
    if julian is None and weekday is not None:
        if week_of_year is not None:
            week_starts_Mon = True if week_of_year_start == 0 else False
            julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                                week_starts_Mon)
        elif iso_year is not None and iso_week is not None:
            year, julian = _calc_julian_from_V(iso_year, iso_week, weekday + 1)
        if julian is not None and julian <= 0:
            # The computed day fell in the previous calendar year.
            year -= 1
            yday = 366 if calendar.isleap(year) else 365
            julian += yday

    if julian is None:
        # Cannot pre-calculate datetime_date() since can change in Julian
        # calculation and thus could have different value for the day of
        # the week calculation.
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - \
                  datetime_date(year, 1, 1).toordinal() + 1
    else:  # Assume that if they bothered to include Julian day (or if it was
           # calculated above with year/week/weekday) it will be accurate.
        datetime_result = datetime_date.fromordinal(
                            (julian - 1) +
                            datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
        day = datetime_result.day
    if weekday is None:
        weekday = datetime_date(year, month, day).weekday()
    # Add timezone info
    tzname = found_dict.get("Z")

    if leap_year_fix:
        # the caller didn't supply a year but asked for Feb 29th. We couldn't
        # use the default of 1900 for computations. We set it back to ensure
        # that February 29th is smaller than March 1st.
        year = 1900

    return (year, month, day,
            hour, minute, second,
            weekday, julian, tz, tzname, gmtoff), fraction, gmtoff_fraction
| 558 |
+
|
| 559 |
+
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a time struct based on the input string and the
    format string."""
    # Only the time-tuple part is needed; the fractional pieces are dropped.
    parsed, _fraction, _gmtoff_fraction = _strptime(data_string, format)
    return time.struct_time(parsed[:time._STRUCT_TM_ITEMS])
|
| 564 |
+
|
| 565 |
+
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a class cls instance based on the input string and the
    format string."""
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
    tzname, gmtoff = tt[-2:]
    args = tt[:6] + (fraction,)
    if gmtoff is not None:
        # Build a fixed-offset tzinfo; include the zone name only when one
        # was actually parsed from the input.
        delta = datetime_timedelta(seconds=gmtoff, microseconds=gmtoff_fraction)
        zone = (datetime_timezone(delta, tzname) if tzname
                else datetime_timezone(delta))
        args += (zone,)
    return cls(*args)
|
evalkit_tf446/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py
ADDED
|
@@ -0,0 +1,986 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# system configuration generated and used by the sysconfig module
|
| 2 |
+
build_time_vars = {'ABIFLAGS': '',
|
| 3 |
+
'AC_APPLE_UNIVERSAL_BUILD': 0,
|
| 4 |
+
'AIX_BUILDDATE': 0,
|
| 5 |
+
'AIX_GENUINE_CPLUSPLUS': 0,
|
| 6 |
+
'ALIGNOF_LONG': 8,
|
| 7 |
+
'ALIGNOF_SIZE_T': 8,
|
| 8 |
+
'ALT_SOABI': 0,
|
| 9 |
+
'ANDROID_API_LEVEL': 0,
|
| 10 |
+
'AR': 'ar',
|
| 11 |
+
'ARFLAGS': 'rcs',
|
| 12 |
+
'BASECFLAGS': '-Wno-unused-result -Wsign-compare',
|
| 13 |
+
'BASECPPFLAGS': '-IObjects -IInclude -IPython',
|
| 14 |
+
'BASEMODLIBS': '',
|
| 15 |
+
'BINDIR': '/root/envs/evalkit_tf446/bin',
|
| 16 |
+
'BINLIBDEST': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 17 |
+
'BLDLIBRARY': 'libpython3.10.a',
|
| 18 |
+
'BLDSHARED': 'gcc -pthread -B /root/envs/evalkit_tf446/compiler_compat -shared '
|
| 19 |
+
''
|
| 20 |
+
''
|
| 21 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 22 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 23 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 24 |
+
''
|
| 25 |
+
''
|
| 26 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 27 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 28 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 29 |
+
'BUILDEXE': '',
|
| 30 |
+
'BUILDPYTHON': 'python',
|
| 31 |
+
'BUILD_GNU_TYPE': 'x86_64-conda_cos6-linux-gnu',
|
| 32 |
+
'BYTESTR_DEPS': '\\',
|
| 33 |
+
'CC': 'gcc -pthread -B /root/envs/evalkit_tf446/compiler_compat',
|
| 34 |
+
'CCSHARED': '-fPIC',
|
| 35 |
+
'CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 36 |
+
'-fPIC '
|
| 37 |
+
'-O2 '
|
| 38 |
+
'-isystem '
|
| 39 |
+
'/root/envs/evalkit_tf446/include '
|
| 40 |
+
''
|
| 41 |
+
''
|
| 42 |
+
''
|
| 43 |
+
'-fPIC '
|
| 44 |
+
'-O2 '
|
| 45 |
+
'-isystem '
|
| 46 |
+
'/root/envs/evalkit_tf446/include '
|
| 47 |
+
''
|
| 48 |
+
''
|
| 49 |
+
'',
|
| 50 |
+
'CFLAGSFORSHARED': '',
|
| 51 |
+
'CFLAGS_ALIASING': '',
|
| 52 |
+
'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in '
|
| 53 |
+
'Makefile.pre.in',
|
| 54 |
+
'CONFIGURE_CFLAGS': '-fPIC '
|
| 55 |
+
'-O2 '
|
| 56 |
+
'-isystem '
|
| 57 |
+
'/root/envs/evalkit_tf446/include '
|
| 58 |
+
''
|
| 59 |
+
''
|
| 60 |
+
''
|
| 61 |
+
'',
|
| 62 |
+
'CONFIGURE_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 63 |
+
''
|
| 64 |
+
'-g -std=c99 -Wextra '
|
| 65 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 66 |
+
'-Wno-missing-field-initializers '
|
| 67 |
+
'-Werror=implicit-function-declaration '
|
| 68 |
+
'-fvisibility=hidden',
|
| 69 |
+
'CONFIGURE_CPPFLAGS': '-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 70 |
+
'/root/envs/evalkit_tf446/include '
|
| 71 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 72 |
+
'CONFIGURE_LDFLAGS': ''
|
| 73 |
+
''
|
| 74 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 75 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 76 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 77 |
+
'CONFIGURE_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 78 |
+
''
|
| 79 |
+
'-g',
|
| 80 |
+
'CONFIG_ARGS': "'--prefix=/root/envs/evalkit_tf446'"
|
| 81 |
+
"'--build=x86_64-conda_cos6-linux-gnu'"
|
| 82 |
+
"'--host=x86_64-conda_cos6-linux-gnu''--enable-ipv6'"
|
| 83 |
+
"'--with-ensurepip=no'"
|
| 84 |
+
"'--with-tzpath=/root/envs/evalkit_tf446/share/zoneinfo:/root/envs/evalkit_tf446/share/tzinfo'"
|
| 85 |
+
"'--with-computed-gotos''--with-system-ffi'"
|
| 86 |
+
"'--enable-loadable-sqlite-extensions'"
|
| 87 |
+
"'--with-tcltk-includes=-I/root/envs/evalkit_tf446/include'"
|
| 88 |
+
"'--with-tcltk-libs=-L/root/envs/evalkit_tf446/lib "
|
| 89 |
+
"-ltcl8.6 -ltk8.6''--with-platlibdir=lib''--with-lto'"
|
| 90 |
+
"'--enable-optimizations'"
|
| 91 |
+
"'-oldincludedir=/croot/python-split_1733933809325/_build_env/x86_64-conda_cos6-linux-gnu/sysroot/usr/include'"
|
| 92 |
+
"'--disable-shared''PROFILE_TASK=-m test --pgo'"
|
| 93 |
+
"'build_alias=x86_64-conda_cos6-linux-gnu'"
|
| 94 |
+
"'host_alias=x86_64-conda_cos6-linux-gnu''MACHDEP=linux'"
|
| 95 |
+
"'CC=gcc''CFLAGS= "
|
| 96 |
+
'-fPIC '
|
| 97 |
+
'-O2 '
|
| 98 |
+
'-isystem '
|
| 99 |
+
'/root/envs/evalkit_tf446/include '
|
| 100 |
+
''
|
| 101 |
+
''
|
| 102 |
+
''
|
| 103 |
+
"''LDFLAGS= "
|
| 104 |
+
''
|
| 105 |
+
''
|
| 106 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 107 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 108 |
+
"-L/root/envs/evalkit_tf446/lib'"
|
| 109 |
+
"'CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem "
|
| 110 |
+
'/root/envs/evalkit_tf446/include '
|
| 111 |
+
"-I/root/envs/evalkit_tf446/include'"
|
| 112 |
+
"'CPP=/croot/python-split_1733933809325/_build_env/bin/cpp'"
|
| 113 |
+
"'PKG_CONFIG_PATH=/root/envs/evalkit_tf446/lib/pkgconfig'",
|
| 114 |
+
'CONFINCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 115 |
+
'CONFINCLUDEPY': '/root/envs/evalkit_tf446/include/python3.10',
|
| 116 |
+
'COREPYTHONPATH': '',
|
| 117 |
+
'COVERAGE_INFO': '/croot/python-split_1733933809325/work/build-static/coverage.info',
|
| 118 |
+
'COVERAGE_REPORT': '/croot/python-split_1733933809325/work/build-static/lcov-report',
|
| 119 |
+
'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov '
|
| 120 |
+
'report"',
|
| 121 |
+
'CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 122 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 123 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 124 |
+
'/root/envs/evalkit_tf446/include '
|
| 125 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 126 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 127 |
+
'/root/envs/evalkit_tf446/include '
|
| 128 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 129 |
+
'CXX': 'g++ -pthread -B /root/envs/evalkit_tf446/compiler_compat',
|
| 130 |
+
'DESTDIRS': '/root/envs/evalkit_tf446 '
|
| 131 |
+
'/root/envs/evalkit_tf446/lib '
|
| 132 |
+
'/root/envs/evalkit_tf446/lib/python3.10 '
|
| 133 |
+
'/root/envs/evalkit_tf446/lib/python3.10/lib-dynload',
|
| 134 |
+
'DESTLIB': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 135 |
+
'DESTPATH': '',
|
| 136 |
+
'DESTSHARED': '/root/envs/evalkit_tf446/lib/python3.10/lib-dynload',
|
| 137 |
+
'DFLAGS': '',
|
| 138 |
+
'DIRMODE': 755,
|
| 139 |
+
'DIST': 'README.rst ChangeLog configure configure.ac acconfig.h pyconfig.h.in '
|
| 140 |
+
'Makefile.pre.in Include Lib Misc Ext-dummy',
|
| 141 |
+
'DISTDIRS': 'Include Lib Misc Ext-dummy',
|
| 142 |
+
'DISTFILES': 'README.rst ChangeLog configure configure.ac acconfig.h '
|
| 143 |
+
'pyconfig.h.in Makefile.pre.in',
|
| 144 |
+
'DLINCLDIR': '.',
|
| 145 |
+
'DLLLIBRARY': '',
|
| 146 |
+
'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0,
|
| 147 |
+
'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0,
|
| 148 |
+
'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1,
|
| 149 |
+
'DTRACE': '',
|
| 150 |
+
'DTRACE_DEPS': '\\',
|
| 151 |
+
'DTRACE_HEADERS': '',
|
| 152 |
+
'DTRACE_OBJS': '',
|
| 153 |
+
'DYNLOADFILE': 'dynload_shlib.o',
|
| 154 |
+
'ENABLE_IPV6': 1,
|
| 155 |
+
'ENSUREPIP': 'no',
|
| 156 |
+
'EXE': '',
|
| 157 |
+
'EXEMODE': 755,
|
| 158 |
+
'EXPERIMENTAL_ISOLATED_SUBINTERPRETERS': 0,
|
| 159 |
+
'EXPORTSFROM': '',
|
| 160 |
+
'EXPORTSYMS': '',
|
| 161 |
+
'EXTRATESTOPTS': '',
|
| 162 |
+
'EXT_SUFFIX': '.cpython-310-x86_64-linux-gnu.so',
|
| 163 |
+
'FILEMODE': 644,
|
| 164 |
+
'FLOAT_WORDS_BIGENDIAN': 0,
|
| 165 |
+
'FLOCK_NEEDS_LIBBSD': 0,
|
| 166 |
+
'GETPGRP_HAVE_ARG': 0,
|
| 167 |
+
'GITBRANCH': '',
|
| 168 |
+
'GITTAG': '',
|
| 169 |
+
'GITVERSION': '',
|
| 170 |
+
'GNULD': 'no',
|
| 171 |
+
'HAVE_ACCEPT4': 1,
|
| 172 |
+
'HAVE_ACOSH': 1,
|
| 173 |
+
'HAVE_ADDRINFO': 1,
|
| 174 |
+
'HAVE_ALARM': 1,
|
| 175 |
+
'HAVE_ALIGNED_REQUIRED': 0,
|
| 176 |
+
'HAVE_ALLOCA_H': 1,
|
| 177 |
+
'HAVE_ALTZONE': 0,
|
| 178 |
+
'HAVE_ASINH': 1,
|
| 179 |
+
'HAVE_ASM_TYPES_H': 1,
|
| 180 |
+
'HAVE_ATANH': 1,
|
| 181 |
+
'HAVE_BIND_TEXTDOMAIN_CODESET': 1,
|
| 182 |
+
'HAVE_BLUETOOTH_BLUETOOTH_H': 0,
|
| 183 |
+
'HAVE_BLUETOOTH_H': 0,
|
| 184 |
+
'HAVE_BROKEN_MBSTOWCS': 0,
|
| 185 |
+
'HAVE_BROKEN_NICE': 0,
|
| 186 |
+
'HAVE_BROKEN_PIPE_BUF': 0,
|
| 187 |
+
'HAVE_BROKEN_POLL': 0,
|
| 188 |
+
'HAVE_BROKEN_POSIX_SEMAPHORES': 0,
|
| 189 |
+
'HAVE_BROKEN_PTHREAD_SIGMASK': 0,
|
| 190 |
+
'HAVE_BROKEN_SEM_GETVALUE': 0,
|
| 191 |
+
'HAVE_BROKEN_UNSETENV': 0,
|
| 192 |
+
'HAVE_BUILTIN_ATOMIC': 1,
|
| 193 |
+
'HAVE_CHFLAGS': 0,
|
| 194 |
+
'HAVE_CHOWN': 1,
|
| 195 |
+
'HAVE_CHROOT': 1,
|
| 196 |
+
'HAVE_CLOCK': 1,
|
| 197 |
+
'HAVE_CLOCK_GETRES': 1,
|
| 198 |
+
'HAVE_CLOCK_GETTIME': 1,
|
| 199 |
+
'HAVE_CLOCK_SETTIME': 1,
|
| 200 |
+
'HAVE_CLOSE_RANGE': 0,
|
| 201 |
+
'HAVE_COMPUTED_GOTOS': 1,
|
| 202 |
+
'HAVE_CONFSTR': 1,
|
| 203 |
+
'HAVE_CONIO_H': 0,
|
| 204 |
+
'HAVE_COPYSIGN': 1,
|
| 205 |
+
'HAVE_COPY_FILE_RANGE': 0,
|
| 206 |
+
'HAVE_CRYPT_H': 1,
|
| 207 |
+
'HAVE_CRYPT_R': 1,
|
| 208 |
+
'HAVE_CTERMID': 1,
|
| 209 |
+
'HAVE_CTERMID_R': 0,
|
| 210 |
+
'HAVE_CURSES_FILTER': 1,
|
| 211 |
+
'HAVE_CURSES_H': 1,
|
| 212 |
+
'HAVE_CURSES_HAS_KEY': 1,
|
| 213 |
+
'HAVE_CURSES_IMMEDOK': 1,
|
| 214 |
+
'HAVE_CURSES_IS_PAD': 1,
|
| 215 |
+
'HAVE_CURSES_IS_TERM_RESIZED': 1,
|
| 216 |
+
'HAVE_CURSES_RESIZETERM': 1,
|
| 217 |
+
'HAVE_CURSES_RESIZE_TERM': 1,
|
| 218 |
+
'HAVE_CURSES_SYNCOK': 1,
|
| 219 |
+
'HAVE_CURSES_TYPEAHEAD': 1,
|
| 220 |
+
'HAVE_CURSES_USE_ENV': 1,
|
| 221 |
+
'HAVE_CURSES_WCHGAT': 1,
|
| 222 |
+
'HAVE_DECL_ISFINITE': 1,
|
| 223 |
+
'HAVE_DECL_ISINF': 1,
|
| 224 |
+
'HAVE_DECL_ISNAN': 1,
|
| 225 |
+
'HAVE_DECL_RTLD_DEEPBIND': 1,
|
| 226 |
+
'HAVE_DECL_RTLD_GLOBAL': 1,
|
| 227 |
+
'HAVE_DECL_RTLD_LAZY': 1,
|
| 228 |
+
'HAVE_DECL_RTLD_LOCAL': 1,
|
| 229 |
+
'HAVE_DECL_RTLD_MEMBER': 0,
|
| 230 |
+
'HAVE_DECL_RTLD_NODELETE': 1,
|
| 231 |
+
'HAVE_DECL_RTLD_NOLOAD': 1,
|
| 232 |
+
'HAVE_DECL_RTLD_NOW': 1,
|
| 233 |
+
'HAVE_DECL_TZNAME': 0,
|
| 234 |
+
'HAVE_DEVICE_MACROS': 1,
|
| 235 |
+
'HAVE_DEV_PTC': 0,
|
| 236 |
+
'HAVE_DEV_PTMX': 1,
|
| 237 |
+
'HAVE_DIRECT_H': 0,
|
| 238 |
+
'HAVE_DIRENT_D_TYPE': 1,
|
| 239 |
+
'HAVE_DIRENT_H': 1,
|
| 240 |
+
'HAVE_DIRFD': 1,
|
| 241 |
+
'HAVE_DLFCN_H': 1,
|
| 242 |
+
'HAVE_DLOPEN': 1,
|
| 243 |
+
'HAVE_DUP2': 1,
|
| 244 |
+
'HAVE_DUP3': 1,
|
| 245 |
+
'HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH': 0,
|
| 246 |
+
'HAVE_DYNAMIC_LOADING': 1,
|
| 247 |
+
'HAVE_ENDIAN_H': 1,
|
| 248 |
+
'HAVE_EPOLL': 1,
|
| 249 |
+
'HAVE_EPOLL_CREATE1': 1,
|
| 250 |
+
'HAVE_ERF': 1,
|
| 251 |
+
'HAVE_ERFC': 1,
|
| 252 |
+
'HAVE_ERRNO_H': 1,
|
| 253 |
+
'HAVE_EVENTFD': 1,
|
| 254 |
+
'HAVE_EXECV': 1,
|
| 255 |
+
'HAVE_EXPLICIT_BZERO': 0,
|
| 256 |
+
'HAVE_EXPLICIT_MEMSET': 0,
|
| 257 |
+
'HAVE_EXPM1': 1,
|
| 258 |
+
'HAVE_FACCESSAT': 1,
|
| 259 |
+
'HAVE_FCHDIR': 1,
|
| 260 |
+
'HAVE_FCHMOD': 1,
|
| 261 |
+
'HAVE_FCHMODAT': 1,
|
| 262 |
+
'HAVE_FCHOWN': 1,
|
| 263 |
+
'HAVE_FCHOWNAT': 1,
|
| 264 |
+
'HAVE_FCNTL_H': 1,
|
| 265 |
+
'HAVE_FDATASYNC': 1,
|
| 266 |
+
'HAVE_FDOPENDIR': 1,
|
| 267 |
+
'HAVE_FDWALK': 0,
|
| 268 |
+
'HAVE_FEXECVE': 1,
|
| 269 |
+
'HAVE_FINITE': 1,
|
| 270 |
+
'HAVE_FLOCK': 1,
|
| 271 |
+
'HAVE_FORK': 1,
|
| 272 |
+
'HAVE_FORKPTY': 1,
|
| 273 |
+
'HAVE_FPATHCONF': 1,
|
| 274 |
+
'HAVE_FSEEK64': 0,
|
| 275 |
+
'HAVE_FSEEKO': 1,
|
| 276 |
+
'HAVE_FSTATAT': 1,
|
| 277 |
+
'HAVE_FSTATVFS': 1,
|
| 278 |
+
'HAVE_FSYNC': 1,
|
| 279 |
+
'HAVE_FTELL64': 0,
|
| 280 |
+
'HAVE_FTELLO': 1,
|
| 281 |
+
'HAVE_FTIME': 1,
|
| 282 |
+
'HAVE_FTRUNCATE': 1,
|
| 283 |
+
'HAVE_FUTIMENS': 1,
|
| 284 |
+
'HAVE_FUTIMES': 1,
|
| 285 |
+
'HAVE_FUTIMESAT': 1,
|
| 286 |
+
'HAVE_GAI_STRERROR': 1,
|
| 287 |
+
'HAVE_GAMMA': 1,
|
| 288 |
+
'HAVE_GCC_ASM_FOR_MC68881': 0,
|
| 289 |
+
'HAVE_GCC_ASM_FOR_X64': 1,
|
| 290 |
+
'HAVE_GCC_ASM_FOR_X87': 1,
|
| 291 |
+
'HAVE_GCC_UINT128_T': 1,
|
| 292 |
+
'HAVE_GETADDRINFO': 1,
|
| 293 |
+
'HAVE_GETC_UNLOCKED': 1,
|
| 294 |
+
'HAVE_GETENTROPY': 0,
|
| 295 |
+
'HAVE_GETGRGID_R': 1,
|
| 296 |
+
'HAVE_GETGRNAM_R': 1,
|
| 297 |
+
'HAVE_GETGROUPLIST': 1,
|
| 298 |
+
'HAVE_GETGROUPS': 1,
|
| 299 |
+
'HAVE_GETHOSTBYNAME': 0,
|
| 300 |
+
'HAVE_GETHOSTBYNAME_R': 1,
|
| 301 |
+
'HAVE_GETHOSTBYNAME_R_3_ARG': 0,
|
| 302 |
+
'HAVE_GETHOSTBYNAME_R_5_ARG': 0,
|
| 303 |
+
'HAVE_GETHOSTBYNAME_R_6_ARG': 1,
|
| 304 |
+
'HAVE_GETITIMER': 1,
|
| 305 |
+
'HAVE_GETLOADAVG': 1,
|
| 306 |
+
'HAVE_GETLOGIN': 1,
|
| 307 |
+
'HAVE_GETNAMEINFO': 1,
|
| 308 |
+
'HAVE_GETPAGESIZE': 1,
|
| 309 |
+
'HAVE_GETPEERNAME': 1,
|
| 310 |
+
'HAVE_GETPGID': 1,
|
| 311 |
+
'HAVE_GETPGRP': 1,
|
| 312 |
+
'HAVE_GETPID': 1,
|
| 313 |
+
'HAVE_GETPRIORITY': 1,
|
| 314 |
+
'HAVE_GETPWENT': 1,
|
| 315 |
+
'HAVE_GETPWNAM_R': 1,
|
| 316 |
+
'HAVE_GETPWUID_R': 1,
|
| 317 |
+
'HAVE_GETRANDOM': 0,
|
| 318 |
+
'HAVE_GETRANDOM_SYSCALL': 1,
|
| 319 |
+
'HAVE_GETRESGID': 1,
|
| 320 |
+
'HAVE_GETRESUID': 1,
|
| 321 |
+
'HAVE_GETSID': 1,
|
| 322 |
+
'HAVE_GETSPENT': 1,
|
| 323 |
+
'HAVE_GETSPNAM': 1,
|
| 324 |
+
'HAVE_GETWD': 1,
|
| 325 |
+
'HAVE_GLIBC_MEMMOVE_BUG': 0,
|
| 326 |
+
'HAVE_GRP_H': 1,
|
| 327 |
+
'HAVE_HSTRERROR': 1,
|
| 328 |
+
'HAVE_HTOLE64': 1,
|
| 329 |
+
'HAVE_HYPOT': 1,
|
| 330 |
+
'HAVE_IEEEFP_H': 0,
|
| 331 |
+
'HAVE_IF_NAMEINDEX': 1,
|
| 332 |
+
'HAVE_INET_ATON': 1,
|
| 333 |
+
'HAVE_INET_PTON': 1,
|
| 334 |
+
'HAVE_INITGROUPS': 1,
|
| 335 |
+
'HAVE_INTTYPES_H': 1,
|
| 336 |
+
'HAVE_IO_H': 0,
|
| 337 |
+
'HAVE_IPA_PURE_CONST_BUG': 0,
|
| 338 |
+
'HAVE_KILL': 1,
|
| 339 |
+
'HAVE_KILLPG': 1,
|
| 340 |
+
'HAVE_KQUEUE': 0,
|
| 341 |
+
'HAVE_LANGINFO_H': 1,
|
| 342 |
+
'HAVE_LARGEFILE_SUPPORT': 0,
|
| 343 |
+
'HAVE_LCHFLAGS': 0,
|
| 344 |
+
'HAVE_LCHMOD': 0,
|
| 345 |
+
'HAVE_LCHOWN': 1,
|
| 346 |
+
'HAVE_LGAMMA': 1,
|
| 347 |
+
'HAVE_LIBDL': 1,
|
| 348 |
+
'HAVE_LIBDLD': 0,
|
| 349 |
+
'HAVE_LIBIEEE': 0,
|
| 350 |
+
'HAVE_LIBINTL_H': 1,
|
| 351 |
+
'HAVE_LIBREADLINE': 1,
|
| 352 |
+
'HAVE_LIBRESOLV': 0,
|
| 353 |
+
'HAVE_LIBSENDFILE': 0,
|
| 354 |
+
'HAVE_LIBUTIL_H': 0,
|
| 355 |
+
'HAVE_LIBUUID': 1,
|
| 356 |
+
'HAVE_LINK': 1,
|
| 357 |
+
'HAVE_LINKAT': 1,
|
| 358 |
+
'HAVE_LINUX_AUXVEC_H': 1,
|
| 359 |
+
'HAVE_LINUX_CAN_BCM_H': 1,
|
| 360 |
+
'HAVE_LINUX_CAN_H': 1,
|
| 361 |
+
'HAVE_LINUX_CAN_J1939_H': 0,
|
| 362 |
+
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 1,
|
| 363 |
+
'HAVE_LINUX_CAN_RAW_H': 1,
|
| 364 |
+
'HAVE_LINUX_CAN_RAW_JOIN_FILTERS': 1,
|
| 365 |
+
'HAVE_LINUX_MEMFD_H': 1,
|
| 366 |
+
'HAVE_LINUX_NETLINK_H': 1,
|
| 367 |
+
'HAVE_LINUX_QRTR_H': 0,
|
| 368 |
+
'HAVE_LINUX_RANDOM_H': 1,
|
| 369 |
+
'HAVE_LINUX_TIPC_H': 1,
|
| 370 |
+
'HAVE_LINUX_VM_SOCKETS_H': 1,
|
| 371 |
+
'HAVE_LINUX_WAIT_H': 1,
|
| 372 |
+
'HAVE_LOCKF': 1,
|
| 373 |
+
'HAVE_LOG1P': 1,
|
| 374 |
+
'HAVE_LOG2': 1,
|
| 375 |
+
'HAVE_LONG_DOUBLE': 1,
|
| 376 |
+
'HAVE_LSTAT': 1,
|
| 377 |
+
'HAVE_LUTIMES': 1,
|
| 378 |
+
'HAVE_MADVISE': 1,
|
| 379 |
+
'HAVE_MAKEDEV': 1,
|
| 380 |
+
'HAVE_MBRTOWC': 1,
|
| 381 |
+
'HAVE_MEMFD_CREATE': 0,
|
| 382 |
+
'HAVE_MEMORY_H': 1,
|
| 383 |
+
'HAVE_MEMRCHR': 1,
|
| 384 |
+
'HAVE_MKDIRAT': 1,
|
| 385 |
+
'HAVE_MKFIFO': 1,
|
| 386 |
+
'HAVE_MKFIFOAT': 1,
|
| 387 |
+
'HAVE_MKNOD': 1,
|
| 388 |
+
'HAVE_MKNODAT': 1,
|
| 389 |
+
'HAVE_MKTIME': 1,
|
| 390 |
+
'HAVE_MMAP': 1,
|
| 391 |
+
'HAVE_MREMAP': 1,
|
| 392 |
+
'HAVE_NCURSES_H': 1,
|
| 393 |
+
'HAVE_NDIR_H': 0,
|
| 394 |
+
'HAVE_NETPACKET_PACKET_H': 1,
|
| 395 |
+
'HAVE_NET_IF_H': 1,
|
| 396 |
+
'HAVE_NICE': 1,
|
| 397 |
+
'HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION': 0,
|
| 398 |
+
'HAVE_OPENAT': 1,
|
| 399 |
+
'HAVE_OPENPTY': 1,
|
| 400 |
+
'HAVE_PATHCONF': 1,
|
| 401 |
+
'HAVE_PAUSE': 1,
|
| 402 |
+
'HAVE_PIPE2': 1,
|
| 403 |
+
'HAVE_PLOCK': 0,
|
| 404 |
+
'HAVE_POLL': 1,
|
| 405 |
+
'HAVE_POLL_H': 1,
|
| 406 |
+
'HAVE_POSIX_FADVISE': 1,
|
| 407 |
+
'HAVE_POSIX_FALLOCATE': 1,
|
| 408 |
+
'HAVE_POSIX_SPAWN': 1,
|
| 409 |
+
'HAVE_POSIX_SPAWNP': 1,
|
| 410 |
+
'HAVE_PREAD': 1,
|
| 411 |
+
'HAVE_PREADV': 1,
|
| 412 |
+
'HAVE_PREADV2': 0,
|
| 413 |
+
'HAVE_PRLIMIT': 1,
|
| 414 |
+
'HAVE_PROCESS_H': 0,
|
| 415 |
+
'HAVE_PROTOTYPES': 1,
|
| 416 |
+
'HAVE_PTHREAD_CONDATTR_SETCLOCK': 1,
|
| 417 |
+
'HAVE_PTHREAD_DESTRUCTOR': 0,
|
| 418 |
+
'HAVE_PTHREAD_GETCPUCLOCKID': 1,
|
| 419 |
+
'HAVE_PTHREAD_H': 1,
|
| 420 |
+
'HAVE_PTHREAD_INIT': 0,
|
| 421 |
+
'HAVE_PTHREAD_KILL': 1,
|
| 422 |
+
'HAVE_PTHREAD_SIGMASK': 1,
|
| 423 |
+
'HAVE_PTY_H': 1,
|
| 424 |
+
'HAVE_PWRITE': 1,
|
| 425 |
+
'HAVE_PWRITEV': 1,
|
| 426 |
+
'HAVE_PWRITEV2': 0,
|
| 427 |
+
'HAVE_READLINK': 1,
|
| 428 |
+
'HAVE_READLINKAT': 1,
|
| 429 |
+
'HAVE_READV': 1,
|
| 430 |
+
'HAVE_REALPATH': 1,
|
| 431 |
+
'HAVE_RENAMEAT': 1,
|
| 432 |
+
'HAVE_RL_APPEND_HISTORY': 1,
|
| 433 |
+
'HAVE_RL_CATCH_SIGNAL': 1,
|
| 434 |
+
'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1,
|
| 435 |
+
'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1,
|
| 436 |
+
'HAVE_RL_COMPLETION_MATCHES': 1,
|
| 437 |
+
'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1,
|
| 438 |
+
'HAVE_RL_PRE_INPUT_HOOK': 1,
|
| 439 |
+
'HAVE_RL_RESIZE_TERMINAL': 1,
|
| 440 |
+
'HAVE_ROUND': 1,
|
| 441 |
+
'HAVE_RTPSPAWN': 0,
|
| 442 |
+
'HAVE_SCHED_GET_PRIORITY_MAX': 1,
|
| 443 |
+
'HAVE_SCHED_H': 1,
|
| 444 |
+
'HAVE_SCHED_RR_GET_INTERVAL': 1,
|
| 445 |
+
'HAVE_SCHED_SETAFFINITY': 1,
|
| 446 |
+
'HAVE_SCHED_SETPARAM': 1,
|
| 447 |
+
'HAVE_SCHED_SETSCHEDULER': 1,
|
| 448 |
+
'HAVE_SEM_CLOCKWAIT': 0,
|
| 449 |
+
'HAVE_SEM_GETVALUE': 1,
|
| 450 |
+
'HAVE_SEM_OPEN': 1,
|
| 451 |
+
'HAVE_SEM_TIMEDWAIT': 1,
|
| 452 |
+
'HAVE_SEM_UNLINK': 1,
|
| 453 |
+
'HAVE_SENDFILE': 1,
|
| 454 |
+
'HAVE_SETEGID': 1,
|
| 455 |
+
'HAVE_SETEUID': 1,
|
| 456 |
+
'HAVE_SETGID': 1,
|
| 457 |
+
'HAVE_SETGROUPS': 1,
|
| 458 |
+
'HAVE_SETHOSTNAME': 1,
|
| 459 |
+
'HAVE_SETITIMER': 1,
|
| 460 |
+
'HAVE_SETLOCALE': 1,
|
| 461 |
+
'HAVE_SETPGID': 1,
|
| 462 |
+
'HAVE_SETPGRP': 1,
|
| 463 |
+
'HAVE_SETPRIORITY': 1,
|
| 464 |
+
'HAVE_SETREGID': 1,
|
| 465 |
+
'HAVE_SETRESGID': 1,
|
| 466 |
+
'HAVE_SETRESUID': 1,
|
| 467 |
+
'HAVE_SETREUID': 1,
|
| 468 |
+
'HAVE_SETSID': 1,
|
| 469 |
+
'HAVE_SETUID': 1,
|
| 470 |
+
'HAVE_SETVBUF': 1,
|
| 471 |
+
'HAVE_SHADOW_H': 1,
|
| 472 |
+
'HAVE_SHM_OPEN': 1,
|
| 473 |
+
'HAVE_SHM_UNLINK': 1,
|
| 474 |
+
'HAVE_SIGACTION': 1,
|
| 475 |
+
'HAVE_SIGALTSTACK': 1,
|
| 476 |
+
'HAVE_SIGFILLSET': 1,
|
| 477 |
+
'HAVE_SIGINFO_T_SI_BAND': 1,
|
| 478 |
+
'HAVE_SIGINTERRUPT': 1,
|
| 479 |
+
'HAVE_SIGNAL_H': 1,
|
| 480 |
+
'HAVE_SIGPENDING': 1,
|
| 481 |
+
'HAVE_SIGRELSE': 1,
|
| 482 |
+
'HAVE_SIGTIMEDWAIT': 1,
|
| 483 |
+
'HAVE_SIGWAIT': 1,
|
| 484 |
+
'HAVE_SIGWAITINFO': 1,
|
| 485 |
+
'HAVE_SNPRINTF': 1,
|
| 486 |
+
'HAVE_SOCKADDR_ALG': 1,
|
| 487 |
+
'HAVE_SOCKADDR_SA_LEN': 0,
|
| 488 |
+
'HAVE_SOCKADDR_STORAGE': 1,
|
| 489 |
+
'HAVE_SOCKETPAIR': 1,
|
| 490 |
+
'HAVE_SPAWN_H': 1,
|
| 491 |
+
'HAVE_SPLICE': 1,
|
| 492 |
+
'HAVE_SSIZE_T': 1,
|
| 493 |
+
'HAVE_STATVFS': 1,
|
| 494 |
+
'HAVE_STAT_TV_NSEC': 1,
|
| 495 |
+
'HAVE_STAT_TV_NSEC2': 0,
|
| 496 |
+
'HAVE_STDARG_PROTOTYPES': 1,
|
| 497 |
+
'HAVE_STDINT_H': 1,
|
| 498 |
+
'HAVE_STDLIB_H': 1,
|
| 499 |
+
'HAVE_STD_ATOMIC': 1,
|
| 500 |
+
'HAVE_STRFTIME': 1,
|
| 501 |
+
'HAVE_STRINGS_H': 1,
|
| 502 |
+
'HAVE_STRING_H': 1,
|
| 503 |
+
'HAVE_STRLCPY': 0,
|
| 504 |
+
'HAVE_STROPTS_H': 0,
|
| 505 |
+
'HAVE_STRSIGNAL': 1,
|
| 506 |
+
'HAVE_STRUCT_PASSWD_PW_GECOS': 1,
|
| 507 |
+
'HAVE_STRUCT_PASSWD_PW_PASSWD': 1,
|
| 508 |
+
'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0,
|
| 509 |
+
'HAVE_STRUCT_STAT_ST_BLKSIZE': 1,
|
| 510 |
+
'HAVE_STRUCT_STAT_ST_BLOCKS': 1,
|
| 511 |
+
'HAVE_STRUCT_STAT_ST_FLAGS': 0,
|
| 512 |
+
'HAVE_STRUCT_STAT_ST_GEN': 0,
|
| 513 |
+
'HAVE_STRUCT_STAT_ST_RDEV': 1,
|
| 514 |
+
'HAVE_STRUCT_TM_TM_ZONE': 1,
|
| 515 |
+
'HAVE_SYMLINK': 1,
|
| 516 |
+
'HAVE_SYMLINKAT': 1,
|
| 517 |
+
'HAVE_SYNC': 1,
|
| 518 |
+
'HAVE_SYSCONF': 1,
|
| 519 |
+
'HAVE_SYSEXITS_H': 1,
|
| 520 |
+
'HAVE_SYS_AUDIOIO_H': 0,
|
| 521 |
+
'HAVE_SYS_AUXV_H': 1,
|
| 522 |
+
'HAVE_SYS_BSDTTY_H': 0,
|
| 523 |
+
'HAVE_SYS_DEVPOLL_H': 0,
|
| 524 |
+
'HAVE_SYS_DIR_H': 0,
|
| 525 |
+
'HAVE_SYS_ENDIAN_H': 0,
|
| 526 |
+
'HAVE_SYS_EPOLL_H': 1,
|
| 527 |
+
'HAVE_SYS_EVENTFD_H': 1,
|
| 528 |
+
'HAVE_SYS_EVENT_H': 0,
|
| 529 |
+
'HAVE_SYS_FILE_H': 1,
|
| 530 |
+
'HAVE_SYS_IOCTL_H': 1,
|
| 531 |
+
'HAVE_SYS_KERN_CONTROL_H': 0,
|
| 532 |
+
'HAVE_SYS_LOADAVG_H': 0,
|
| 533 |
+
'HAVE_SYS_LOCK_H': 0,
|
| 534 |
+
'HAVE_SYS_MEMFD_H': 0,
|
| 535 |
+
'HAVE_SYS_MKDEV_H': 0,
|
| 536 |
+
'HAVE_SYS_MMAN_H': 1,
|
| 537 |
+
'HAVE_SYS_MODEM_H': 0,
|
| 538 |
+
'HAVE_SYS_NDIR_H': 0,
|
| 539 |
+
'HAVE_SYS_PARAM_H': 1,
|
| 540 |
+
'HAVE_SYS_POLL_H': 1,
|
| 541 |
+
'HAVE_SYS_RANDOM_H': 0,
|
| 542 |
+
'HAVE_SYS_RESOURCE_H': 1,
|
| 543 |
+
'HAVE_SYS_SELECT_H': 1,
|
| 544 |
+
'HAVE_SYS_SENDFILE_H': 1,
|
| 545 |
+
'HAVE_SYS_SOCKET_H': 1,
|
| 546 |
+
'HAVE_SYS_STATVFS_H': 1,
|
| 547 |
+
'HAVE_SYS_STAT_H': 1,
|
| 548 |
+
'HAVE_SYS_SYSCALL_H': 1,
|
| 549 |
+
'HAVE_SYS_SYSMACROS_H': 1,
|
| 550 |
+
'HAVE_SYS_SYS_DOMAIN_H': 0,
|
| 551 |
+
'HAVE_SYS_TERMIO_H': 0,
|
| 552 |
+
'HAVE_SYS_TIMES_H': 1,
|
| 553 |
+
'HAVE_SYS_TIME_H': 1,
|
| 554 |
+
'HAVE_SYS_TYPES_H': 1,
|
| 555 |
+
'HAVE_SYS_UIO_H': 1,
|
| 556 |
+
'HAVE_SYS_UN_H': 1,
|
| 557 |
+
'HAVE_SYS_UTSNAME_H': 1,
|
| 558 |
+
'HAVE_SYS_WAIT_H': 1,
|
| 559 |
+
'HAVE_SYS_XATTR_H': 1,
|
| 560 |
+
'HAVE_TCGETPGRP': 1,
|
| 561 |
+
'HAVE_TCSETPGRP': 1,
|
| 562 |
+
'HAVE_TEMPNAM': 1,
|
| 563 |
+
'HAVE_TERMIOS_H': 1,
|
| 564 |
+
'HAVE_TERM_H': 1,
|
| 565 |
+
'HAVE_TGAMMA': 1,
|
| 566 |
+
'HAVE_TIMEGM': 1,
|
| 567 |
+
'HAVE_TIMES': 1,
|
| 568 |
+
'HAVE_TMPFILE': 1,
|
| 569 |
+
'HAVE_TMPNAM': 1,
|
| 570 |
+
'HAVE_TMPNAM_R': 1,
|
| 571 |
+
'HAVE_TM_ZONE': 1,
|
| 572 |
+
'HAVE_TRUNCATE': 1,
|
| 573 |
+
'HAVE_TZNAME': 0,
|
| 574 |
+
'HAVE_UCS4_TCL': 0,
|
| 575 |
+
'HAVE_UNAME': 1,
|
| 576 |
+
'HAVE_UNISTD_H': 1,
|
| 577 |
+
'HAVE_UNLINKAT': 1,
|
| 578 |
+
'HAVE_USABLE_WCHAR_T': 0,
|
| 579 |
+
'HAVE_UTIL_H': 0,
|
| 580 |
+
'HAVE_UTIMENSAT': 1,
|
| 581 |
+
'HAVE_UTIMES': 1,
|
| 582 |
+
'HAVE_UTIME_H': 1,
|
| 583 |
+
'HAVE_UUID_CREATE': 0,
|
| 584 |
+
'HAVE_UUID_ENC_BE': 0,
|
| 585 |
+
'HAVE_UUID_GENERATE_TIME_SAFE': 1,
|
| 586 |
+
'HAVE_UUID_H': 1,
|
| 587 |
+
'HAVE_UUID_UUID_H': 1,
|
| 588 |
+
'HAVE_VFORK': 1,
|
| 589 |
+
'HAVE_WAIT3': 1,
|
| 590 |
+
'HAVE_WAIT4': 1,
|
| 591 |
+
'HAVE_WAITID': 1,
|
| 592 |
+
'HAVE_WAITPID': 1,
|
| 593 |
+
'HAVE_WCHAR_H': 1,
|
| 594 |
+
'HAVE_WCSCOLL': 1,
|
| 595 |
+
'HAVE_WCSFTIME': 1,
|
| 596 |
+
'HAVE_WCSXFRM': 1,
|
| 597 |
+
'HAVE_WMEMCMP': 1,
|
| 598 |
+
'HAVE_WORKING_TZSET': 1,
|
| 599 |
+
'HAVE_WRITEV': 1,
|
| 600 |
+
'HAVE_ZLIB_COPY': 1,
|
| 601 |
+
'HAVE__GETPTY': 0,
|
| 602 |
+
'HOST_GNU_TYPE': 'x86_64-conda_cos6-linux-gnu',
|
| 603 |
+
'INCLDIRSTOMAKE': '/root/envs/evalkit_tf446/include '
|
| 604 |
+
'/root/envs/evalkit_tf446/include '
|
| 605 |
+
'/root/envs/evalkit_tf446/include/python3.10 '
|
| 606 |
+
'/root/envs/evalkit_tf446/include/python3.10',
|
| 607 |
+
'INCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 608 |
+
'INCLUDEPY': '/root/envs/evalkit_tf446/include/python3.10',
|
| 609 |
+
'INSTALL': '/usr/bin/install -c',
|
| 610 |
+
'INSTALL_DATA': '/usr/bin/install -c -m 644',
|
| 611 |
+
'INSTALL_PROGRAM': '/usr/bin/install -c',
|
| 612 |
+
'INSTALL_SCRIPT': '/usr/bin/install -c',
|
| 613 |
+
'INSTALL_SHARED': '/usr/bin/install -c -m 755',
|
| 614 |
+
'INSTSONAME': 'libpython3.10.a',
|
| 615 |
+
'IO_H': 'Modules/_io/_iomodule.h',
|
| 616 |
+
'IO_OBJS': '\\',
|
| 617 |
+
'LDCXXSHARED': 'g++ -pthread -B /root/envs/evalkit_tf446/compiler_compat -shared',
|
| 618 |
+
'LDFLAGS': ''
|
| 619 |
+
''
|
| 620 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 621 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 622 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 623 |
+
''
|
| 624 |
+
''
|
| 625 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 626 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 627 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 628 |
+
'LDLIBRARY': 'libpython3.10.a',
|
| 629 |
+
'LDLIBRARYDIR': '',
|
| 630 |
+
'LDSHARED': 'gcc -pthread -B /root/envs/evalkit_tf446/compiler_compat -shared '
|
| 631 |
+
''
|
| 632 |
+
''
|
| 633 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 634 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 635 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 636 |
+
''
|
| 637 |
+
''
|
| 638 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 639 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 640 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 641 |
+
'LDVERSION': '3.10',
|
| 642 |
+
'LIBC': '',
|
| 643 |
+
'LIBDEST': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 644 |
+
'LIBDIR': '/root/envs/evalkit_tf446/lib',
|
| 645 |
+
'LIBFFI_INCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 646 |
+
'LIBM': '-lm',
|
| 647 |
+
'LIBOBJDIR': 'Python/',
|
| 648 |
+
'LIBOBJS': '',
|
| 649 |
+
'LIBPC': '/root/envs/evalkit_tf446/lib/pkgconfig',
|
| 650 |
+
'LIBPL': '/root/envs/evalkit_tf446/lib/python3.10/config-3.10-x86_64-linux-gnu',
|
| 651 |
+
'LIBPYTHON': '',
|
| 652 |
+
'LIBRARY': 'libpython3.10.a',
|
| 653 |
+
'LIBRARY_DEPS': 'libpython3.10.a',
|
| 654 |
+
'LIBRARY_OBJS': '\\',
|
| 655 |
+
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
|
| 656 |
+
'LIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 657 |
+
'LIBSUBDIRS': 'asyncio \\',
|
| 658 |
+
'LINKCC': 'gcc -pthread -B /root/envs/evalkit_tf446/compiler_compat',
|
| 659 |
+
'LINKFORSHARED': '-Xlinker -export-dynamic',
|
| 660 |
+
'LIPO_32BIT_FLAGS': '',
|
| 661 |
+
'LIPO_INTEL64_FLAGS': '',
|
| 662 |
+
'LLVM_PROF_ERR': 'no',
|
| 663 |
+
'LLVM_PROF_FILE': '',
|
| 664 |
+
'LLVM_PROF_MERGER': 'true',
|
| 665 |
+
'LN': 'ln',
|
| 666 |
+
'LOCALMODLIBS': '',
|
| 667 |
+
'MACHDEP': 'linux',
|
| 668 |
+
'MACHDEP_OBJS': '',
|
| 669 |
+
'MACHDESTLIB': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 670 |
+
'MACOSX_DEPLOYMENT_TARGET': '',
|
| 671 |
+
'MAINCC': 'gcc -pthread -B /root/envs/evalkit_tf446/compiler_compat',
|
| 672 |
+
'MAJOR_IN_MKDEV': 0,
|
| 673 |
+
'MAJOR_IN_SYSMACROS': 0,
|
| 674 |
+
'MAKESETUP': '/croot/python-split_1733933809325/work/Modules/makesetup',
|
| 675 |
+
'MANDIR': '/root/envs/evalkit_tf446/share/man',
|
| 676 |
+
'MKDIR_P': '/usr/bin/mkdir -p',
|
| 677 |
+
'MODBUILT_NAMES': 'posix errno pwd _sre _codecs _weakref _functools '
|
| 678 |
+
'_operator _collections _abc itertools atexit _signal '
|
| 679 |
+
'_stat time _thread _locale _io faulthandler '
|
| 680 |
+
'_tracemalloc _symtable xxsubtype',
|
| 681 |
+
'MODDISABLED_NAMES': '',
|
| 682 |
+
'MODLIBS': '',
|
| 683 |
+
'MODOBJS': 'Modules/posixmodule.o Modules/errnomodule.o '
|
| 684 |
+
'Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o '
|
| 685 |
+
'Modules/_weakref.o Modules/_functoolsmodule.o '
|
| 686 |
+
'Modules/_operator.o Modules/_collectionsmodule.o '
|
| 687 |
+
'Modules/_abc.o Modules/itertoolsmodule.o '
|
| 688 |
+
'Modules/atexitmodule.o Modules/signalmodule.o Modules/_stat.o '
|
| 689 |
+
'Modules/timemodule.o Modules/_threadmodule.o '
|
| 690 |
+
'Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o '
|
| 691 |
+
'Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o '
|
| 692 |
+
'Modules/textio.o Modules/stringio.o Modules/faulthandler.o '
|
| 693 |
+
'Modules/_tracemalloc.o Modules/symtablemodule.o '
|
| 694 |
+
'Modules/xxsubtype.o',
|
| 695 |
+
'MODULE_OBJS': '\\',
|
| 696 |
+
'MULTIARCH': 'x86_64-linux-gnu',
|
| 697 |
+
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
|
| 698 |
+
'MVWDELCH_IS_EXPRESSION': 1,
|
| 699 |
+
'NO_AS_NEEDED': '-Wl,--no-as-needed',
|
| 700 |
+
'OBJECT_OBJS': '\\',
|
| 701 |
+
'OPENSSL_INCLUDES': '-I/root/envs/evalkit_tf446/include',
|
| 702 |
+
'OPENSSL_LDFLAGS': '-L/root/envs/evalkit_tf446/lib',
|
| 703 |
+
'OPENSSL_LIBS': '-lssl -lcrypto',
|
| 704 |
+
'OPENSSL_RPATH': '',
|
| 705 |
+
'OPT': '-DNDEBUG -fwrapv -O2 -Wall',
|
| 706 |
+
'OTHER_LIBTOOL_OPT': '',
|
| 707 |
+
'PACKAGE_BUGREPORT': 0,
|
| 708 |
+
'PACKAGE_NAME': 0,
|
| 709 |
+
'PACKAGE_STRING': 0,
|
| 710 |
+
'PACKAGE_TARNAME': 0,
|
| 711 |
+
'PACKAGE_URL': 0,
|
| 712 |
+
'PACKAGE_VERSION': 0,
|
| 713 |
+
'PARSER_HEADERS': '\\',
|
| 714 |
+
'PARSER_OBJS': '\\ \\ Parser/myreadline.o Parser/tokenizer.o',
|
| 715 |
+
'PEGEN_HEADERS': '\\',
|
| 716 |
+
'PEGEN_OBJS': '\\',
|
| 717 |
+
'PGO_PROF_GEN_FLAG': '-fprofile-generate',
|
| 718 |
+
'PGO_PROF_USE_FLAG': '',
|
| 719 |
+
'PLATLIBDIR': 'lib',
|
| 720 |
+
'POBJS': '\\',
|
| 721 |
+
'POSIX_SEMAPHORES_NOT_ENABLED': 0,
|
| 722 |
+
'PROFILE_TASK': '-m test --pgo',
|
| 723 |
+
'PTHREAD_KEY_T_IS_COMPATIBLE_WITH_INT': 1,
|
| 724 |
+
'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1,
|
| 725 |
+
'PURIFY': '',
|
| 726 |
+
'PY3LIBRARY': '',
|
| 727 |
+
'PYLONG_BITS_IN_DIGIT': 0,
|
| 728 |
+
'PYTHON': 'python',
|
| 729 |
+
'PYTHONFRAMEWORK': '',
|
| 730 |
+
'PYTHONFRAMEWORKDIR': 'no-framework',
|
| 731 |
+
'PYTHONFRAMEWORKINSTALLDIR': '',
|
| 732 |
+
'PYTHONFRAMEWORKPREFIX': '',
|
| 733 |
+
'PYTHONPATH': '',
|
| 734 |
+
'PYTHON_FOR_BUILD': './python -E',
|
| 735 |
+
'PYTHON_FOR_REGEN': '',
|
| 736 |
+
'PYTHON_HEADERS': '\\',
|
| 737 |
+
'PYTHON_OBJS': '\\',
|
| 738 |
+
'PY_BUILD_ENVIRON': '',
|
| 739 |
+
'PY_BUILTIN_HASHLIB_HASHES': '"md5,sha1,sha256,sha512,sha3,blake2"',
|
| 740 |
+
'PY_BUILTIN_MODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG '
|
| 741 |
+
'-fwrapv -O2 -Wall '
|
| 742 |
+
'-fPIC '
|
| 743 |
+
'-O2 -isystem '
|
| 744 |
+
'/root/envs/evalkit_tf446/include '
|
| 745 |
+
''
|
| 746 |
+
''
|
| 747 |
+
''
|
| 748 |
+
''
|
| 749 |
+
'-fPIC '
|
| 750 |
+
'-O2 '
|
| 751 |
+
'-isystem '
|
| 752 |
+
'/root/envs/evalkit_tf446/include '
|
| 753 |
+
''
|
| 754 |
+
''
|
| 755 |
+
''
|
| 756 |
+
''
|
| 757 |
+
'-fno-semantic-interposition '
|
| 758 |
+
''
|
| 759 |
+
'-g -std=c99 -Wextra '
|
| 760 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 761 |
+
'-Wno-missing-field-initializers '
|
| 762 |
+
'-Werror=implicit-function-declaration '
|
| 763 |
+
'-fvisibility=hidden '
|
| 764 |
+
''
|
| 765 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 766 |
+
'-IObjects -IInclude -IPython -I. '
|
| 767 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 768 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 769 |
+
'/root/envs/evalkit_tf446/include '
|
| 770 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 771 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 772 |
+
'/root/envs/evalkit_tf446/include '
|
| 773 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 774 |
+
'-DPy_BUILD_CORE_BUILTIN',
|
| 775 |
+
'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 776 |
+
'-fPIC '
|
| 777 |
+
'-O2 '
|
| 778 |
+
'-isystem '
|
| 779 |
+
'/root/envs/evalkit_tf446/include '
|
| 780 |
+
''
|
| 781 |
+
''
|
| 782 |
+
''
|
| 783 |
+
'-fPIC '
|
| 784 |
+
'-O2 '
|
| 785 |
+
'-isystem '
|
| 786 |
+
'/root/envs/evalkit_tf446/include '
|
| 787 |
+
''
|
| 788 |
+
''
|
| 789 |
+
''
|
| 790 |
+
'',
|
| 791 |
+
'PY_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 792 |
+
'-g -std=c99 '
|
| 793 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 794 |
+
'-Wno-missing-field-initializers '
|
| 795 |
+
'-Werror=implicit-function-declaration '
|
| 796 |
+
'-fvisibility=hidden '
|
| 797 |
+
'-I/croot/python-split_1733933809325/work/Include/internal',
|
| 798 |
+
'PY_COERCE_C_LOCALE': 1,
|
| 799 |
+
'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 '
|
| 800 |
+
'-Wall -fPIC '
|
| 801 |
+
'-O2 '
|
| 802 |
+
'-isystem '
|
| 803 |
+
'/root/envs/evalkit_tf446/include '
|
| 804 |
+
''
|
| 805 |
+
''
|
| 806 |
+
''
|
| 807 |
+
'-fPIC '
|
| 808 |
+
'-O2 '
|
| 809 |
+
'-isystem '
|
| 810 |
+
'/root/envs/evalkit_tf446/include '
|
| 811 |
+
''
|
| 812 |
+
''
|
| 813 |
+
''
|
| 814 |
+
'-fno-semantic-interposition '
|
| 815 |
+
''
|
| 816 |
+
'-g -std=c99 -Wextra -Wno-unused-result '
|
| 817 |
+
'-Wno-unused-parameter -Wno-missing-field-initializers '
|
| 818 |
+
'-Werror=implicit-function-declaration -fvisibility=hidden '
|
| 819 |
+
''
|
| 820 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 821 |
+
'-IObjects -IInclude -IPython -I. '
|
| 822 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 823 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 824 |
+
'/root/envs/evalkit_tf446/include '
|
| 825 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 826 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 827 |
+
'/root/envs/evalkit_tf446/include '
|
| 828 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 829 |
+
'-DPy_BUILD_CORE',
|
| 830 |
+
'PY_CORE_LDFLAGS': ''
|
| 831 |
+
''
|
| 832 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 833 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 834 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 835 |
+
''
|
| 836 |
+
''
|
| 837 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 838 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 839 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 840 |
+
'-fno-semantic-interposition '
|
| 841 |
+
'-g',
|
| 842 |
+
'PY_CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 843 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 844 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 845 |
+
'/root/envs/evalkit_tf446/include '
|
| 846 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 847 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 848 |
+
'/root/envs/evalkit_tf446/include '
|
| 849 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 850 |
+
'PY_ENABLE_SHARED': 0,
|
| 851 |
+
'PY_FORMAT_SIZE_T': '"z"',
|
| 852 |
+
'PY_LDFLAGS': ''
|
| 853 |
+
''
|
| 854 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 855 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 856 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 857 |
+
''
|
| 858 |
+
''
|
| 859 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 860 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 861 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 862 |
+
'PY_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 863 |
+
'-g',
|
| 864 |
+
'PY_SSL_DEFAULT_CIPHERS': 1,
|
| 865 |
+
'PY_SSL_DEFAULT_CIPHER_STRING': 0,
|
| 866 |
+
'PY_STDMODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv '
|
| 867 |
+
'-O2 -Wall '
|
| 868 |
+
'-fPIC '
|
| 869 |
+
'-O2 -isystem '
|
| 870 |
+
'/root/envs/evalkit_tf446/include '
|
| 871 |
+
''
|
| 872 |
+
''
|
| 873 |
+
''
|
| 874 |
+
''
|
| 875 |
+
'-fPIC '
|
| 876 |
+
'-O2 '
|
| 877 |
+
'-isystem '
|
| 878 |
+
'/root/envs/evalkit_tf446/include '
|
| 879 |
+
''
|
| 880 |
+
''
|
| 881 |
+
''
|
| 882 |
+
''
|
| 883 |
+
'-fno-semantic-interposition '
|
| 884 |
+
'-g -std=c99 '
|
| 885 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 886 |
+
'-Wno-missing-field-initializers '
|
| 887 |
+
'-Werror=implicit-function-declaration '
|
| 888 |
+
'-fvisibility=hidden '
|
| 889 |
+
''
|
| 890 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 891 |
+
'-IObjects -IInclude -IPython -I. '
|
| 892 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 893 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 894 |
+
'/root/envs/evalkit_tf446/include '
|
| 895 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 896 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 897 |
+
'/root/envs/evalkit_tf446/include '
|
| 898 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 899 |
+
'Py_DEBUG': 0,
|
| 900 |
+
'Py_ENABLE_SHARED': 0,
|
| 901 |
+
'Py_HASH_ALGORITHM': 0,
|
| 902 |
+
'Py_TRACE_REFS': 0,
|
| 903 |
+
'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\',
|
| 904 |
+
'READELF': 'readelf',
|
| 905 |
+
'RESSRCDIR': 'Mac/Resources/framework',
|
| 906 |
+
'RETSIGTYPE': 'void',
|
| 907 |
+
'RUNSHARED': '',
|
| 908 |
+
'SCRIPTDIR': '/root/envs/evalkit_tf446/lib',
|
| 909 |
+
'SETPGRP_HAVE_ARG': 0,
|
| 910 |
+
'SHELL': '/bin/sh',
|
| 911 |
+
'SHLIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 912 |
+
'SHLIB_SUFFIX': '.so',
|
| 913 |
+
'SHM_NEEDS_LIBRT': 1,
|
| 914 |
+
'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0,
|
| 915 |
+
'SITEPATH': '',
|
| 916 |
+
'SIZEOF_DOUBLE': 8,
|
| 917 |
+
'SIZEOF_FLOAT': 4,
|
| 918 |
+
'SIZEOF_FPOS_T': 16,
|
| 919 |
+
'SIZEOF_INT': 4,
|
| 920 |
+
'SIZEOF_LONG': 8,
|
| 921 |
+
'SIZEOF_LONG_DOUBLE': 16,
|
| 922 |
+
'SIZEOF_LONG_LONG': 8,
|
| 923 |
+
'SIZEOF_OFF_T': 8,
|
| 924 |
+
'SIZEOF_PID_T': 4,
|
| 925 |
+
'SIZEOF_PTHREAD_KEY_T': 4,
|
| 926 |
+
'SIZEOF_PTHREAD_T': 8,
|
| 927 |
+
'SIZEOF_SHORT': 2,
|
| 928 |
+
'SIZEOF_SIZE_T': 8,
|
| 929 |
+
'SIZEOF_TIME_T': 8,
|
| 930 |
+
'SIZEOF_UINTPTR_T': 8,
|
| 931 |
+
'SIZEOF_VOID_P': 8,
|
| 932 |
+
'SIZEOF_WCHAR_T': 4,
|
| 933 |
+
'SIZEOF__BOOL': 1,
|
| 934 |
+
'SOABI': 'cpython-310-x86_64-linux-gnu',
|
| 935 |
+
'SRCDIRS': 'Parser Objects Python Modules Modules/_io Programs',
|
| 936 |
+
'SRC_GDB_HOOKS': '/croot/python-split_1733933809325/work/Tools/gdb/libpython.py',
|
| 937 |
+
'STATIC_LIBPYTHON': 1,
|
| 938 |
+
'STDC_HEADERS': 1,
|
| 939 |
+
'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */",
|
| 940 |
+
'STRIPFLAG': '-s',
|
| 941 |
+
'SUBDIRS': '',
|
| 942 |
+
'SUBDIRSTOO': 'Include Lib Misc',
|
| 943 |
+
'SYSLIBS': '-lm',
|
| 944 |
+
'SYS_SELECT_WITH_SYS_TIME': 1,
|
| 945 |
+
'TCLTK_INCLUDES': '-I/root/envs/evalkit_tf446/include',
|
| 946 |
+
'TCLTK_LIBS': '-L/root/envs/evalkit_tf446/lib '
|
| 947 |
+
'-ltcl8.6 -ltk8.6',
|
| 948 |
+
'TESTOPTS': '',
|
| 949 |
+
'TESTPATH': '',
|
| 950 |
+
'TESTPYTHON': './python',
|
| 951 |
+
'TESTPYTHONOPTS': '',
|
| 952 |
+
'TESTRUNNER': './python '
|
| 953 |
+
'/croot/python-split_1733933809325/work/Tools/scripts/run_tests.py',
|
| 954 |
+
'TESTSUBDIRS': 'ctypes/test \\',
|
| 955 |
+
'TESTTIMEOUT': 1200,
|
| 956 |
+
'TEST_MODULES': 'yes',
|
| 957 |
+
'THREAD_STACK_SIZE': 0,
|
| 958 |
+
'TIMEMODULE_LIB': 0,
|
| 959 |
+
'TIME_WITH_SYS_TIME': 1,
|
| 960 |
+
'TM_IN_SYS_TIME': 0,
|
| 961 |
+
'TZPATH': '/root/envs/evalkit_tf446/share/zoneinfo:/root/envs/evalkit_tf446/share/tzinfo',
|
| 962 |
+
'UNICODE_DEPS': '\\',
|
| 963 |
+
'UNIVERSALSDK': '',
|
| 964 |
+
'UPDATE_FILE': '/croot/python-split_1733933809325/work/Tools/scripts/update_file.py',
|
| 965 |
+
'USE_COMPUTED_GOTOS': 1,
|
| 966 |
+
'VERSION': '3.10',
|
| 967 |
+
'VPATH': '/croot/python-split_1733933809325/work',
|
| 968 |
+
'WHEEL_PKG_DIR': '',
|
| 969 |
+
'WINDOW_HAS_FLAGS': 1,
|
| 970 |
+
'WITH_DECIMAL_CONTEXTVAR': 1,
|
| 971 |
+
'WITH_DOC_STRINGS': 1,
|
| 972 |
+
'WITH_DTRACE': 0,
|
| 973 |
+
'WITH_DYLD': 0,
|
| 974 |
+
'WITH_EDITLINE': 0,
|
| 975 |
+
'WITH_LIBINTL': 0,
|
| 976 |
+
'WITH_NEXT_FRAMEWORK': 0,
|
| 977 |
+
'WITH_PYMALLOC': 1,
|
| 978 |
+
'WITH_VALGRIND': 0,
|
| 979 |
+
'X87_DOUBLE_ROUNDING': 0,
|
| 980 |
+
'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax',
|
| 981 |
+
'abs_builddir': '/croot/python-split_1733933809325/work/build-static',
|
| 982 |
+
'abs_srcdir': '/croot/python-split_1733933809325/work',
|
| 983 |
+
'datarootdir': '/root/envs/evalkit_tf446/share',
|
| 984 |
+
'exec_prefix': '/root/envs/evalkit_tf446',
|
| 985 |
+
'prefix': '/root/envs/evalkit_tf446',
|
| 986 |
+
'srcdir': '/croot/python-split_1733933809325/work'}
|
evalkit_tf446/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py.orig
ADDED
|
@@ -0,0 +1,986 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# system configuration generated and used by the sysconfig module
|
| 2 |
+
build_time_vars = {'ABIFLAGS': '',
|
| 3 |
+
'AC_APPLE_UNIVERSAL_BUILD': 0,
|
| 4 |
+
'AIX_BUILDDATE': 0,
|
| 5 |
+
'AIX_GENUINE_CPLUSPLUS': 0,
|
| 6 |
+
'ALIGNOF_LONG': 8,
|
| 7 |
+
'ALIGNOF_SIZE_T': 8,
|
| 8 |
+
'ALT_SOABI': 0,
|
| 9 |
+
'ANDROID_API_LEVEL': 0,
|
| 10 |
+
'AR': 'x86_64-conda-linux-gnu-ar',
|
| 11 |
+
'ARFLAGS': 'rcs',
|
| 12 |
+
'BASECFLAGS': '-Wno-unused-result -Wsign-compare',
|
| 13 |
+
'BASECPPFLAGS': '-IObjects -IInclude -IPython',
|
| 14 |
+
'BASEMODLIBS': '',
|
| 15 |
+
'BINDIR': '/root/envs/evalkit_tf446/bin',
|
| 16 |
+
'BINLIBDEST': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 17 |
+
'BLDLIBRARY': 'libpython3.10.a',
|
| 18 |
+
'BLDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 19 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 20 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 21 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 22 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 23 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 24 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 25 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 26 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 27 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 28 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 29 |
+
'BUILDEXE': '',
|
| 30 |
+
'BUILDPYTHON': 'python',
|
| 31 |
+
'BUILD_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 32 |
+
'BYTESTR_DEPS': '\\',
|
| 33 |
+
'CC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 34 |
+
'CCSHARED': '-fPIC',
|
| 35 |
+
'CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 36 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 37 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 38 |
+
'-isystem '
|
| 39 |
+
'/root/envs/evalkit_tf446/include '
|
| 40 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 41 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 42 |
+
' '
|
| 43 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 44 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 45 |
+
'-isystem '
|
| 46 |
+
'/root/envs/evalkit_tf446/include '
|
| 47 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 48 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 49 |
+
' ',
|
| 50 |
+
'CFLAGSFORSHARED': '',
|
| 51 |
+
'CFLAGS_ALIASING': '',
|
| 52 |
+
'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in '
|
| 53 |
+
'Makefile.pre.in',
|
| 54 |
+
'CONFIGURE_CFLAGS': '-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 55 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 56 |
+
'-ffunction-sections -pipe -isystem '
|
| 57 |
+
'/root/envs/evalkit_tf446/include '
|
| 58 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 59 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 60 |
+
' '
|
| 61 |
+
' ',
|
| 62 |
+
'CONFIGURE_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 63 |
+
' '
|
| 64 |
+
' -g -std=c99 -Wextra '
|
| 65 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 66 |
+
'-Wno-missing-field-initializers '
|
| 67 |
+
'-Werror=implicit-function-declaration '
|
| 68 |
+
'-fvisibility=hidden',
|
| 69 |
+
'CONFIGURE_CPPFLAGS': '-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 70 |
+
'/root/envs/evalkit_tf446/include '
|
| 71 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 72 |
+
'CONFIGURE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 73 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 74 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 75 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 76 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 77 |
+
'CONFIGURE_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 78 |
+
' '
|
| 79 |
+
' -g',
|
| 80 |
+
'CONFIG_ARGS': "'--prefix=/root/envs/evalkit_tf446' "
|
| 81 |
+
"'--build=x86_64-conda-linux-gnu' "
|
| 82 |
+
"'--host=x86_64-conda-linux-gnu' '--enable-ipv6' "
|
| 83 |
+
"'--with-ensurepip=no' "
|
| 84 |
+
"'--with-tzpath=/root/envs/evalkit_tf446/share/zoneinfo' "
|
| 85 |
+
"'--with-computed-gotos' '--with-system-ffi' "
|
| 86 |
+
"'--enable-loadable-sqlite-extensions' "
|
| 87 |
+
"'--with-tcltk-includes=-I/root/envs/evalkit_tf446/include' "
|
| 88 |
+
"'--with-tcltk-libs=-L/root/envs/evalkit_tf446/lib "
|
| 89 |
+
"-ltcl8.6 -ltk8.6' '--with-platlibdir=lib' '--with-lto' "
|
| 90 |
+
"'--enable-optimizations' "
|
| 91 |
+
"'-oldincludedir=/croot/python-split_1733933809325/_build_env/x86_64-conda-linux-gnu/sysroot/usr/include' "
|
| 92 |
+
"'--disable-shared' 'PROFILE_TASK=-m test --pgo' "
|
| 93 |
+
"'build_alias=x86_64-conda-linux-gnu' "
|
| 94 |
+
"'host_alias=x86_64-conda-linux-gnu' 'MACHDEP=linux' "
|
| 95 |
+
"'CC=x86_64-conda-linux-gnu-gcc' 'CFLAGS=-march=nocona "
|
| 96 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 97 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 98 |
+
'-pipe -isystem '
|
| 99 |
+
'/root/envs/evalkit_tf446/include '
|
| 100 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 101 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 102 |
+
' '
|
| 103 |
+
"' 'LDFLAGS=-Wl,-O2 -Wl,--sort-common -Wl,--as-needed "
|
| 104 |
+
'-Wl,-z,relro -Wl,-z,now -Wl,--disable-new-dtags '
|
| 105 |
+
'-Wl,--gc-sections '
|
| 106 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 107 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 108 |
+
"-L/root/envs/evalkit_tf446/lib' "
|
| 109 |
+
"'CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem "
|
| 110 |
+
'/root/envs/evalkit_tf446/include '
|
| 111 |
+
"-I/root/envs/evalkit_tf446/include' "
|
| 112 |
+
"'CPP=/croot/python-split_1733933809325/_build_env/bin/x86_64-conda-linux-gnu-cpp' "
|
| 113 |
+
"'PKG_CONFIG_PATH=/root/envs/evalkit_tf446/lib/pkgconfig'",
|
| 114 |
+
'CONFINCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 115 |
+
'CONFINCLUDEPY': '/root/envs/evalkit_tf446/include/python3.10',
|
| 116 |
+
'COREPYTHONPATH': '',
|
| 117 |
+
'COVERAGE_INFO': '/croot/python-split_1733933809325/work/build-static/coverage.info',
|
| 118 |
+
'COVERAGE_REPORT': '/croot/python-split_1733933809325/work/build-static/lcov-report',
|
| 119 |
+
'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov '
|
| 120 |
+
'report"',
|
| 121 |
+
'CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 122 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 123 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 124 |
+
'/root/envs/evalkit_tf446/include '
|
| 125 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 126 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 127 |
+
'/root/envs/evalkit_tf446/include '
|
| 128 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 129 |
+
'CXX': 'x86_64-conda-linux-gnu-c++ -pthread',
|
| 130 |
+
'DESTDIRS': '/root/envs/evalkit_tf446 '
|
| 131 |
+
'/root/envs/evalkit_tf446/lib '
|
| 132 |
+
'/root/envs/evalkit_tf446/lib/python3.10 '
|
| 133 |
+
'/root/envs/evalkit_tf446/lib/python3.10/lib-dynload',
|
| 134 |
+
'DESTLIB': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 135 |
+
'DESTPATH': '',
|
| 136 |
+
'DESTSHARED': '/root/envs/evalkit_tf446/lib/python3.10/lib-dynload',
|
| 137 |
+
'DFLAGS': '',
|
| 138 |
+
'DIRMODE': 755,
|
| 139 |
+
'DIST': 'README.rst ChangeLog configure configure.ac acconfig.h pyconfig.h.in '
|
| 140 |
+
'Makefile.pre.in Include Lib Misc Ext-dummy',
|
| 141 |
+
'DISTDIRS': 'Include Lib Misc Ext-dummy',
|
| 142 |
+
'DISTFILES': 'README.rst ChangeLog configure configure.ac acconfig.h '
|
| 143 |
+
'pyconfig.h.in Makefile.pre.in',
|
| 144 |
+
'DLINCLDIR': '.',
|
| 145 |
+
'DLLLIBRARY': '',
|
| 146 |
+
'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0,
|
| 147 |
+
'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0,
|
| 148 |
+
'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1,
|
| 149 |
+
'DTRACE': '',
|
| 150 |
+
'DTRACE_DEPS': '\\',
|
| 151 |
+
'DTRACE_HEADERS': '',
|
| 152 |
+
'DTRACE_OBJS': '',
|
| 153 |
+
'DYNLOADFILE': 'dynload_shlib.o',
|
| 154 |
+
'ENABLE_IPV6': 1,
|
| 155 |
+
'ENSUREPIP': 'no',
|
| 156 |
+
'EXE': '',
|
| 157 |
+
'EXEMODE': 755,
|
| 158 |
+
'EXPERIMENTAL_ISOLATED_SUBINTERPRETERS': 0,
|
| 159 |
+
'EXPORTSFROM': '',
|
| 160 |
+
'EXPORTSYMS': '',
|
| 161 |
+
'EXTRATESTOPTS': '',
|
| 162 |
+
'EXT_SUFFIX': '.cpython-310-x86_64-linux-gnu.so',
|
| 163 |
+
'FILEMODE': 644,
|
| 164 |
+
'FLOAT_WORDS_BIGENDIAN': 0,
|
| 165 |
+
'FLOCK_NEEDS_LIBBSD': 0,
|
| 166 |
+
'GETPGRP_HAVE_ARG': 0,
|
| 167 |
+
'GITBRANCH': '',
|
| 168 |
+
'GITTAG': '',
|
| 169 |
+
'GITVERSION': '',
|
| 170 |
+
'GNULD': 'yes',
|
| 171 |
+
'HAVE_ACCEPT4': 1,
|
| 172 |
+
'HAVE_ACOSH': 1,
|
| 173 |
+
'HAVE_ADDRINFO': 1,
|
| 174 |
+
'HAVE_ALARM': 1,
|
| 175 |
+
'HAVE_ALIGNED_REQUIRED': 0,
|
| 176 |
+
'HAVE_ALLOCA_H': 1,
|
| 177 |
+
'HAVE_ALTZONE': 0,
|
| 178 |
+
'HAVE_ASINH': 1,
|
| 179 |
+
'HAVE_ASM_TYPES_H': 1,
|
| 180 |
+
'HAVE_ATANH': 1,
|
| 181 |
+
'HAVE_BIND_TEXTDOMAIN_CODESET': 1,
|
| 182 |
+
'HAVE_BLUETOOTH_BLUETOOTH_H': 0,
|
| 183 |
+
'HAVE_BLUETOOTH_H': 0,
|
| 184 |
+
'HAVE_BROKEN_MBSTOWCS': 0,
|
| 185 |
+
'HAVE_BROKEN_NICE': 0,
|
| 186 |
+
'HAVE_BROKEN_PIPE_BUF': 0,
|
| 187 |
+
'HAVE_BROKEN_POLL': 0,
|
| 188 |
+
'HAVE_BROKEN_POSIX_SEMAPHORES': 0,
|
| 189 |
+
'HAVE_BROKEN_PTHREAD_SIGMASK': 0,
|
| 190 |
+
'HAVE_BROKEN_SEM_GETVALUE': 0,
|
| 191 |
+
'HAVE_BROKEN_UNSETENV': 0,
|
| 192 |
+
'HAVE_BUILTIN_ATOMIC': 1,
|
| 193 |
+
'HAVE_CHFLAGS': 0,
|
| 194 |
+
'HAVE_CHOWN': 1,
|
| 195 |
+
'HAVE_CHROOT': 1,
|
| 196 |
+
'HAVE_CLOCK': 1,
|
| 197 |
+
'HAVE_CLOCK_GETRES': 1,
|
| 198 |
+
'HAVE_CLOCK_GETTIME': 1,
|
| 199 |
+
'HAVE_CLOCK_SETTIME': 1,
|
| 200 |
+
'HAVE_CLOSE_RANGE': 0,
|
| 201 |
+
'HAVE_COMPUTED_GOTOS': 1,
|
| 202 |
+
'HAVE_CONFSTR': 1,
|
| 203 |
+
'HAVE_CONIO_H': 0,
|
| 204 |
+
'HAVE_COPYSIGN': 1,
|
| 205 |
+
'HAVE_COPY_FILE_RANGE': 0,
|
| 206 |
+
'HAVE_CRYPT_H': 1,
|
| 207 |
+
'HAVE_CRYPT_R': 1,
|
| 208 |
+
'HAVE_CTERMID': 1,
|
| 209 |
+
'HAVE_CTERMID_R': 0,
|
| 210 |
+
'HAVE_CURSES_FILTER': 1,
|
| 211 |
+
'HAVE_CURSES_H': 1,
|
| 212 |
+
'HAVE_CURSES_HAS_KEY': 1,
|
| 213 |
+
'HAVE_CURSES_IMMEDOK': 1,
|
| 214 |
+
'HAVE_CURSES_IS_PAD': 1,
|
| 215 |
+
'HAVE_CURSES_IS_TERM_RESIZED': 1,
|
| 216 |
+
'HAVE_CURSES_RESIZETERM': 1,
|
| 217 |
+
'HAVE_CURSES_RESIZE_TERM': 1,
|
| 218 |
+
'HAVE_CURSES_SYNCOK': 1,
|
| 219 |
+
'HAVE_CURSES_TYPEAHEAD': 1,
|
| 220 |
+
'HAVE_CURSES_USE_ENV': 1,
|
| 221 |
+
'HAVE_CURSES_WCHGAT': 1,
|
| 222 |
+
'HAVE_DECL_ISFINITE': 1,
|
| 223 |
+
'HAVE_DECL_ISINF': 1,
|
| 224 |
+
'HAVE_DECL_ISNAN': 1,
|
| 225 |
+
'HAVE_DECL_RTLD_DEEPBIND': 1,
|
| 226 |
+
'HAVE_DECL_RTLD_GLOBAL': 1,
|
| 227 |
+
'HAVE_DECL_RTLD_LAZY': 1,
|
| 228 |
+
'HAVE_DECL_RTLD_LOCAL': 1,
|
| 229 |
+
'HAVE_DECL_RTLD_MEMBER': 0,
|
| 230 |
+
'HAVE_DECL_RTLD_NODELETE': 1,
|
| 231 |
+
'HAVE_DECL_RTLD_NOLOAD': 1,
|
| 232 |
+
'HAVE_DECL_RTLD_NOW': 1,
|
| 233 |
+
'HAVE_DECL_TZNAME': 0,
|
| 234 |
+
'HAVE_DEVICE_MACROS': 1,
|
| 235 |
+
'HAVE_DEV_PTC': 0,
|
| 236 |
+
'HAVE_DEV_PTMX': 1,
|
| 237 |
+
'HAVE_DIRECT_H': 0,
|
| 238 |
+
'HAVE_DIRENT_D_TYPE': 1,
|
| 239 |
+
'HAVE_DIRENT_H': 1,
|
| 240 |
+
'HAVE_DIRFD': 1,
|
| 241 |
+
'HAVE_DLFCN_H': 1,
|
| 242 |
+
'HAVE_DLOPEN': 1,
|
| 243 |
+
'HAVE_DUP2': 1,
|
| 244 |
+
'HAVE_DUP3': 1,
|
| 245 |
+
'HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH': 0,
|
| 246 |
+
'HAVE_DYNAMIC_LOADING': 1,
|
| 247 |
+
'HAVE_ENDIAN_H': 1,
|
| 248 |
+
'HAVE_EPOLL': 1,
|
| 249 |
+
'HAVE_EPOLL_CREATE1': 1,
|
| 250 |
+
'HAVE_ERF': 1,
|
| 251 |
+
'HAVE_ERFC': 1,
|
| 252 |
+
'HAVE_ERRNO_H': 1,
|
| 253 |
+
'HAVE_EVENTFD': 1,
|
| 254 |
+
'HAVE_EXECV': 1,
|
| 255 |
+
'HAVE_EXPLICIT_BZERO': 0,
|
| 256 |
+
'HAVE_EXPLICIT_MEMSET': 0,
|
| 257 |
+
'HAVE_EXPM1': 1,
|
| 258 |
+
'HAVE_FACCESSAT': 1,
|
| 259 |
+
'HAVE_FCHDIR': 1,
|
| 260 |
+
'HAVE_FCHMOD': 1,
|
| 261 |
+
'HAVE_FCHMODAT': 1,
|
| 262 |
+
'HAVE_FCHOWN': 1,
|
| 263 |
+
'HAVE_FCHOWNAT': 1,
|
| 264 |
+
'HAVE_FCNTL_H': 1,
|
| 265 |
+
'HAVE_FDATASYNC': 1,
|
| 266 |
+
'HAVE_FDOPENDIR': 1,
|
| 267 |
+
'HAVE_FDWALK': 0,
|
| 268 |
+
'HAVE_FEXECVE': 1,
|
| 269 |
+
'HAVE_FINITE': 1,
|
| 270 |
+
'HAVE_FLOCK': 1,
|
| 271 |
+
'HAVE_FORK': 1,
|
| 272 |
+
'HAVE_FORKPTY': 1,
|
| 273 |
+
'HAVE_FPATHCONF': 1,
|
| 274 |
+
'HAVE_FSEEK64': 0,
|
| 275 |
+
'HAVE_FSEEKO': 1,
|
| 276 |
+
'HAVE_FSTATAT': 1,
|
| 277 |
+
'HAVE_FSTATVFS': 1,
|
| 278 |
+
'HAVE_FSYNC': 1,
|
| 279 |
+
'HAVE_FTELL64': 0,
|
| 280 |
+
'HAVE_FTELLO': 1,
|
| 281 |
+
'HAVE_FTIME': 1,
|
| 282 |
+
'HAVE_FTRUNCATE': 1,
|
| 283 |
+
'HAVE_FUTIMENS': 1,
|
| 284 |
+
'HAVE_FUTIMES': 1,
|
| 285 |
+
'HAVE_FUTIMESAT': 1,
|
| 286 |
+
'HAVE_GAI_STRERROR': 1,
|
| 287 |
+
'HAVE_GAMMA': 1,
|
| 288 |
+
'HAVE_GCC_ASM_FOR_MC68881': 0,
|
| 289 |
+
'HAVE_GCC_ASM_FOR_X64': 1,
|
| 290 |
+
'HAVE_GCC_ASM_FOR_X87': 1,
|
| 291 |
+
'HAVE_GCC_UINT128_T': 1,
|
| 292 |
+
'HAVE_GETADDRINFO': 1,
|
| 293 |
+
'HAVE_GETC_UNLOCKED': 1,
|
| 294 |
+
'HAVE_GETENTROPY': 0,
|
| 295 |
+
'HAVE_GETGRGID_R': 1,
|
| 296 |
+
'HAVE_GETGRNAM_R': 1,
|
| 297 |
+
'HAVE_GETGROUPLIST': 1,
|
| 298 |
+
'HAVE_GETGROUPS': 1,
|
| 299 |
+
'HAVE_GETHOSTBYNAME': 0,
|
| 300 |
+
'HAVE_GETHOSTBYNAME_R': 1,
|
| 301 |
+
'HAVE_GETHOSTBYNAME_R_3_ARG': 0,
|
| 302 |
+
'HAVE_GETHOSTBYNAME_R_5_ARG': 0,
|
| 303 |
+
'HAVE_GETHOSTBYNAME_R_6_ARG': 1,
|
| 304 |
+
'HAVE_GETITIMER': 1,
|
| 305 |
+
'HAVE_GETLOADAVG': 1,
|
| 306 |
+
'HAVE_GETLOGIN': 1,
|
| 307 |
+
'HAVE_GETNAMEINFO': 1,
|
| 308 |
+
'HAVE_GETPAGESIZE': 1,
|
| 309 |
+
'HAVE_GETPEERNAME': 1,
|
| 310 |
+
'HAVE_GETPGID': 1,
|
| 311 |
+
'HAVE_GETPGRP': 1,
|
| 312 |
+
'HAVE_GETPID': 1,
|
| 313 |
+
'HAVE_GETPRIORITY': 1,
|
| 314 |
+
'HAVE_GETPWENT': 1,
|
| 315 |
+
'HAVE_GETPWNAM_R': 1,
|
| 316 |
+
'HAVE_GETPWUID_R': 1,
|
| 317 |
+
'HAVE_GETRANDOM': 0,
|
| 318 |
+
'HAVE_GETRANDOM_SYSCALL': 1,
|
| 319 |
+
'HAVE_GETRESGID': 1,
|
| 320 |
+
'HAVE_GETRESUID': 1,
|
| 321 |
+
'HAVE_GETSID': 1,
|
| 322 |
+
'HAVE_GETSPENT': 1,
|
| 323 |
+
'HAVE_GETSPNAM': 1,
|
| 324 |
+
'HAVE_GETWD': 1,
|
| 325 |
+
'HAVE_GLIBC_MEMMOVE_BUG': 0,
|
| 326 |
+
'HAVE_GRP_H': 1,
|
| 327 |
+
'HAVE_HSTRERROR': 1,
|
| 328 |
+
'HAVE_HTOLE64': 1,
|
| 329 |
+
'HAVE_HYPOT': 1,
|
| 330 |
+
'HAVE_IEEEFP_H': 0,
|
| 331 |
+
'HAVE_IF_NAMEINDEX': 1,
|
| 332 |
+
'HAVE_INET_ATON': 1,
|
| 333 |
+
'HAVE_INET_PTON': 1,
|
| 334 |
+
'HAVE_INITGROUPS': 1,
|
| 335 |
+
'HAVE_INTTYPES_H': 1,
|
| 336 |
+
'HAVE_IO_H': 0,
|
| 337 |
+
'HAVE_IPA_PURE_CONST_BUG': 0,
|
| 338 |
+
'HAVE_KILL': 1,
|
| 339 |
+
'HAVE_KILLPG': 1,
|
| 340 |
+
'HAVE_KQUEUE': 0,
|
| 341 |
+
'HAVE_LANGINFO_H': 1,
|
| 342 |
+
'HAVE_LARGEFILE_SUPPORT': 0,
|
| 343 |
+
'HAVE_LCHFLAGS': 0,
|
| 344 |
+
'HAVE_LCHMOD': 0,
|
| 345 |
+
'HAVE_LCHOWN': 1,
|
| 346 |
+
'HAVE_LGAMMA': 1,
|
| 347 |
+
'HAVE_LIBDL': 1,
|
| 348 |
+
'HAVE_LIBDLD': 0,
|
| 349 |
+
'HAVE_LIBIEEE': 0,
|
| 350 |
+
'HAVE_LIBINTL_H': 1,
|
| 351 |
+
'HAVE_LIBREADLINE': 1,
|
| 352 |
+
'HAVE_LIBRESOLV': 0,
|
| 353 |
+
'HAVE_LIBSENDFILE': 0,
|
| 354 |
+
'HAVE_LIBUTIL_H': 0,
|
| 355 |
+
'HAVE_LIBUUID': 1,
|
| 356 |
+
'HAVE_LINK': 1,
|
| 357 |
+
'HAVE_LINKAT': 1,
|
| 358 |
+
'HAVE_LINUX_AUXVEC_H': 1,
|
| 359 |
+
'HAVE_LINUX_CAN_BCM_H': 1,
|
| 360 |
+
'HAVE_LINUX_CAN_H': 1,
|
| 361 |
+
'HAVE_LINUX_CAN_J1939_H': 0,
|
| 362 |
+
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 1,
|
| 363 |
+
'HAVE_LINUX_CAN_RAW_H': 1,
|
| 364 |
+
'HAVE_LINUX_CAN_RAW_JOIN_FILTERS': 1,
|
| 365 |
+
'HAVE_LINUX_MEMFD_H': 1,
|
| 366 |
+
'HAVE_LINUX_NETLINK_H': 1,
|
| 367 |
+
'HAVE_LINUX_QRTR_H': 0,
|
| 368 |
+
'HAVE_LINUX_RANDOM_H': 1,
|
| 369 |
+
'HAVE_LINUX_TIPC_H': 1,
|
| 370 |
+
'HAVE_LINUX_VM_SOCKETS_H': 1,
|
| 371 |
+
'HAVE_LINUX_WAIT_H': 1,
|
| 372 |
+
'HAVE_LOCKF': 1,
|
| 373 |
+
'HAVE_LOG1P': 1,
|
| 374 |
+
'HAVE_LOG2': 1,
|
| 375 |
+
'HAVE_LONG_DOUBLE': 1,
|
| 376 |
+
'HAVE_LSTAT': 1,
|
| 377 |
+
'HAVE_LUTIMES': 1,
|
| 378 |
+
'HAVE_MADVISE': 1,
|
| 379 |
+
'HAVE_MAKEDEV': 1,
|
| 380 |
+
'HAVE_MBRTOWC': 1,
|
| 381 |
+
'HAVE_MEMFD_CREATE': 0,
|
| 382 |
+
'HAVE_MEMORY_H': 1,
|
| 383 |
+
'HAVE_MEMRCHR': 1,
|
| 384 |
+
'HAVE_MKDIRAT': 1,
|
| 385 |
+
'HAVE_MKFIFO': 1,
|
| 386 |
+
'HAVE_MKFIFOAT': 1,
|
| 387 |
+
'HAVE_MKNOD': 1,
|
| 388 |
+
'HAVE_MKNODAT': 1,
|
| 389 |
+
'HAVE_MKTIME': 1,
|
| 390 |
+
'HAVE_MMAP': 1,
|
| 391 |
+
'HAVE_MREMAP': 1,
|
| 392 |
+
'HAVE_NCURSES_H': 1,
|
| 393 |
+
'HAVE_NDIR_H': 0,
|
| 394 |
+
'HAVE_NETPACKET_PACKET_H': 1,
|
| 395 |
+
'HAVE_NET_IF_H': 1,
|
| 396 |
+
'HAVE_NICE': 1,
|
| 397 |
+
'HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION': 0,
|
| 398 |
+
'HAVE_OPENAT': 1,
|
| 399 |
+
'HAVE_OPENPTY': 1,
|
| 400 |
+
'HAVE_PATHCONF': 1,
|
| 401 |
+
'HAVE_PAUSE': 1,
|
| 402 |
+
'HAVE_PIPE2': 1,
|
| 403 |
+
'HAVE_PLOCK': 0,
|
| 404 |
+
'HAVE_POLL': 1,
|
| 405 |
+
'HAVE_POLL_H': 1,
|
| 406 |
+
'HAVE_POSIX_FADVISE': 1,
|
| 407 |
+
'HAVE_POSIX_FALLOCATE': 1,
|
| 408 |
+
'HAVE_POSIX_SPAWN': 1,
|
| 409 |
+
'HAVE_POSIX_SPAWNP': 1,
|
| 410 |
+
'HAVE_PREAD': 1,
|
| 411 |
+
'HAVE_PREADV': 1,
|
| 412 |
+
'HAVE_PREADV2': 0,
|
| 413 |
+
'HAVE_PRLIMIT': 1,
|
| 414 |
+
'HAVE_PROCESS_H': 0,
|
| 415 |
+
'HAVE_PROTOTYPES': 1,
|
| 416 |
+
'HAVE_PTHREAD_CONDATTR_SETCLOCK': 1,
|
| 417 |
+
'HAVE_PTHREAD_DESTRUCTOR': 0,
|
| 418 |
+
'HAVE_PTHREAD_GETCPUCLOCKID': 1,
|
| 419 |
+
'HAVE_PTHREAD_H': 1,
|
| 420 |
+
'HAVE_PTHREAD_INIT': 0,
|
| 421 |
+
'HAVE_PTHREAD_KILL': 1,
|
| 422 |
+
'HAVE_PTHREAD_SIGMASK': 1,
|
| 423 |
+
'HAVE_PTY_H': 1,
|
| 424 |
+
'HAVE_PWRITE': 1,
|
| 425 |
+
'HAVE_PWRITEV': 1,
|
| 426 |
+
'HAVE_PWRITEV2': 0,
|
| 427 |
+
'HAVE_READLINK': 1,
|
| 428 |
+
'HAVE_READLINKAT': 1,
|
| 429 |
+
'HAVE_READV': 1,
|
| 430 |
+
'HAVE_REALPATH': 1,
|
| 431 |
+
'HAVE_RENAMEAT': 1,
|
| 432 |
+
'HAVE_RL_APPEND_HISTORY': 1,
|
| 433 |
+
'HAVE_RL_CATCH_SIGNAL': 1,
|
| 434 |
+
'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1,
|
| 435 |
+
'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1,
|
| 436 |
+
'HAVE_RL_COMPLETION_MATCHES': 1,
|
| 437 |
+
'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1,
|
| 438 |
+
'HAVE_RL_PRE_INPUT_HOOK': 1,
|
| 439 |
+
'HAVE_RL_RESIZE_TERMINAL': 1,
|
| 440 |
+
'HAVE_ROUND': 1,
|
| 441 |
+
'HAVE_RTPSPAWN': 0,
|
| 442 |
+
'HAVE_SCHED_GET_PRIORITY_MAX': 1,
|
| 443 |
+
'HAVE_SCHED_H': 1,
|
| 444 |
+
'HAVE_SCHED_RR_GET_INTERVAL': 1,
|
| 445 |
+
'HAVE_SCHED_SETAFFINITY': 1,
|
| 446 |
+
'HAVE_SCHED_SETPARAM': 1,
|
| 447 |
+
'HAVE_SCHED_SETSCHEDULER': 1,
|
| 448 |
+
'HAVE_SEM_CLOCKWAIT': 0,
|
| 449 |
+
'HAVE_SEM_GETVALUE': 1,
|
| 450 |
+
'HAVE_SEM_OPEN': 1,
|
| 451 |
+
'HAVE_SEM_TIMEDWAIT': 1,
|
| 452 |
+
'HAVE_SEM_UNLINK': 1,
|
| 453 |
+
'HAVE_SENDFILE': 1,
|
| 454 |
+
'HAVE_SETEGID': 1,
|
| 455 |
+
'HAVE_SETEUID': 1,
|
| 456 |
+
'HAVE_SETGID': 1,
|
| 457 |
+
'HAVE_SETGROUPS': 1,
|
| 458 |
+
'HAVE_SETHOSTNAME': 1,
|
| 459 |
+
'HAVE_SETITIMER': 1,
|
| 460 |
+
'HAVE_SETLOCALE': 1,
|
| 461 |
+
'HAVE_SETPGID': 1,
|
| 462 |
+
'HAVE_SETPGRP': 1,
|
| 463 |
+
'HAVE_SETPRIORITY': 1,
|
| 464 |
+
'HAVE_SETREGID': 1,
|
| 465 |
+
'HAVE_SETRESGID': 1,
|
| 466 |
+
'HAVE_SETRESUID': 1,
|
| 467 |
+
'HAVE_SETREUID': 1,
|
| 468 |
+
'HAVE_SETSID': 1,
|
| 469 |
+
'HAVE_SETUID': 1,
|
| 470 |
+
'HAVE_SETVBUF': 1,
|
| 471 |
+
'HAVE_SHADOW_H': 1,
|
| 472 |
+
'HAVE_SHM_OPEN': 1,
|
| 473 |
+
'HAVE_SHM_UNLINK': 1,
|
| 474 |
+
'HAVE_SIGACTION': 1,
|
| 475 |
+
'HAVE_SIGALTSTACK': 1,
|
| 476 |
+
'HAVE_SIGFILLSET': 1,
|
| 477 |
+
'HAVE_SIGINFO_T_SI_BAND': 1,
|
| 478 |
+
'HAVE_SIGINTERRUPT': 1,
|
| 479 |
+
'HAVE_SIGNAL_H': 1,
|
| 480 |
+
'HAVE_SIGPENDING': 1,
|
| 481 |
+
'HAVE_SIGRELSE': 1,
|
| 482 |
+
'HAVE_SIGTIMEDWAIT': 1,
|
| 483 |
+
'HAVE_SIGWAIT': 1,
|
| 484 |
+
'HAVE_SIGWAITINFO': 1,
|
| 485 |
+
'HAVE_SNPRINTF': 1,
|
| 486 |
+
'HAVE_SOCKADDR_ALG': 1,
|
| 487 |
+
'HAVE_SOCKADDR_SA_LEN': 0,
|
| 488 |
+
'HAVE_SOCKADDR_STORAGE': 1,
|
| 489 |
+
'HAVE_SOCKETPAIR': 1,
|
| 490 |
+
'HAVE_SPAWN_H': 1,
|
| 491 |
+
'HAVE_SPLICE': 1,
|
| 492 |
+
'HAVE_SSIZE_T': 1,
|
| 493 |
+
'HAVE_STATVFS': 1,
|
| 494 |
+
'HAVE_STAT_TV_NSEC': 1,
|
| 495 |
+
'HAVE_STAT_TV_NSEC2': 0,
|
| 496 |
+
'HAVE_STDARG_PROTOTYPES': 1,
|
| 497 |
+
'HAVE_STDINT_H': 1,
|
| 498 |
+
'HAVE_STDLIB_H': 1,
|
| 499 |
+
'HAVE_STD_ATOMIC': 1,
|
| 500 |
+
'HAVE_STRFTIME': 1,
|
| 501 |
+
'HAVE_STRINGS_H': 1,
|
| 502 |
+
'HAVE_STRING_H': 1,
|
| 503 |
+
'HAVE_STRLCPY': 0,
|
| 504 |
+
'HAVE_STROPTS_H': 0,
|
| 505 |
+
'HAVE_STRSIGNAL': 1,
|
| 506 |
+
'HAVE_STRUCT_PASSWD_PW_GECOS': 1,
|
| 507 |
+
'HAVE_STRUCT_PASSWD_PW_PASSWD': 1,
|
| 508 |
+
'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0,
|
| 509 |
+
'HAVE_STRUCT_STAT_ST_BLKSIZE': 1,
|
| 510 |
+
'HAVE_STRUCT_STAT_ST_BLOCKS': 1,
|
| 511 |
+
'HAVE_STRUCT_STAT_ST_FLAGS': 0,
|
| 512 |
+
'HAVE_STRUCT_STAT_ST_GEN': 0,
|
| 513 |
+
'HAVE_STRUCT_STAT_ST_RDEV': 1,
|
| 514 |
+
'HAVE_STRUCT_TM_TM_ZONE': 1,
|
| 515 |
+
'HAVE_SYMLINK': 1,
|
| 516 |
+
'HAVE_SYMLINKAT': 1,
|
| 517 |
+
'HAVE_SYNC': 1,
|
| 518 |
+
'HAVE_SYSCONF': 1,
|
| 519 |
+
'HAVE_SYSEXITS_H': 1,
|
| 520 |
+
'HAVE_SYS_AUDIOIO_H': 0,
|
| 521 |
+
'HAVE_SYS_AUXV_H': 1,
|
| 522 |
+
'HAVE_SYS_BSDTTY_H': 0,
|
| 523 |
+
'HAVE_SYS_DEVPOLL_H': 0,
|
| 524 |
+
'HAVE_SYS_DIR_H': 0,
|
| 525 |
+
'HAVE_SYS_ENDIAN_H': 0,
|
| 526 |
+
'HAVE_SYS_EPOLL_H': 1,
|
| 527 |
+
'HAVE_SYS_EVENTFD_H': 1,
|
| 528 |
+
'HAVE_SYS_EVENT_H': 0,
|
| 529 |
+
'HAVE_SYS_FILE_H': 1,
|
| 530 |
+
'HAVE_SYS_IOCTL_H': 1,
|
| 531 |
+
'HAVE_SYS_KERN_CONTROL_H': 0,
|
| 532 |
+
'HAVE_SYS_LOADAVG_H': 0,
|
| 533 |
+
'HAVE_SYS_LOCK_H': 0,
|
| 534 |
+
'HAVE_SYS_MEMFD_H': 0,
|
| 535 |
+
'HAVE_SYS_MKDEV_H': 0,
|
| 536 |
+
'HAVE_SYS_MMAN_H': 1,
|
| 537 |
+
'HAVE_SYS_MODEM_H': 0,
|
| 538 |
+
'HAVE_SYS_NDIR_H': 0,
|
| 539 |
+
'HAVE_SYS_PARAM_H': 1,
|
| 540 |
+
'HAVE_SYS_POLL_H': 1,
|
| 541 |
+
'HAVE_SYS_RANDOM_H': 0,
|
| 542 |
+
'HAVE_SYS_RESOURCE_H': 1,
|
| 543 |
+
'HAVE_SYS_SELECT_H': 1,
|
| 544 |
+
'HAVE_SYS_SENDFILE_H': 1,
|
| 545 |
+
'HAVE_SYS_SOCKET_H': 1,
|
| 546 |
+
'HAVE_SYS_STATVFS_H': 1,
|
| 547 |
+
'HAVE_SYS_STAT_H': 1,
|
| 548 |
+
'HAVE_SYS_SYSCALL_H': 1,
|
| 549 |
+
'HAVE_SYS_SYSMACROS_H': 1,
|
| 550 |
+
'HAVE_SYS_SYS_DOMAIN_H': 0,
|
| 551 |
+
'HAVE_SYS_TERMIO_H': 0,
|
| 552 |
+
'HAVE_SYS_TIMES_H': 1,
|
| 553 |
+
'HAVE_SYS_TIME_H': 1,
|
| 554 |
+
'HAVE_SYS_TYPES_H': 1,
|
| 555 |
+
'HAVE_SYS_UIO_H': 1,
|
| 556 |
+
'HAVE_SYS_UN_H': 1,
|
| 557 |
+
'HAVE_SYS_UTSNAME_H': 1,
|
| 558 |
+
'HAVE_SYS_WAIT_H': 1,
|
| 559 |
+
'HAVE_SYS_XATTR_H': 1,
|
| 560 |
+
'HAVE_TCGETPGRP': 1,
|
| 561 |
+
'HAVE_TCSETPGRP': 1,
|
| 562 |
+
'HAVE_TEMPNAM': 1,
|
| 563 |
+
'HAVE_TERMIOS_H': 1,
|
| 564 |
+
'HAVE_TERM_H': 1,
|
| 565 |
+
'HAVE_TGAMMA': 1,
|
| 566 |
+
'HAVE_TIMEGM': 1,
|
| 567 |
+
'HAVE_TIMES': 1,
|
| 568 |
+
'HAVE_TMPFILE': 1,
|
| 569 |
+
'HAVE_TMPNAM': 1,
|
| 570 |
+
'HAVE_TMPNAM_R': 1,
|
| 571 |
+
'HAVE_TM_ZONE': 1,
|
| 572 |
+
'HAVE_TRUNCATE': 1,
|
| 573 |
+
'HAVE_TZNAME': 0,
|
| 574 |
+
'HAVE_UCS4_TCL': 0,
|
| 575 |
+
'HAVE_UNAME': 1,
|
| 576 |
+
'HAVE_UNISTD_H': 1,
|
| 577 |
+
'HAVE_UNLINKAT': 1,
|
| 578 |
+
'HAVE_USABLE_WCHAR_T': 0,
|
| 579 |
+
'HAVE_UTIL_H': 0,
|
| 580 |
+
'HAVE_UTIMENSAT': 1,
|
| 581 |
+
'HAVE_UTIMES': 1,
|
| 582 |
+
'HAVE_UTIME_H': 1,
|
| 583 |
+
'HAVE_UUID_CREATE': 0,
|
| 584 |
+
'HAVE_UUID_ENC_BE': 0,
|
| 585 |
+
'HAVE_UUID_GENERATE_TIME_SAFE': 1,
|
| 586 |
+
'HAVE_UUID_H': 1,
|
| 587 |
+
'HAVE_UUID_UUID_H': 1,
|
| 588 |
+
'HAVE_VFORK': 1,
|
| 589 |
+
'HAVE_WAIT3': 1,
|
| 590 |
+
'HAVE_WAIT4': 1,
|
| 591 |
+
'HAVE_WAITID': 1,
|
| 592 |
+
'HAVE_WAITPID': 1,
|
| 593 |
+
'HAVE_WCHAR_H': 1,
|
| 594 |
+
'HAVE_WCSCOLL': 1,
|
| 595 |
+
'HAVE_WCSFTIME': 1,
|
| 596 |
+
'HAVE_WCSXFRM': 1,
|
| 597 |
+
'HAVE_WMEMCMP': 1,
|
| 598 |
+
'HAVE_WORKING_TZSET': 1,
|
| 599 |
+
'HAVE_WRITEV': 1,
|
| 600 |
+
'HAVE_ZLIB_COPY': 1,
|
| 601 |
+
'HAVE__GETPTY': 0,
|
| 602 |
+
'HOST_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 603 |
+
'INCLDIRSTOMAKE': '/root/envs/evalkit_tf446/include '
|
| 604 |
+
'/root/envs/evalkit_tf446/include '
|
| 605 |
+
'/root/envs/evalkit_tf446/include/python3.10 '
|
| 606 |
+
'/root/envs/evalkit_tf446/include/python3.10',
|
| 607 |
+
'INCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 608 |
+
'INCLUDEPY': '/root/envs/evalkit_tf446/include/python3.10',
|
| 609 |
+
'INSTALL': '/usr/bin/install -c',
|
| 610 |
+
'INSTALL_DATA': '/usr/bin/install -c -m 644',
|
| 611 |
+
'INSTALL_PROGRAM': '/usr/bin/install -c',
|
| 612 |
+
'INSTALL_SCRIPT': '/usr/bin/install -c',
|
| 613 |
+
'INSTALL_SHARED': '/usr/bin/install -c -m 755',
|
| 614 |
+
'INSTSONAME': 'libpython3.10.a',
|
| 615 |
+
'IO_H': 'Modules/_io/_iomodule.h',
|
| 616 |
+
'IO_OBJS': '\\',
|
| 617 |
+
'LDCXXSHARED': 'x86_64-conda-linux-gnu-c++ -pthread -shared',
|
| 618 |
+
'LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 619 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 620 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 621 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 622 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 623 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 624 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 625 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 626 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 627 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 628 |
+
'LDLIBRARY': 'libpython3.10.a',
|
| 629 |
+
'LDLIBRARYDIR': '',
|
| 630 |
+
'LDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 631 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 632 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 633 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 634 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 635 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 636 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 637 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 638 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 639 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 640 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 641 |
+
'LDVERSION': '3.10',
|
| 642 |
+
'LIBC': '',
|
| 643 |
+
'LIBDEST': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 644 |
+
'LIBDIR': '/root/envs/evalkit_tf446/lib',
|
| 645 |
+
'LIBFFI_INCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 646 |
+
'LIBM': '-lm',
|
| 647 |
+
'LIBOBJDIR': 'Python/',
|
| 648 |
+
'LIBOBJS': '',
|
| 649 |
+
'LIBPC': '/root/envs/evalkit_tf446/lib/pkgconfig',
|
| 650 |
+
'LIBPL': '/root/envs/evalkit_tf446/lib/python3.10/config-3.10-x86_64-linux-gnu',
|
| 651 |
+
'LIBPYTHON': '',
|
| 652 |
+
'LIBRARY': 'libpython3.10.a',
|
| 653 |
+
'LIBRARY_DEPS': 'libpython3.10.a',
|
| 654 |
+
'LIBRARY_OBJS': '\\',
|
| 655 |
+
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
|
| 656 |
+
'LIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 657 |
+
'LIBSUBDIRS': 'asyncio \\',
|
| 658 |
+
'LINKCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 659 |
+
'LINKFORSHARED': '-Xlinker -export-dynamic',
|
| 660 |
+
'LIPO_32BIT_FLAGS': '',
|
| 661 |
+
'LIPO_INTEL64_FLAGS': '',
|
| 662 |
+
'LLVM_PROF_ERR': 'no',
|
| 663 |
+
'LLVM_PROF_FILE': '',
|
| 664 |
+
'LLVM_PROF_MERGER': 'true',
|
| 665 |
+
'LN': 'ln',
|
| 666 |
+
'LOCALMODLIBS': '',
|
| 667 |
+
'MACHDEP': 'linux',
|
| 668 |
+
'MACHDEP_OBJS': '',
|
| 669 |
+
'MACHDESTLIB': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 670 |
+
'MACOSX_DEPLOYMENT_TARGET': '',
|
| 671 |
+
'MAINCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 672 |
+
'MAJOR_IN_MKDEV': 0,
|
| 673 |
+
'MAJOR_IN_SYSMACROS': 0,
|
| 674 |
+
'MAKESETUP': '/croot/python-split_1733933809325/work/Modules/makesetup',
|
| 675 |
+
'MANDIR': '/root/envs/evalkit_tf446/share/man',
|
| 676 |
+
'MKDIR_P': '/usr/bin/mkdir -p',
|
| 677 |
+
'MODBUILT_NAMES': 'posix errno pwd _sre _codecs _weakref _functools '
|
| 678 |
+
'_operator _collections _abc itertools atexit _signal '
|
| 679 |
+
'_stat time _thread _locale _io faulthandler '
|
| 680 |
+
'_tracemalloc _symtable xxsubtype',
|
| 681 |
+
'MODDISABLED_NAMES': '',
|
| 682 |
+
'MODLIBS': '',
|
| 683 |
+
'MODOBJS': 'Modules/posixmodule.o Modules/errnomodule.o '
|
| 684 |
+
'Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o '
|
| 685 |
+
'Modules/_weakref.o Modules/_functoolsmodule.o '
|
| 686 |
+
'Modules/_operator.o Modules/_collectionsmodule.o '
|
| 687 |
+
'Modules/_abc.o Modules/itertoolsmodule.o '
|
| 688 |
+
'Modules/atexitmodule.o Modules/signalmodule.o Modules/_stat.o '
|
| 689 |
+
'Modules/timemodule.o Modules/_threadmodule.o '
|
| 690 |
+
'Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o '
|
| 691 |
+
'Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o '
|
| 692 |
+
'Modules/textio.o Modules/stringio.o Modules/faulthandler.o '
|
| 693 |
+
'Modules/_tracemalloc.o Modules/symtablemodule.o '
|
| 694 |
+
'Modules/xxsubtype.o',
|
| 695 |
+
'MODULE_OBJS': '\\',
|
| 696 |
+
'MULTIARCH': 'x86_64-linux-gnu',
|
| 697 |
+
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
|
| 698 |
+
'MVWDELCH_IS_EXPRESSION': 1,
|
| 699 |
+
'NO_AS_NEEDED': '-Wl,--no-as-needed',
|
| 700 |
+
'OBJECT_OBJS': '\\',
|
| 701 |
+
'OPENSSL_INCLUDES': '-I/root/envs/evalkit_tf446/include',
|
| 702 |
+
'OPENSSL_LDFLAGS': '-L/root/envs/evalkit_tf446/lib',
|
| 703 |
+
'OPENSSL_LIBS': '-lssl -lcrypto',
|
| 704 |
+
'OPENSSL_RPATH': '',
|
| 705 |
+
'OPT': '-DNDEBUG -fwrapv -O2 -Wall',
|
| 706 |
+
'OTHER_LIBTOOL_OPT': '',
|
| 707 |
+
'PACKAGE_BUGREPORT': 0,
|
| 708 |
+
'PACKAGE_NAME': 0,
|
| 709 |
+
'PACKAGE_STRING': 0,
|
| 710 |
+
'PACKAGE_TARNAME': 0,
|
| 711 |
+
'PACKAGE_URL': 0,
|
| 712 |
+
'PACKAGE_VERSION': 0,
|
| 713 |
+
'PARSER_HEADERS': '\\',
|
| 714 |
+
'PARSER_OBJS': '\\ \\ Parser/myreadline.o Parser/tokenizer.o',
|
| 715 |
+
'PEGEN_HEADERS': '\\',
|
| 716 |
+
'PEGEN_OBJS': '\\',
|
| 717 |
+
'PGO_PROF_GEN_FLAG': '-fprofile-generate',
|
| 718 |
+
'PGO_PROF_USE_FLAG': ' ',
|
| 719 |
+
'PLATLIBDIR': 'lib',
|
| 720 |
+
'POBJS': '\\',
|
| 721 |
+
'POSIX_SEMAPHORES_NOT_ENABLED': 0,
|
| 722 |
+
'PROFILE_TASK': '-m test --pgo',
|
| 723 |
+
'PTHREAD_KEY_T_IS_COMPATIBLE_WITH_INT': 1,
|
| 724 |
+
'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1,
|
| 725 |
+
'PURIFY': '',
|
| 726 |
+
'PY3LIBRARY': '',
|
| 727 |
+
'PYLONG_BITS_IN_DIGIT': 0,
|
| 728 |
+
'PYTHON': 'python',
|
| 729 |
+
'PYTHONFRAMEWORK': '',
|
| 730 |
+
'PYTHONFRAMEWORKDIR': 'no-framework',
|
| 731 |
+
'PYTHONFRAMEWORKINSTALLDIR': '',
|
| 732 |
+
'PYTHONFRAMEWORKPREFIX': '',
|
| 733 |
+
'PYTHONPATH': '',
|
| 734 |
+
'PYTHON_FOR_BUILD': './python -E',
|
| 735 |
+
'PYTHON_FOR_REGEN': '',
|
| 736 |
+
'PYTHON_HEADERS': '\\',
|
| 737 |
+
'PYTHON_OBJS': '\\',
|
| 738 |
+
'PY_BUILD_ENVIRON': '',
|
| 739 |
+
'PY_BUILTIN_HASHLIB_HASHES': '"md5,sha1,sha256,sha512,sha3,blake2"',
|
| 740 |
+
'PY_BUILTIN_MODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG '
|
| 741 |
+
'-fwrapv -O2 -Wall -march=nocona -mtune=haswell '
|
| 742 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 743 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 744 |
+
'/root/envs/evalkit_tf446/include '
|
| 745 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 746 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 747 |
+
' '
|
| 748 |
+
' -march=nocona '
|
| 749 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 750 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 751 |
+
'-ffunction-sections -pipe -isystem '
|
| 752 |
+
'/root/envs/evalkit_tf446/include '
|
| 753 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 754 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 755 |
+
' '
|
| 756 |
+
' '
|
| 757 |
+
'-fno-semantic-interposition '
|
| 758 |
+
' '
|
| 759 |
+
' -g -std=c99 -Wextra '
|
| 760 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 761 |
+
'-Wno-missing-field-initializers '
|
| 762 |
+
'-Werror=implicit-function-declaration '
|
| 763 |
+
'-fvisibility=hidden '
|
| 764 |
+
' '
|
| 765 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 766 |
+
'-IObjects -IInclude -IPython -I. '
|
| 767 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 768 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 769 |
+
'/root/envs/evalkit_tf446/include '
|
| 770 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 771 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 772 |
+
'/root/envs/evalkit_tf446/include '
|
| 773 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 774 |
+
'-DPy_BUILD_CORE_BUILTIN',
|
| 775 |
+
'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 776 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 777 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 778 |
+
'-isystem '
|
| 779 |
+
'/root/envs/evalkit_tf446/include '
|
| 780 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 781 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 782 |
+
' '
|
| 783 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 784 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 785 |
+
'-isystem '
|
| 786 |
+
'/root/envs/evalkit_tf446/include '
|
| 787 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 788 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 789 |
+
' '
|
| 790 |
+
'',
|
| 791 |
+
'PY_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 792 |
+
' -g -std=c99 '
|
| 793 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 794 |
+
'-Wno-missing-field-initializers '
|
| 795 |
+
'-Werror=implicit-function-declaration '
|
| 796 |
+
'-fvisibility=hidden '
|
| 797 |
+
'-I/croot/python-split_1733933809325/work/Include/internal',
|
| 798 |
+
'PY_COERCE_C_LOCALE': 1,
|
| 799 |
+
'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 '
|
| 800 |
+
'-Wall -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 801 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 802 |
+
'-pipe -isystem '
|
| 803 |
+
'/root/envs/evalkit_tf446/include '
|
| 804 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 805 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 806 |
+
' '
|
| 807 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 808 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 809 |
+
'-pipe -isystem '
|
| 810 |
+
'/root/envs/evalkit_tf446/include '
|
| 811 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 812 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 813 |
+
' '
|
| 814 |
+
' -fno-semantic-interposition '
|
| 815 |
+
' '
|
| 816 |
+
'-g -std=c99 -Wextra -Wno-unused-result '
|
| 817 |
+
'-Wno-unused-parameter -Wno-missing-field-initializers '
|
| 818 |
+
'-Werror=implicit-function-declaration -fvisibility=hidden '
|
| 819 |
+
' '
|
| 820 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 821 |
+
'-IObjects -IInclude -IPython -I. '
|
| 822 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 823 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 824 |
+
'/root/envs/evalkit_tf446/include '
|
| 825 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 826 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 827 |
+
'/root/envs/evalkit_tf446/include '
|
| 828 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 829 |
+
'-DPy_BUILD_CORE',
|
| 830 |
+
'PY_CORE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 831 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 832 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 833 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 834 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 835 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 836 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 837 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 838 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 839 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 840 |
+
'-fno-semantic-interposition '
|
| 841 |
+
' -g',
|
| 842 |
+
'PY_CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 843 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 844 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 845 |
+
'/root/envs/evalkit_tf446/include '
|
| 846 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 847 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 848 |
+
'/root/envs/evalkit_tf446/include '
|
| 849 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 850 |
+
'PY_ENABLE_SHARED': 0,
|
| 851 |
+
'PY_FORMAT_SIZE_T': '"z"',
|
| 852 |
+
'PY_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 853 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 854 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 855 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 856 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 857 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 858 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 859 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 860 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 861 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 862 |
+
'PY_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 863 |
+
' -g',
|
| 864 |
+
'PY_SSL_DEFAULT_CIPHERS': 1,
|
| 865 |
+
'PY_SSL_DEFAULT_CIPHER_STRING': 0,
|
| 866 |
+
'PY_STDMODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv '
|
| 867 |
+
'-O2 -Wall -march=nocona -mtune=haswell '
|
| 868 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 869 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 870 |
+
'/root/envs/evalkit_tf446/include '
|
| 871 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 872 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 873 |
+
' '
|
| 874 |
+
' -march=nocona '
|
| 875 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 876 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 877 |
+
'-ffunction-sections -pipe -isystem '
|
| 878 |
+
'/root/envs/evalkit_tf446/include '
|
| 879 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 880 |
+
'-fdebug-prefix-map=/root/envs/evalkit_tf446=/usr/local/src/conda-prefix '
|
| 881 |
+
' '
|
| 882 |
+
' '
|
| 883 |
+
'-fno-semantic-interposition '
|
| 884 |
+
' -g -std=c99 '
|
| 885 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 886 |
+
'-Wno-missing-field-initializers '
|
| 887 |
+
'-Werror=implicit-function-declaration '
|
| 888 |
+
'-fvisibility=hidden '
|
| 889 |
+
' '
|
| 890 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 891 |
+
'-IObjects -IInclude -IPython -I. '
|
| 892 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 893 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 894 |
+
'/root/envs/evalkit_tf446/include '
|
| 895 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 896 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 897 |
+
'/root/envs/evalkit_tf446/include '
|
| 898 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 899 |
+
'Py_DEBUG': 0,
|
| 900 |
+
'Py_ENABLE_SHARED': 0,
|
| 901 |
+
'Py_HASH_ALGORITHM': 0,
|
| 902 |
+
'Py_TRACE_REFS': 0,
|
| 903 |
+
'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\',
|
| 904 |
+
'READELF': 'x86_64-conda-linux-gnu-readelf',
|
| 905 |
+
'RESSRCDIR': 'Mac/Resources/framework',
|
| 906 |
+
'RETSIGTYPE': 'void',
|
| 907 |
+
'RUNSHARED': '',
|
| 908 |
+
'SCRIPTDIR': '/root/envs/evalkit_tf446/lib',
|
| 909 |
+
'SETPGRP_HAVE_ARG': 0,
|
| 910 |
+
'SHELL': '/bin/sh',
|
| 911 |
+
'SHLIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 912 |
+
'SHLIB_SUFFIX': '.so',
|
| 913 |
+
'SHM_NEEDS_LIBRT': 1,
|
| 914 |
+
'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0,
|
| 915 |
+
'SITEPATH': '',
|
| 916 |
+
'SIZEOF_DOUBLE': 8,
|
| 917 |
+
'SIZEOF_FLOAT': 4,
|
| 918 |
+
'SIZEOF_FPOS_T': 16,
|
| 919 |
+
'SIZEOF_INT': 4,
|
| 920 |
+
'SIZEOF_LONG': 8,
|
| 921 |
+
'SIZEOF_LONG_DOUBLE': 16,
|
| 922 |
+
'SIZEOF_LONG_LONG': 8,
|
| 923 |
+
'SIZEOF_OFF_T': 8,
|
| 924 |
+
'SIZEOF_PID_T': 4,
|
| 925 |
+
'SIZEOF_PTHREAD_KEY_T': 4,
|
| 926 |
+
'SIZEOF_PTHREAD_T': 8,
|
| 927 |
+
'SIZEOF_SHORT': 2,
|
| 928 |
+
'SIZEOF_SIZE_T': 8,
|
| 929 |
+
'SIZEOF_TIME_T': 8,
|
| 930 |
+
'SIZEOF_UINTPTR_T': 8,
|
| 931 |
+
'SIZEOF_VOID_P': 8,
|
| 932 |
+
'SIZEOF_WCHAR_T': 4,
|
| 933 |
+
'SIZEOF__BOOL': 1,
|
| 934 |
+
'SOABI': 'cpython-310-x86_64-linux-gnu',
|
| 935 |
+
'SRCDIRS': 'Parser Objects Python Modules Modules/_io Programs',
|
| 936 |
+
'SRC_GDB_HOOKS': '/croot/python-split_1733933809325/work/Tools/gdb/libpython.py',
|
| 937 |
+
'STATIC_LIBPYTHON': 1,
|
| 938 |
+
'STDC_HEADERS': 1,
|
| 939 |
+
'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */",
|
| 940 |
+
'STRIPFLAG': '-s',
|
| 941 |
+
'SUBDIRS': '',
|
| 942 |
+
'SUBDIRSTOO': 'Include Lib Misc',
|
| 943 |
+
'SYSLIBS': '-lm',
|
| 944 |
+
'SYS_SELECT_WITH_SYS_TIME': 1,
|
| 945 |
+
'TCLTK_INCLUDES': '-I/root/envs/evalkit_tf446/include',
|
| 946 |
+
'TCLTK_LIBS': '-L/root/envs/evalkit_tf446/lib '
|
| 947 |
+
'-ltcl8.6 -ltk8.6',
|
| 948 |
+
'TESTOPTS': '',
|
| 949 |
+
'TESTPATH': '',
|
| 950 |
+
'TESTPYTHON': './python',
|
| 951 |
+
'TESTPYTHONOPTS': '',
|
| 952 |
+
'TESTRUNNER': './python '
|
| 953 |
+
'/croot/python-split_1733933809325/work/Tools/scripts/run_tests.py',
|
| 954 |
+
'TESTSUBDIRS': 'ctypes/test \\',
|
| 955 |
+
'TESTTIMEOUT': 1200,
|
| 956 |
+
'TEST_MODULES': 'yes',
|
| 957 |
+
'THREAD_STACK_SIZE': 0,
|
| 958 |
+
'TIMEMODULE_LIB': 0,
|
| 959 |
+
'TIME_WITH_SYS_TIME': 1,
|
| 960 |
+
'TM_IN_SYS_TIME': 0,
|
| 961 |
+
'TZPATH': '/root/envs/evalkit_tf446/share/zoneinfo',
|
| 962 |
+
'UNICODE_DEPS': '\\',
|
| 963 |
+
'UNIVERSALSDK': '',
|
| 964 |
+
'UPDATE_FILE': '/croot/python-split_1733933809325/work/Tools/scripts/update_file.py',
|
| 965 |
+
'USE_COMPUTED_GOTOS': 1,
|
| 966 |
+
'VERSION': '3.10',
|
| 967 |
+
'VPATH': '/croot/python-split_1733933809325/work',
|
| 968 |
+
'WHEEL_PKG_DIR': '',
|
| 969 |
+
'WINDOW_HAS_FLAGS': 1,
|
| 970 |
+
'WITH_DECIMAL_CONTEXTVAR': 1,
|
| 971 |
+
'WITH_DOC_STRINGS': 1,
|
| 972 |
+
'WITH_DTRACE': 0,
|
| 973 |
+
'WITH_DYLD': 0,
|
| 974 |
+
'WITH_EDITLINE': 0,
|
| 975 |
+
'WITH_LIBINTL': 0,
|
| 976 |
+
'WITH_NEXT_FRAMEWORK': 0,
|
| 977 |
+
'WITH_PYMALLOC': 1,
|
| 978 |
+
'WITH_VALGRIND': 0,
|
| 979 |
+
'X87_DOUBLE_ROUNDING': 0,
|
| 980 |
+
'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax',
|
| 981 |
+
'abs_builddir': '/croot/python-split_1733933809325/work/build-static',
|
| 982 |
+
'abs_srcdir': '/croot/python-split_1733933809325/work',
|
| 983 |
+
'datarootdir': '/root/envs/evalkit_tf446/share',
|
| 984 |
+
'exec_prefix': '/root/envs/evalkit_tf446',
|
| 985 |
+
'prefix': '/root/envs/evalkit_tf446',
|
| 986 |
+
'srcdir': '/croot/python-split_1733933809325/work'}
|
evalkit_tf446/lib/python3.10/_sysconfigdata_x86_64_conda_cos6_linux_gnu.py
ADDED
|
@@ -0,0 +1,986 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# system configuration generated and used by the sysconfig module
|
| 2 |
+
build_time_vars = {'ABIFLAGS': '',
|
| 3 |
+
'AC_APPLE_UNIVERSAL_BUILD': 0,
|
| 4 |
+
'AIX_BUILDDATE': 0,
|
| 5 |
+
'AIX_GENUINE_CPLUSPLUS': 0,
|
| 6 |
+
'ALIGNOF_LONG': 8,
|
| 7 |
+
'ALIGNOF_SIZE_T': 8,
|
| 8 |
+
'ALT_SOABI': 0,
|
| 9 |
+
'ANDROID_API_LEVEL': 0,
|
| 10 |
+
'AR': 'x86_64-conda_cos6-linux-gnu-ar',
|
| 11 |
+
'ARFLAGS': 'rcs',
|
| 12 |
+
'BASECFLAGS': '-Wno-unused-result -Wsign-compare',
|
| 13 |
+
'BASECPPFLAGS': '-IObjects -IInclude -IPython',
|
| 14 |
+
'BASEMODLIBS': '',
|
| 15 |
+
'BINDIR': '/root/envs/evalkit_tf446/bin',
|
| 16 |
+
'BINLIBDEST': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 17 |
+
'BLDLIBRARY': 'libpython3.10.a',
|
| 18 |
+
'BLDSHARED': 'x86_64-conda_cos6-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 19 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 20 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 21 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 22 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 23 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 24 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 25 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 26 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 27 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 28 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 29 |
+
'BUILDEXE': '',
|
| 30 |
+
'BUILDPYTHON': 'python',
|
| 31 |
+
'BUILD_GNU_TYPE': 'x86_64-conda_cos6-linux-gnu',
|
| 32 |
+
'BYTESTR_DEPS': '\\',
|
| 33 |
+
'CC': 'x86_64-conda_cos6-linux-gnu-gcc -pthread',
|
| 34 |
+
'CCSHARED': '-fPIC',
|
| 35 |
+
'CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 36 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 37 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 38 |
+
'-isystem '
|
| 39 |
+
'/root/envs/evalkit_tf446/include '
|
| 40 |
+
' '
|
| 41 |
+
' '
|
| 42 |
+
' '
|
| 43 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 44 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 45 |
+
'-isystem '
|
| 46 |
+
'/root/envs/evalkit_tf446/include '
|
| 47 |
+
' '
|
| 48 |
+
' '
|
| 49 |
+
' ',
|
| 50 |
+
'CFLAGSFORSHARED': '',
|
| 51 |
+
'CFLAGS_ALIASING': '',
|
| 52 |
+
'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in '
|
| 53 |
+
'Makefile.pre.in',
|
| 54 |
+
'CONFIGURE_CFLAGS': '-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 55 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 56 |
+
'-ffunction-sections -pipe -isystem '
|
| 57 |
+
'/root/envs/evalkit_tf446/include '
|
| 58 |
+
' '
|
| 59 |
+
' '
|
| 60 |
+
' '
|
| 61 |
+
' ',
|
| 62 |
+
'CONFIGURE_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 63 |
+
' '
|
| 64 |
+
' -g -std=c99 -Wextra '
|
| 65 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 66 |
+
'-Wno-missing-field-initializers '
|
| 67 |
+
'-Werror=implicit-function-declaration '
|
| 68 |
+
'-fvisibility=hidden',
|
| 69 |
+
'CONFIGURE_CPPFLAGS': '-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 70 |
+
'/root/envs/evalkit_tf446/include '
|
| 71 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 72 |
+
'CONFIGURE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 73 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 74 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 75 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 76 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 77 |
+
'CONFIGURE_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 78 |
+
' '
|
| 79 |
+
' -g',
|
| 80 |
+
'CONFIG_ARGS': "'--prefix=/root/envs/evalkit_tf446' "
|
| 81 |
+
"'--build=x86_64-conda_cos6-linux-gnu' "
|
| 82 |
+
"'--host=x86_64-conda_cos6-linux-gnu' '--enable-ipv6' "
|
| 83 |
+
"'--with-ensurepip=no' "
|
| 84 |
+
"'--with-tzpath=/root/envs/evalkit_tf446/share/zoneinfo:/root/envs/evalkit_tf446/share/tzinfo' "
|
| 85 |
+
"'--with-computed-gotos' '--with-system-ffi' "
|
| 86 |
+
"'--enable-loadable-sqlite-extensions' "
|
| 87 |
+
"'--with-tcltk-includes=-I/root/envs/evalkit_tf446/include' "
|
| 88 |
+
"'--with-tcltk-libs=-L/root/envs/evalkit_tf446/lib "
|
| 89 |
+
"-ltcl8.6 -ltk8.6' '--with-platlibdir=lib' '--with-lto' "
|
| 90 |
+
"'--enable-optimizations' "
|
| 91 |
+
"'-oldincludedir=/croot/python-split_1733933809325/_build_env/x86_64-conda_cos6-linux-gnu/sysroot/usr/include' "
|
| 92 |
+
"'--disable-shared' 'PROFILE_TASK=-m test --pgo' "
|
| 93 |
+
"'build_alias=x86_64-conda_cos6-linux-gnu' "
|
| 94 |
+
"'host_alias=x86_64-conda_cos6-linux-gnu' 'MACHDEP=linux' "
|
| 95 |
+
"'CC=x86_64-conda_cos6-linux-gnu-gcc' 'CFLAGS=-march=nocona "
|
| 96 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 97 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 98 |
+
'-pipe -isystem '
|
| 99 |
+
'/root/envs/evalkit_tf446/include '
|
| 100 |
+
' '
|
| 101 |
+
' '
|
| 102 |
+
' '
|
| 103 |
+
"' 'LDFLAGS=-Wl,-O2 -Wl,--sort-common -Wl,--as-needed "
|
| 104 |
+
'-Wl,-z,relro -Wl,-z,now -Wl,--disable-new-dtags '
|
| 105 |
+
'-Wl,--gc-sections '
|
| 106 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 107 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 108 |
+
"-L/root/envs/evalkit_tf446/lib' "
|
| 109 |
+
"'CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem "
|
| 110 |
+
'/root/envs/evalkit_tf446/include '
|
| 111 |
+
"-I/root/envs/evalkit_tf446/include' "
|
| 112 |
+
"'CPP=/croot/python-split_1733933809325/_build_env/bin/x86_64-conda_cos6-linux-gnu-cpp' "
|
| 113 |
+
"'PKG_CONFIG_PATH=/root/envs/evalkit_tf446/lib/pkgconfig'",
|
| 114 |
+
'CONFINCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 115 |
+
'CONFINCLUDEPY': '/root/envs/evalkit_tf446/include/python3.10',
|
| 116 |
+
'COREPYTHONPATH': '',
|
| 117 |
+
'COVERAGE_INFO': '/croot/python-split_1733933809325/work/build-static/coverage.info',
|
| 118 |
+
'COVERAGE_REPORT': '/croot/python-split_1733933809325/work/build-static/lcov-report',
|
| 119 |
+
'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov '
|
| 120 |
+
'report"',
|
| 121 |
+
'CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 122 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 123 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 124 |
+
'/root/envs/evalkit_tf446/include '
|
| 125 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 126 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 127 |
+
'/root/envs/evalkit_tf446/include '
|
| 128 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 129 |
+
'CXX': 'x86_64-conda_cos6-linux-gnu-c++ -pthread',
|
| 130 |
+
'DESTDIRS': '/root/envs/evalkit_tf446 '
|
| 131 |
+
'/root/envs/evalkit_tf446/lib '
|
| 132 |
+
'/root/envs/evalkit_tf446/lib/python3.10 '
|
| 133 |
+
'/root/envs/evalkit_tf446/lib/python3.10/lib-dynload',
|
| 134 |
+
'DESTLIB': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 135 |
+
'DESTPATH': '',
|
| 136 |
+
'DESTSHARED': '/root/envs/evalkit_tf446/lib/python3.10/lib-dynload',
|
| 137 |
+
'DFLAGS': '',
|
| 138 |
+
'DIRMODE': 755,
|
| 139 |
+
'DIST': 'README.rst ChangeLog configure configure.ac acconfig.h pyconfig.h.in '
|
| 140 |
+
'Makefile.pre.in Include Lib Misc Ext-dummy',
|
| 141 |
+
'DISTDIRS': 'Include Lib Misc Ext-dummy',
|
| 142 |
+
'DISTFILES': 'README.rst ChangeLog configure configure.ac acconfig.h '
|
| 143 |
+
'pyconfig.h.in Makefile.pre.in',
|
| 144 |
+
'DLINCLDIR': '.',
|
| 145 |
+
'DLLLIBRARY': '',
|
| 146 |
+
'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0,
|
| 147 |
+
'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0,
|
| 148 |
+
'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1,
|
| 149 |
+
'DTRACE': '',
|
| 150 |
+
'DTRACE_DEPS': '\\',
|
| 151 |
+
'DTRACE_HEADERS': '',
|
| 152 |
+
'DTRACE_OBJS': '',
|
| 153 |
+
'DYNLOADFILE': 'dynload_shlib.o',
|
| 154 |
+
'ENABLE_IPV6': 1,
|
| 155 |
+
'ENSUREPIP': 'no',
|
| 156 |
+
'EXE': '',
|
| 157 |
+
'EXEMODE': 755,
|
| 158 |
+
'EXPERIMENTAL_ISOLATED_SUBINTERPRETERS': 0,
|
| 159 |
+
'EXPORTSFROM': '',
|
| 160 |
+
'EXPORTSYMS': '',
|
| 161 |
+
'EXTRATESTOPTS': '',
|
| 162 |
+
'EXT_SUFFIX': '.cpython-310-x86_64-linux-gnu.so',
|
| 163 |
+
'FILEMODE': 644,
|
| 164 |
+
'FLOAT_WORDS_BIGENDIAN': 0,
|
| 165 |
+
'FLOCK_NEEDS_LIBBSD': 0,
|
| 166 |
+
'GETPGRP_HAVE_ARG': 0,
|
| 167 |
+
'GITBRANCH': '',
|
| 168 |
+
'GITTAG': '',
|
| 169 |
+
'GITVERSION': '',
|
| 170 |
+
'GNULD': 'no',
|
| 171 |
+
'HAVE_ACCEPT4': 1,
|
| 172 |
+
'HAVE_ACOSH': 1,
|
| 173 |
+
'HAVE_ADDRINFO': 1,
|
| 174 |
+
'HAVE_ALARM': 1,
|
| 175 |
+
'HAVE_ALIGNED_REQUIRED': 0,
|
| 176 |
+
'HAVE_ALLOCA_H': 1,
|
| 177 |
+
'HAVE_ALTZONE': 0,
|
| 178 |
+
'HAVE_ASINH': 1,
|
| 179 |
+
'HAVE_ASM_TYPES_H': 1,
|
| 180 |
+
'HAVE_ATANH': 1,
|
| 181 |
+
'HAVE_BIND_TEXTDOMAIN_CODESET': 1,
|
| 182 |
+
'HAVE_BLUETOOTH_BLUETOOTH_H': 0,
|
| 183 |
+
'HAVE_BLUETOOTH_H': 0,
|
| 184 |
+
'HAVE_BROKEN_MBSTOWCS': 0,
|
| 185 |
+
'HAVE_BROKEN_NICE': 0,
|
| 186 |
+
'HAVE_BROKEN_PIPE_BUF': 0,
|
| 187 |
+
'HAVE_BROKEN_POLL': 0,
|
| 188 |
+
'HAVE_BROKEN_POSIX_SEMAPHORES': 0,
|
| 189 |
+
'HAVE_BROKEN_PTHREAD_SIGMASK': 0,
|
| 190 |
+
'HAVE_BROKEN_SEM_GETVALUE': 0,
|
| 191 |
+
'HAVE_BROKEN_UNSETENV': 0,
|
| 192 |
+
'HAVE_BUILTIN_ATOMIC': 1,
|
| 193 |
+
'HAVE_CHFLAGS': 0,
|
| 194 |
+
'HAVE_CHOWN': 1,
|
| 195 |
+
'HAVE_CHROOT': 1,
|
| 196 |
+
'HAVE_CLOCK': 1,
|
| 197 |
+
'HAVE_CLOCK_GETRES': 1,
|
| 198 |
+
'HAVE_CLOCK_GETTIME': 1,
|
| 199 |
+
'HAVE_CLOCK_SETTIME': 1,
|
| 200 |
+
'HAVE_CLOSE_RANGE': 0,
|
| 201 |
+
'HAVE_COMPUTED_GOTOS': 1,
|
| 202 |
+
'HAVE_CONFSTR': 1,
|
| 203 |
+
'HAVE_CONIO_H': 0,
|
| 204 |
+
'HAVE_COPYSIGN': 1,
|
| 205 |
+
'HAVE_COPY_FILE_RANGE': 0,
|
| 206 |
+
'HAVE_CRYPT_H': 1,
|
| 207 |
+
'HAVE_CRYPT_R': 1,
|
| 208 |
+
'HAVE_CTERMID': 1,
|
| 209 |
+
'HAVE_CTERMID_R': 0,
|
| 210 |
+
'HAVE_CURSES_FILTER': 1,
|
| 211 |
+
'HAVE_CURSES_H': 1,
|
| 212 |
+
'HAVE_CURSES_HAS_KEY': 1,
|
| 213 |
+
'HAVE_CURSES_IMMEDOK': 1,
|
| 214 |
+
'HAVE_CURSES_IS_PAD': 1,
|
| 215 |
+
'HAVE_CURSES_IS_TERM_RESIZED': 1,
|
| 216 |
+
'HAVE_CURSES_RESIZETERM': 1,
|
| 217 |
+
'HAVE_CURSES_RESIZE_TERM': 1,
|
| 218 |
+
'HAVE_CURSES_SYNCOK': 1,
|
| 219 |
+
'HAVE_CURSES_TYPEAHEAD': 1,
|
| 220 |
+
'HAVE_CURSES_USE_ENV': 1,
|
| 221 |
+
'HAVE_CURSES_WCHGAT': 1,
|
| 222 |
+
'HAVE_DECL_ISFINITE': 1,
|
| 223 |
+
'HAVE_DECL_ISINF': 1,
|
| 224 |
+
'HAVE_DECL_ISNAN': 1,
|
| 225 |
+
'HAVE_DECL_RTLD_DEEPBIND': 1,
|
| 226 |
+
'HAVE_DECL_RTLD_GLOBAL': 1,
|
| 227 |
+
'HAVE_DECL_RTLD_LAZY': 1,
|
| 228 |
+
'HAVE_DECL_RTLD_LOCAL': 1,
|
| 229 |
+
'HAVE_DECL_RTLD_MEMBER': 0,
|
| 230 |
+
'HAVE_DECL_RTLD_NODELETE': 1,
|
| 231 |
+
'HAVE_DECL_RTLD_NOLOAD': 1,
|
| 232 |
+
'HAVE_DECL_RTLD_NOW': 1,
|
| 233 |
+
'HAVE_DECL_TZNAME': 0,
|
| 234 |
+
'HAVE_DEVICE_MACROS': 1,
|
| 235 |
+
'HAVE_DEV_PTC': 0,
|
| 236 |
+
'HAVE_DEV_PTMX': 1,
|
| 237 |
+
'HAVE_DIRECT_H': 0,
|
| 238 |
+
'HAVE_DIRENT_D_TYPE': 1,
|
| 239 |
+
'HAVE_DIRENT_H': 1,
|
| 240 |
+
'HAVE_DIRFD': 1,
|
| 241 |
+
'HAVE_DLFCN_H': 1,
|
| 242 |
+
'HAVE_DLOPEN': 1,
|
| 243 |
+
'HAVE_DUP2': 1,
|
| 244 |
+
'HAVE_DUP3': 1,
|
| 245 |
+
'HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH': 0,
|
| 246 |
+
'HAVE_DYNAMIC_LOADING': 1,
|
| 247 |
+
'HAVE_ENDIAN_H': 1,
|
| 248 |
+
'HAVE_EPOLL': 1,
|
| 249 |
+
'HAVE_EPOLL_CREATE1': 1,
|
| 250 |
+
'HAVE_ERF': 1,
|
| 251 |
+
'HAVE_ERFC': 1,
|
| 252 |
+
'HAVE_ERRNO_H': 1,
|
| 253 |
+
'HAVE_EVENTFD': 1,
|
| 254 |
+
'HAVE_EXECV': 1,
|
| 255 |
+
'HAVE_EXPLICIT_BZERO': 0,
|
| 256 |
+
'HAVE_EXPLICIT_MEMSET': 0,
|
| 257 |
+
'HAVE_EXPM1': 1,
|
| 258 |
+
'HAVE_FACCESSAT': 1,
|
| 259 |
+
'HAVE_FCHDIR': 1,
|
| 260 |
+
'HAVE_FCHMOD': 1,
|
| 261 |
+
'HAVE_FCHMODAT': 1,
|
| 262 |
+
'HAVE_FCHOWN': 1,
|
| 263 |
+
'HAVE_FCHOWNAT': 1,
|
| 264 |
+
'HAVE_FCNTL_H': 1,
|
| 265 |
+
'HAVE_FDATASYNC': 1,
|
| 266 |
+
'HAVE_FDOPENDIR': 1,
|
| 267 |
+
'HAVE_FDWALK': 0,
|
| 268 |
+
'HAVE_FEXECVE': 1,
|
| 269 |
+
'HAVE_FINITE': 1,
|
| 270 |
+
'HAVE_FLOCK': 1,
|
| 271 |
+
'HAVE_FORK': 1,
|
| 272 |
+
'HAVE_FORKPTY': 1,
|
| 273 |
+
'HAVE_FPATHCONF': 1,
|
| 274 |
+
'HAVE_FSEEK64': 0,
|
| 275 |
+
'HAVE_FSEEKO': 1,
|
| 276 |
+
'HAVE_FSTATAT': 1,
|
| 277 |
+
'HAVE_FSTATVFS': 1,
|
| 278 |
+
'HAVE_FSYNC': 1,
|
| 279 |
+
'HAVE_FTELL64': 0,
|
| 280 |
+
'HAVE_FTELLO': 1,
|
| 281 |
+
'HAVE_FTIME': 1,
|
| 282 |
+
'HAVE_FTRUNCATE': 1,
|
| 283 |
+
'HAVE_FUTIMENS': 1,
|
| 284 |
+
'HAVE_FUTIMES': 1,
|
| 285 |
+
'HAVE_FUTIMESAT': 1,
|
| 286 |
+
'HAVE_GAI_STRERROR': 1,
|
| 287 |
+
'HAVE_GAMMA': 1,
|
| 288 |
+
'HAVE_GCC_ASM_FOR_MC68881': 0,
|
| 289 |
+
'HAVE_GCC_ASM_FOR_X64': 1,
|
| 290 |
+
'HAVE_GCC_ASM_FOR_X87': 1,
|
| 291 |
+
'HAVE_GCC_UINT128_T': 1,
|
| 292 |
+
'HAVE_GETADDRINFO': 1,
|
| 293 |
+
'HAVE_GETC_UNLOCKED': 1,
|
| 294 |
+
'HAVE_GETENTROPY': 0,
|
| 295 |
+
'HAVE_GETGRGID_R': 1,
|
| 296 |
+
'HAVE_GETGRNAM_R': 1,
|
| 297 |
+
'HAVE_GETGROUPLIST': 1,
|
| 298 |
+
'HAVE_GETGROUPS': 1,
|
| 299 |
+
'HAVE_GETHOSTBYNAME': 0,
|
| 300 |
+
'HAVE_GETHOSTBYNAME_R': 1,
|
| 301 |
+
'HAVE_GETHOSTBYNAME_R_3_ARG': 0,
|
| 302 |
+
'HAVE_GETHOSTBYNAME_R_5_ARG': 0,
|
| 303 |
+
'HAVE_GETHOSTBYNAME_R_6_ARG': 1,
|
| 304 |
+
'HAVE_GETITIMER': 1,
|
| 305 |
+
'HAVE_GETLOADAVG': 1,
|
| 306 |
+
'HAVE_GETLOGIN': 1,
|
| 307 |
+
'HAVE_GETNAMEINFO': 1,
|
| 308 |
+
'HAVE_GETPAGESIZE': 1,
|
| 309 |
+
'HAVE_GETPEERNAME': 1,
|
| 310 |
+
'HAVE_GETPGID': 1,
|
| 311 |
+
'HAVE_GETPGRP': 1,
|
| 312 |
+
'HAVE_GETPID': 1,
|
| 313 |
+
'HAVE_GETPRIORITY': 1,
|
| 314 |
+
'HAVE_GETPWENT': 1,
|
| 315 |
+
'HAVE_GETPWNAM_R': 1,
|
| 316 |
+
'HAVE_GETPWUID_R': 1,
|
| 317 |
+
'HAVE_GETRANDOM': 0,
|
| 318 |
+
'HAVE_GETRANDOM_SYSCALL': 1,
|
| 319 |
+
'HAVE_GETRESGID': 1,
|
| 320 |
+
'HAVE_GETRESUID': 1,
|
| 321 |
+
'HAVE_GETSID': 1,
|
| 322 |
+
'HAVE_GETSPENT': 1,
|
| 323 |
+
'HAVE_GETSPNAM': 1,
|
| 324 |
+
'HAVE_GETWD': 1,
|
| 325 |
+
'HAVE_GLIBC_MEMMOVE_BUG': 0,
|
| 326 |
+
'HAVE_GRP_H': 1,
|
| 327 |
+
'HAVE_HSTRERROR': 1,
|
| 328 |
+
'HAVE_HTOLE64': 1,
|
| 329 |
+
'HAVE_HYPOT': 1,
|
| 330 |
+
'HAVE_IEEEFP_H': 0,
|
| 331 |
+
'HAVE_IF_NAMEINDEX': 1,
|
| 332 |
+
'HAVE_INET_ATON': 1,
|
| 333 |
+
'HAVE_INET_PTON': 1,
|
| 334 |
+
'HAVE_INITGROUPS': 1,
|
| 335 |
+
'HAVE_INTTYPES_H': 1,
|
| 336 |
+
'HAVE_IO_H': 0,
|
| 337 |
+
'HAVE_IPA_PURE_CONST_BUG': 0,
|
| 338 |
+
'HAVE_KILL': 1,
|
| 339 |
+
'HAVE_KILLPG': 1,
|
| 340 |
+
'HAVE_KQUEUE': 0,
|
| 341 |
+
'HAVE_LANGINFO_H': 1,
|
| 342 |
+
'HAVE_LARGEFILE_SUPPORT': 0,
|
| 343 |
+
'HAVE_LCHFLAGS': 0,
|
| 344 |
+
'HAVE_LCHMOD': 0,
|
| 345 |
+
'HAVE_LCHOWN': 1,
|
| 346 |
+
'HAVE_LGAMMA': 1,
|
| 347 |
+
'HAVE_LIBDL': 1,
|
| 348 |
+
'HAVE_LIBDLD': 0,
|
| 349 |
+
'HAVE_LIBIEEE': 0,
|
| 350 |
+
'HAVE_LIBINTL_H': 1,
|
| 351 |
+
'HAVE_LIBREADLINE': 1,
|
| 352 |
+
'HAVE_LIBRESOLV': 0,
|
| 353 |
+
'HAVE_LIBSENDFILE': 0,
|
| 354 |
+
'HAVE_LIBUTIL_H': 0,
|
| 355 |
+
'HAVE_LIBUUID': 1,
|
| 356 |
+
'HAVE_LINK': 1,
|
| 357 |
+
'HAVE_LINKAT': 1,
|
| 358 |
+
'HAVE_LINUX_AUXVEC_H': 1,
|
| 359 |
+
'HAVE_LINUX_CAN_BCM_H': 1,
|
| 360 |
+
'HAVE_LINUX_CAN_H': 1,
|
| 361 |
+
'HAVE_LINUX_CAN_J1939_H': 0,
|
| 362 |
+
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 1,
|
| 363 |
+
'HAVE_LINUX_CAN_RAW_H': 1,
|
| 364 |
+
'HAVE_LINUX_CAN_RAW_JOIN_FILTERS': 1,
|
| 365 |
+
'HAVE_LINUX_MEMFD_H': 1,
|
| 366 |
+
'HAVE_LINUX_NETLINK_H': 1,
|
| 367 |
+
'HAVE_LINUX_QRTR_H': 0,
|
| 368 |
+
'HAVE_LINUX_RANDOM_H': 1,
|
| 369 |
+
'HAVE_LINUX_TIPC_H': 1,
|
| 370 |
+
'HAVE_LINUX_VM_SOCKETS_H': 1,
|
| 371 |
+
'HAVE_LINUX_WAIT_H': 1,
|
| 372 |
+
'HAVE_LOCKF': 1,
|
| 373 |
+
'HAVE_LOG1P': 1,
|
| 374 |
+
'HAVE_LOG2': 1,
|
| 375 |
+
'HAVE_LONG_DOUBLE': 1,
|
| 376 |
+
'HAVE_LSTAT': 1,
|
| 377 |
+
'HAVE_LUTIMES': 1,
|
| 378 |
+
'HAVE_MADVISE': 1,
|
| 379 |
+
'HAVE_MAKEDEV': 1,
|
| 380 |
+
'HAVE_MBRTOWC': 1,
|
| 381 |
+
'HAVE_MEMFD_CREATE': 0,
|
| 382 |
+
'HAVE_MEMORY_H': 1,
|
| 383 |
+
'HAVE_MEMRCHR': 1,
|
| 384 |
+
'HAVE_MKDIRAT': 1,
|
| 385 |
+
'HAVE_MKFIFO': 1,
|
| 386 |
+
'HAVE_MKFIFOAT': 1,
|
| 387 |
+
'HAVE_MKNOD': 1,
|
| 388 |
+
'HAVE_MKNODAT': 1,
|
| 389 |
+
'HAVE_MKTIME': 1,
|
| 390 |
+
'HAVE_MMAP': 1,
|
| 391 |
+
'HAVE_MREMAP': 1,
|
| 392 |
+
'HAVE_NCURSES_H': 1,
|
| 393 |
+
'HAVE_NDIR_H': 0,
|
| 394 |
+
'HAVE_NETPACKET_PACKET_H': 1,
|
| 395 |
+
'HAVE_NET_IF_H': 1,
|
| 396 |
+
'HAVE_NICE': 1,
|
| 397 |
+
'HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION': 0,
|
| 398 |
+
'HAVE_OPENAT': 1,
|
| 399 |
+
'HAVE_OPENPTY': 1,
|
| 400 |
+
'HAVE_PATHCONF': 1,
|
| 401 |
+
'HAVE_PAUSE': 1,
|
| 402 |
+
'HAVE_PIPE2': 1,
|
| 403 |
+
'HAVE_PLOCK': 0,
|
| 404 |
+
'HAVE_POLL': 1,
|
| 405 |
+
'HAVE_POLL_H': 1,
|
| 406 |
+
'HAVE_POSIX_FADVISE': 1,
|
| 407 |
+
'HAVE_POSIX_FALLOCATE': 1,
|
| 408 |
+
'HAVE_POSIX_SPAWN': 1,
|
| 409 |
+
'HAVE_POSIX_SPAWNP': 1,
|
| 410 |
+
'HAVE_PREAD': 1,
|
| 411 |
+
'HAVE_PREADV': 1,
|
| 412 |
+
'HAVE_PREADV2': 0,
|
| 413 |
+
'HAVE_PRLIMIT': 1,
|
| 414 |
+
'HAVE_PROCESS_H': 0,
|
| 415 |
+
'HAVE_PROTOTYPES': 1,
|
| 416 |
+
'HAVE_PTHREAD_CONDATTR_SETCLOCK': 1,
|
| 417 |
+
'HAVE_PTHREAD_DESTRUCTOR': 0,
|
| 418 |
+
'HAVE_PTHREAD_GETCPUCLOCKID': 1,
|
| 419 |
+
'HAVE_PTHREAD_H': 1,
|
| 420 |
+
'HAVE_PTHREAD_INIT': 0,
|
| 421 |
+
'HAVE_PTHREAD_KILL': 1,
|
| 422 |
+
'HAVE_PTHREAD_SIGMASK': 1,
|
| 423 |
+
'HAVE_PTY_H': 1,
|
| 424 |
+
'HAVE_PWRITE': 1,
|
| 425 |
+
'HAVE_PWRITEV': 1,
|
| 426 |
+
'HAVE_PWRITEV2': 0,
|
| 427 |
+
'HAVE_READLINK': 1,
|
| 428 |
+
'HAVE_READLINKAT': 1,
|
| 429 |
+
'HAVE_READV': 1,
|
| 430 |
+
'HAVE_REALPATH': 1,
|
| 431 |
+
'HAVE_RENAMEAT': 1,
|
| 432 |
+
'HAVE_RL_APPEND_HISTORY': 1,
|
| 433 |
+
'HAVE_RL_CATCH_SIGNAL': 1,
|
| 434 |
+
'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1,
|
| 435 |
+
'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1,
|
| 436 |
+
'HAVE_RL_COMPLETION_MATCHES': 1,
|
| 437 |
+
'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1,
|
| 438 |
+
'HAVE_RL_PRE_INPUT_HOOK': 1,
|
| 439 |
+
'HAVE_RL_RESIZE_TERMINAL': 1,
|
| 440 |
+
'HAVE_ROUND': 1,
|
| 441 |
+
'HAVE_RTPSPAWN': 0,
|
| 442 |
+
'HAVE_SCHED_GET_PRIORITY_MAX': 1,
|
| 443 |
+
'HAVE_SCHED_H': 1,
|
| 444 |
+
'HAVE_SCHED_RR_GET_INTERVAL': 1,
|
| 445 |
+
'HAVE_SCHED_SETAFFINITY': 1,
|
| 446 |
+
'HAVE_SCHED_SETPARAM': 1,
|
| 447 |
+
'HAVE_SCHED_SETSCHEDULER': 1,
|
| 448 |
+
'HAVE_SEM_CLOCKWAIT': 0,
|
| 449 |
+
'HAVE_SEM_GETVALUE': 1,
|
| 450 |
+
'HAVE_SEM_OPEN': 1,
|
| 451 |
+
'HAVE_SEM_TIMEDWAIT': 1,
|
| 452 |
+
'HAVE_SEM_UNLINK': 1,
|
| 453 |
+
'HAVE_SENDFILE': 1,
|
| 454 |
+
'HAVE_SETEGID': 1,
|
| 455 |
+
'HAVE_SETEUID': 1,
|
| 456 |
+
'HAVE_SETGID': 1,
|
| 457 |
+
'HAVE_SETGROUPS': 1,
|
| 458 |
+
'HAVE_SETHOSTNAME': 1,
|
| 459 |
+
'HAVE_SETITIMER': 1,
|
| 460 |
+
'HAVE_SETLOCALE': 1,
|
| 461 |
+
'HAVE_SETPGID': 1,
|
| 462 |
+
'HAVE_SETPGRP': 1,
|
| 463 |
+
'HAVE_SETPRIORITY': 1,
|
| 464 |
+
'HAVE_SETREGID': 1,
|
| 465 |
+
'HAVE_SETRESGID': 1,
|
| 466 |
+
'HAVE_SETRESUID': 1,
|
| 467 |
+
'HAVE_SETREUID': 1,
|
| 468 |
+
'HAVE_SETSID': 1,
|
| 469 |
+
'HAVE_SETUID': 1,
|
| 470 |
+
'HAVE_SETVBUF': 1,
|
| 471 |
+
'HAVE_SHADOW_H': 1,
|
| 472 |
+
'HAVE_SHM_OPEN': 1,
|
| 473 |
+
'HAVE_SHM_UNLINK': 1,
|
| 474 |
+
'HAVE_SIGACTION': 1,
|
| 475 |
+
'HAVE_SIGALTSTACK': 1,
|
| 476 |
+
'HAVE_SIGFILLSET': 1,
|
| 477 |
+
'HAVE_SIGINFO_T_SI_BAND': 1,
|
| 478 |
+
'HAVE_SIGINTERRUPT': 1,
|
| 479 |
+
'HAVE_SIGNAL_H': 1,
|
| 480 |
+
'HAVE_SIGPENDING': 1,
|
| 481 |
+
'HAVE_SIGRELSE': 1,
|
| 482 |
+
'HAVE_SIGTIMEDWAIT': 1,
|
| 483 |
+
'HAVE_SIGWAIT': 1,
|
| 484 |
+
'HAVE_SIGWAITINFO': 1,
|
| 485 |
+
'HAVE_SNPRINTF': 1,
|
| 486 |
+
'HAVE_SOCKADDR_ALG': 1,
|
| 487 |
+
'HAVE_SOCKADDR_SA_LEN': 0,
|
| 488 |
+
'HAVE_SOCKADDR_STORAGE': 1,
|
| 489 |
+
'HAVE_SOCKETPAIR': 1,
|
| 490 |
+
'HAVE_SPAWN_H': 1,
|
| 491 |
+
'HAVE_SPLICE': 1,
|
| 492 |
+
'HAVE_SSIZE_T': 1,
|
| 493 |
+
'HAVE_STATVFS': 1,
|
| 494 |
+
'HAVE_STAT_TV_NSEC': 1,
|
| 495 |
+
'HAVE_STAT_TV_NSEC2': 0,
|
| 496 |
+
'HAVE_STDARG_PROTOTYPES': 1,
|
| 497 |
+
'HAVE_STDINT_H': 1,
|
| 498 |
+
'HAVE_STDLIB_H': 1,
|
| 499 |
+
'HAVE_STD_ATOMIC': 1,
|
| 500 |
+
'HAVE_STRFTIME': 1,
|
| 501 |
+
'HAVE_STRINGS_H': 1,
|
| 502 |
+
'HAVE_STRING_H': 1,
|
| 503 |
+
'HAVE_STRLCPY': 0,
|
| 504 |
+
'HAVE_STROPTS_H': 0,
|
| 505 |
+
'HAVE_STRSIGNAL': 1,
|
| 506 |
+
'HAVE_STRUCT_PASSWD_PW_GECOS': 1,
|
| 507 |
+
'HAVE_STRUCT_PASSWD_PW_PASSWD': 1,
|
| 508 |
+
'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0,
|
| 509 |
+
'HAVE_STRUCT_STAT_ST_BLKSIZE': 1,
|
| 510 |
+
'HAVE_STRUCT_STAT_ST_BLOCKS': 1,
|
| 511 |
+
'HAVE_STRUCT_STAT_ST_FLAGS': 0,
|
| 512 |
+
'HAVE_STRUCT_STAT_ST_GEN': 0,
|
| 513 |
+
'HAVE_STRUCT_STAT_ST_RDEV': 1,
|
| 514 |
+
'HAVE_STRUCT_TM_TM_ZONE': 1,
|
| 515 |
+
'HAVE_SYMLINK': 1,
|
| 516 |
+
'HAVE_SYMLINKAT': 1,
|
| 517 |
+
'HAVE_SYNC': 1,
|
| 518 |
+
'HAVE_SYSCONF': 1,
|
| 519 |
+
'HAVE_SYSEXITS_H': 1,
|
| 520 |
+
'HAVE_SYS_AUDIOIO_H': 0,
|
| 521 |
+
'HAVE_SYS_AUXV_H': 1,
|
| 522 |
+
'HAVE_SYS_BSDTTY_H': 0,
|
| 523 |
+
'HAVE_SYS_DEVPOLL_H': 0,
|
| 524 |
+
'HAVE_SYS_DIR_H': 0,
|
| 525 |
+
'HAVE_SYS_ENDIAN_H': 0,
|
| 526 |
+
'HAVE_SYS_EPOLL_H': 1,
|
| 527 |
+
'HAVE_SYS_EVENTFD_H': 1,
|
| 528 |
+
'HAVE_SYS_EVENT_H': 0,
|
| 529 |
+
'HAVE_SYS_FILE_H': 1,
|
| 530 |
+
'HAVE_SYS_IOCTL_H': 1,
|
| 531 |
+
'HAVE_SYS_KERN_CONTROL_H': 0,
|
| 532 |
+
'HAVE_SYS_LOADAVG_H': 0,
|
| 533 |
+
'HAVE_SYS_LOCK_H': 0,
|
| 534 |
+
'HAVE_SYS_MEMFD_H': 0,
|
| 535 |
+
'HAVE_SYS_MKDEV_H': 0,
|
| 536 |
+
'HAVE_SYS_MMAN_H': 1,
|
| 537 |
+
'HAVE_SYS_MODEM_H': 0,
|
| 538 |
+
'HAVE_SYS_NDIR_H': 0,
|
| 539 |
+
'HAVE_SYS_PARAM_H': 1,
|
| 540 |
+
'HAVE_SYS_POLL_H': 1,
|
| 541 |
+
'HAVE_SYS_RANDOM_H': 0,
|
| 542 |
+
'HAVE_SYS_RESOURCE_H': 1,
|
| 543 |
+
'HAVE_SYS_SELECT_H': 1,
|
| 544 |
+
'HAVE_SYS_SENDFILE_H': 1,
|
| 545 |
+
'HAVE_SYS_SOCKET_H': 1,
|
| 546 |
+
'HAVE_SYS_STATVFS_H': 1,
|
| 547 |
+
'HAVE_SYS_STAT_H': 1,
|
| 548 |
+
'HAVE_SYS_SYSCALL_H': 1,
|
| 549 |
+
'HAVE_SYS_SYSMACROS_H': 1,
|
| 550 |
+
'HAVE_SYS_SYS_DOMAIN_H': 0,
|
| 551 |
+
'HAVE_SYS_TERMIO_H': 0,
|
| 552 |
+
'HAVE_SYS_TIMES_H': 1,
|
| 553 |
+
'HAVE_SYS_TIME_H': 1,
|
| 554 |
+
'HAVE_SYS_TYPES_H': 1,
|
| 555 |
+
'HAVE_SYS_UIO_H': 1,
|
| 556 |
+
'HAVE_SYS_UN_H': 1,
|
| 557 |
+
'HAVE_SYS_UTSNAME_H': 1,
|
| 558 |
+
'HAVE_SYS_WAIT_H': 1,
|
| 559 |
+
'HAVE_SYS_XATTR_H': 1,
|
| 560 |
+
'HAVE_TCGETPGRP': 1,
|
| 561 |
+
'HAVE_TCSETPGRP': 1,
|
| 562 |
+
'HAVE_TEMPNAM': 1,
|
| 563 |
+
'HAVE_TERMIOS_H': 1,
|
| 564 |
+
'HAVE_TERM_H': 1,
|
| 565 |
+
'HAVE_TGAMMA': 1,
|
| 566 |
+
'HAVE_TIMEGM': 1,
|
| 567 |
+
'HAVE_TIMES': 1,
|
| 568 |
+
'HAVE_TMPFILE': 1,
|
| 569 |
+
'HAVE_TMPNAM': 1,
|
| 570 |
+
'HAVE_TMPNAM_R': 1,
|
| 571 |
+
'HAVE_TM_ZONE': 1,
|
| 572 |
+
'HAVE_TRUNCATE': 1,
|
| 573 |
+
'HAVE_TZNAME': 0,
|
| 574 |
+
'HAVE_UCS4_TCL': 0,
|
| 575 |
+
'HAVE_UNAME': 1,
|
| 576 |
+
'HAVE_UNISTD_H': 1,
|
| 577 |
+
'HAVE_UNLINKAT': 1,
|
| 578 |
+
'HAVE_USABLE_WCHAR_T': 0,
|
| 579 |
+
'HAVE_UTIL_H': 0,
|
| 580 |
+
'HAVE_UTIMENSAT': 1,
|
| 581 |
+
'HAVE_UTIMES': 1,
|
| 582 |
+
'HAVE_UTIME_H': 1,
|
| 583 |
+
'HAVE_UUID_CREATE': 0,
|
| 584 |
+
'HAVE_UUID_ENC_BE': 0,
|
| 585 |
+
'HAVE_UUID_GENERATE_TIME_SAFE': 1,
|
| 586 |
+
'HAVE_UUID_H': 1,
|
| 587 |
+
'HAVE_UUID_UUID_H': 1,
|
| 588 |
+
'HAVE_VFORK': 1,
|
| 589 |
+
'HAVE_WAIT3': 1,
|
| 590 |
+
'HAVE_WAIT4': 1,
|
| 591 |
+
'HAVE_WAITID': 1,
|
| 592 |
+
'HAVE_WAITPID': 1,
|
| 593 |
+
'HAVE_WCHAR_H': 1,
|
| 594 |
+
'HAVE_WCSCOLL': 1,
|
| 595 |
+
'HAVE_WCSFTIME': 1,
|
| 596 |
+
'HAVE_WCSXFRM': 1,
|
| 597 |
+
'HAVE_WMEMCMP': 1,
|
| 598 |
+
'HAVE_WORKING_TZSET': 1,
|
| 599 |
+
'HAVE_WRITEV': 1,
|
| 600 |
+
'HAVE_ZLIB_COPY': 1,
|
| 601 |
+
'HAVE__GETPTY': 0,
|
| 602 |
+
'HOST_GNU_TYPE': 'x86_64-conda_cos6-linux-gnu',
|
| 603 |
+
'INCLDIRSTOMAKE': '/root/envs/evalkit_tf446/include '
|
| 604 |
+
'/root/envs/evalkit_tf446/include '
|
| 605 |
+
'/root/envs/evalkit_tf446/include/python3.10 '
|
| 606 |
+
'/root/envs/evalkit_tf446/include/python3.10',
|
| 607 |
+
'INCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 608 |
+
'INCLUDEPY': '/root/envs/evalkit_tf446/include/python3.10',
|
| 609 |
+
'INSTALL': '/usr/bin/install -c',
|
| 610 |
+
'INSTALL_DATA': '/usr/bin/install -c -m 644',
|
| 611 |
+
'INSTALL_PROGRAM': '/usr/bin/install -c',
|
| 612 |
+
'INSTALL_SCRIPT': '/usr/bin/install -c',
|
| 613 |
+
'INSTALL_SHARED': '/usr/bin/install -c -m 755',
|
| 614 |
+
'INSTSONAME': 'libpython3.10.a',
|
| 615 |
+
'IO_H': 'Modules/_io/_iomodule.h',
|
| 616 |
+
'IO_OBJS': '\\',
|
| 617 |
+
'LDCXXSHARED': 'x86_64-conda_cos6-linux-gnu-c++ -pthread -shared',
|
| 618 |
+
'LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 619 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 620 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 621 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 622 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 623 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 624 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 625 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 626 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 627 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 628 |
+
'LDLIBRARY': 'libpython3.10.a',
|
| 629 |
+
'LDLIBRARYDIR': '',
|
| 630 |
+
'LDSHARED': 'x86_64-conda_cos6-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 631 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 632 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 633 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 634 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 635 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 636 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 637 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 638 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 639 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 640 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 641 |
+
'LDVERSION': '3.10',
|
| 642 |
+
'LIBC': '',
|
| 643 |
+
'LIBDEST': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 644 |
+
'LIBDIR': '/root/envs/evalkit_tf446/lib',
|
| 645 |
+
'LIBFFI_INCLUDEDIR': '/root/envs/evalkit_tf446/include',
|
| 646 |
+
'LIBM': '-lm',
|
| 647 |
+
'LIBOBJDIR': 'Python/',
|
| 648 |
+
'LIBOBJS': '',
|
| 649 |
+
'LIBPC': '/root/envs/evalkit_tf446/lib/pkgconfig',
|
| 650 |
+
'LIBPL': '/root/envs/evalkit_tf446/lib/python3.10/config-3.10-x86_64-linux-gnu',
|
| 651 |
+
'LIBPYTHON': '',
|
| 652 |
+
'LIBRARY': 'libpython3.10.a',
|
| 653 |
+
'LIBRARY_DEPS': 'libpython3.10.a',
|
| 654 |
+
'LIBRARY_OBJS': '\\',
|
| 655 |
+
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
|
| 656 |
+
'LIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 657 |
+
'LIBSUBDIRS': 'asyncio \\',
|
| 658 |
+
'LINKCC': 'x86_64-conda_cos6-linux-gnu-gcc -pthread',
|
| 659 |
+
'LINKFORSHARED': '-Xlinker -export-dynamic',
|
| 660 |
+
'LIPO_32BIT_FLAGS': '',
|
| 661 |
+
'LIPO_INTEL64_FLAGS': '',
|
| 662 |
+
'LLVM_PROF_ERR': 'no',
|
| 663 |
+
'LLVM_PROF_FILE': '',
|
| 664 |
+
'LLVM_PROF_MERGER': 'true',
|
| 665 |
+
'LN': 'ln',
|
| 666 |
+
'LOCALMODLIBS': '',
|
| 667 |
+
'MACHDEP': 'linux',
|
| 668 |
+
'MACHDEP_OBJS': '',
|
| 669 |
+
'MACHDESTLIB': '/root/envs/evalkit_tf446/lib/python3.10',
|
| 670 |
+
'MACOSX_DEPLOYMENT_TARGET': '',
|
| 671 |
+
'MAINCC': 'x86_64-conda_cos6-linux-gnu-gcc -pthread',
|
| 672 |
+
'MAJOR_IN_MKDEV': 0,
|
| 673 |
+
'MAJOR_IN_SYSMACROS': 0,
|
| 674 |
+
'MAKESETUP': '/croot/python-split_1733933809325/work/Modules/makesetup',
|
| 675 |
+
'MANDIR': '/root/envs/evalkit_tf446/share/man',
|
| 676 |
+
'MKDIR_P': '/usr/bin/mkdir -p',
|
| 677 |
+
'MODBUILT_NAMES': 'posix errno pwd _sre _codecs _weakref _functools '
|
| 678 |
+
'_operator _collections _abc itertools atexit _signal '
|
| 679 |
+
'_stat time _thread _locale _io faulthandler '
|
| 680 |
+
'_tracemalloc _symtable xxsubtype',
|
| 681 |
+
'MODDISABLED_NAMES': '',
|
| 682 |
+
'MODLIBS': '',
|
| 683 |
+
'MODOBJS': 'Modules/posixmodule.o Modules/errnomodule.o '
|
| 684 |
+
'Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o '
|
| 685 |
+
'Modules/_weakref.o Modules/_functoolsmodule.o '
|
| 686 |
+
'Modules/_operator.o Modules/_collectionsmodule.o '
|
| 687 |
+
'Modules/_abc.o Modules/itertoolsmodule.o '
|
| 688 |
+
'Modules/atexitmodule.o Modules/signalmodule.o Modules/_stat.o '
|
| 689 |
+
'Modules/timemodule.o Modules/_threadmodule.o '
|
| 690 |
+
'Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o '
|
| 691 |
+
'Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o '
|
| 692 |
+
'Modules/textio.o Modules/stringio.o Modules/faulthandler.o '
|
| 693 |
+
'Modules/_tracemalloc.o Modules/symtablemodule.o '
|
| 694 |
+
'Modules/xxsubtype.o',
|
| 695 |
+
'MODULE_OBJS': '\\',
|
| 696 |
+
'MULTIARCH': 'x86_64-linux-gnu',
|
| 697 |
+
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
|
| 698 |
+
'MVWDELCH_IS_EXPRESSION': 1,
|
| 699 |
+
'NO_AS_NEEDED': '-Wl,--no-as-needed',
|
| 700 |
+
'OBJECT_OBJS': '\\',
|
| 701 |
+
'OPENSSL_INCLUDES': '-I/root/envs/evalkit_tf446/include',
|
| 702 |
+
'OPENSSL_LDFLAGS': '-L/root/envs/evalkit_tf446/lib',
|
| 703 |
+
'OPENSSL_LIBS': '-lssl -lcrypto',
|
| 704 |
+
'OPENSSL_RPATH': '',
|
| 705 |
+
'OPT': '-DNDEBUG -fwrapv -O2 -Wall',
|
| 706 |
+
'OTHER_LIBTOOL_OPT': '',
|
| 707 |
+
'PACKAGE_BUGREPORT': 0,
|
| 708 |
+
'PACKAGE_NAME': 0,
|
| 709 |
+
'PACKAGE_STRING': 0,
|
| 710 |
+
'PACKAGE_TARNAME': 0,
|
| 711 |
+
'PACKAGE_URL': 0,
|
| 712 |
+
'PACKAGE_VERSION': 0,
|
| 713 |
+
'PARSER_HEADERS': '\\',
|
| 714 |
+
'PARSER_OBJS': '\\ \\ Parser/myreadline.o Parser/tokenizer.o',
|
| 715 |
+
'PEGEN_HEADERS': '\\',
|
| 716 |
+
'PEGEN_OBJS': '\\',
|
| 717 |
+
'PGO_PROF_GEN_FLAG': '-fprofile-generate',
|
| 718 |
+
'PGO_PROF_USE_FLAG': ' ',
|
| 719 |
+
'PLATLIBDIR': 'lib',
|
| 720 |
+
'POBJS': '\\',
|
| 721 |
+
'POSIX_SEMAPHORES_NOT_ENABLED': 0,
|
| 722 |
+
'PROFILE_TASK': '-m test --pgo',
|
| 723 |
+
'PTHREAD_KEY_T_IS_COMPATIBLE_WITH_INT': 1,
|
| 724 |
+
'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1,
|
| 725 |
+
'PURIFY': '',
|
| 726 |
+
'PY3LIBRARY': '',
|
| 727 |
+
'PYLONG_BITS_IN_DIGIT': 0,
|
| 728 |
+
'PYTHON': 'python',
|
| 729 |
+
'PYTHONFRAMEWORK': '',
|
| 730 |
+
'PYTHONFRAMEWORKDIR': 'no-framework',
|
| 731 |
+
'PYTHONFRAMEWORKINSTALLDIR': '',
|
| 732 |
+
'PYTHONFRAMEWORKPREFIX': '',
|
| 733 |
+
'PYTHONPATH': '',
|
| 734 |
+
'PYTHON_FOR_BUILD': './python -E',
|
| 735 |
+
'PYTHON_FOR_REGEN': '',
|
| 736 |
+
'PYTHON_HEADERS': '\\',
|
| 737 |
+
'PYTHON_OBJS': '\\',
|
| 738 |
+
'PY_BUILD_ENVIRON': '',
|
| 739 |
+
'PY_BUILTIN_HASHLIB_HASHES': '"md5,sha1,sha256,sha512,sha3,blake2"',
|
| 740 |
+
'PY_BUILTIN_MODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG '
|
| 741 |
+
'-fwrapv -O2 -Wall -march=nocona -mtune=haswell '
|
| 742 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 743 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 744 |
+
'/root/envs/evalkit_tf446/include '
|
| 745 |
+
' '
|
| 746 |
+
' '
|
| 747 |
+
' '
|
| 748 |
+
' -march=nocona '
|
| 749 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 750 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 751 |
+
'-ffunction-sections -pipe -isystem '
|
| 752 |
+
'/root/envs/evalkit_tf446/include '
|
| 753 |
+
' '
|
| 754 |
+
' '
|
| 755 |
+
' '
|
| 756 |
+
' '
|
| 757 |
+
'-fno-semantic-interposition '
|
| 758 |
+
' '
|
| 759 |
+
' -g -std=c99 -Wextra '
|
| 760 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 761 |
+
'-Wno-missing-field-initializers '
|
| 762 |
+
'-Werror=implicit-function-declaration '
|
| 763 |
+
'-fvisibility=hidden '
|
| 764 |
+
' '
|
| 765 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 766 |
+
'-IObjects -IInclude -IPython -I. '
|
| 767 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 768 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 769 |
+
'/root/envs/evalkit_tf446/include '
|
| 770 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 771 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 772 |
+
'/root/envs/evalkit_tf446/include '
|
| 773 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 774 |
+
'-DPy_BUILD_CORE_BUILTIN',
|
| 775 |
+
'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 776 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 777 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 778 |
+
'-isystem '
|
| 779 |
+
'/root/envs/evalkit_tf446/include '
|
| 780 |
+
' '
|
| 781 |
+
' '
|
| 782 |
+
' '
|
| 783 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 784 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 785 |
+
'-isystem '
|
| 786 |
+
'/root/envs/evalkit_tf446/include '
|
| 787 |
+
' '
|
| 788 |
+
' '
|
| 789 |
+
' '
|
| 790 |
+
'',
|
| 791 |
+
'PY_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 792 |
+
' -g -std=c99 '
|
| 793 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 794 |
+
'-Wno-missing-field-initializers '
|
| 795 |
+
'-Werror=implicit-function-declaration '
|
| 796 |
+
'-fvisibility=hidden '
|
| 797 |
+
'-I/croot/python-split_1733933809325/work/Include/internal',
|
| 798 |
+
'PY_COERCE_C_LOCALE': 1,
|
| 799 |
+
'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 '
|
| 800 |
+
'-Wall -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 801 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 802 |
+
'-pipe -isystem '
|
| 803 |
+
'/root/envs/evalkit_tf446/include '
|
| 804 |
+
' '
|
| 805 |
+
' '
|
| 806 |
+
' '
|
| 807 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 808 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 809 |
+
'-pipe -isystem '
|
| 810 |
+
'/root/envs/evalkit_tf446/include '
|
| 811 |
+
' '
|
| 812 |
+
' '
|
| 813 |
+
' '
|
| 814 |
+
' -fno-semantic-interposition '
|
| 815 |
+
' '
|
| 816 |
+
'-g -std=c99 -Wextra -Wno-unused-result '
|
| 817 |
+
'-Wno-unused-parameter -Wno-missing-field-initializers '
|
| 818 |
+
'-Werror=implicit-function-declaration -fvisibility=hidden '
|
| 819 |
+
' '
|
| 820 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 821 |
+
'-IObjects -IInclude -IPython -I. '
|
| 822 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 823 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 824 |
+
'/root/envs/evalkit_tf446/include '
|
| 825 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 826 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 827 |
+
'/root/envs/evalkit_tf446/include '
|
| 828 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 829 |
+
'-DPy_BUILD_CORE',
|
| 830 |
+
'PY_CORE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 831 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 832 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 833 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 834 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 835 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 836 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 837 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 838 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 839 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 840 |
+
'-fno-semantic-interposition '
|
| 841 |
+
' -g',
|
| 842 |
+
'PY_CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 843 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 844 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 845 |
+
'/root/envs/evalkit_tf446/include '
|
| 846 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 847 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 848 |
+
'/root/envs/evalkit_tf446/include '
|
| 849 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 850 |
+
'PY_ENABLE_SHARED': 0,
|
| 851 |
+
'PY_FORMAT_SIZE_T': '"z"',
|
| 852 |
+
'PY_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 853 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 854 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 855 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 856 |
+
'-L/root/envs/evalkit_tf446/lib '
|
| 857 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 858 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 859 |
+
'-Wl,-rpath,/root/envs/evalkit_tf446/lib '
|
| 860 |
+
'-Wl,-rpath-link,/root/envs/evalkit_tf446/lib '
|
| 861 |
+
'-L/root/envs/evalkit_tf446/lib',
|
| 862 |
+
'PY_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 863 |
+
' -g',
|
| 864 |
+
'PY_SSL_DEFAULT_CIPHERS': 1,
|
| 865 |
+
'PY_SSL_DEFAULT_CIPHER_STRING': 0,
|
| 866 |
+
'PY_STDMODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv '
|
| 867 |
+
'-O2 -Wall -march=nocona -mtune=haswell '
|
| 868 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 869 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 870 |
+
'/root/envs/evalkit_tf446/include '
|
| 871 |
+
' '
|
| 872 |
+
' '
|
| 873 |
+
' '
|
| 874 |
+
' -march=nocona '
|
| 875 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 876 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 877 |
+
'-ffunction-sections -pipe -isystem '
|
| 878 |
+
'/root/envs/evalkit_tf446/include '
|
| 879 |
+
' '
|
| 880 |
+
' '
|
| 881 |
+
' '
|
| 882 |
+
' '
|
| 883 |
+
'-fno-semantic-interposition '
|
| 884 |
+
' -g -std=c99 '
|
| 885 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 886 |
+
'-Wno-missing-field-initializers '
|
| 887 |
+
'-Werror=implicit-function-declaration '
|
| 888 |
+
'-fvisibility=hidden '
|
| 889 |
+
' '
|
| 890 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 891 |
+
'-IObjects -IInclude -IPython -I. '
|
| 892 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 893 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 894 |
+
'/root/envs/evalkit_tf446/include '
|
| 895 |
+
'-I/root/envs/evalkit_tf446/include '
|
| 896 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 897 |
+
'/root/envs/evalkit_tf446/include '
|
| 898 |
+
'-I/root/envs/evalkit_tf446/include',
|
| 899 |
+
'Py_DEBUG': 0,
|
| 900 |
+
'Py_ENABLE_SHARED': 0,
|
| 901 |
+
'Py_HASH_ALGORITHM': 0,
|
| 902 |
+
'Py_TRACE_REFS': 0,
|
| 903 |
+
'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\',
|
| 904 |
+
'READELF': 'x86_64-conda_cos6-linux-gnu-readelf',
|
| 905 |
+
'RESSRCDIR': 'Mac/Resources/framework',
|
| 906 |
+
'RETSIGTYPE': 'void',
|
| 907 |
+
'RUNSHARED': '',
|
| 908 |
+
'SCRIPTDIR': '/root/envs/evalkit_tf446/lib',
|
| 909 |
+
'SETPGRP_HAVE_ARG': 0,
|
| 910 |
+
'SHELL': '/bin/sh',
|
| 911 |
+
'SHLIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 912 |
+
'SHLIB_SUFFIX': '.so',
|
| 913 |
+
'SHM_NEEDS_LIBRT': 1,
|
| 914 |
+
'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0,
|
| 915 |
+
'SITEPATH': '',
|
| 916 |
+
'SIZEOF_DOUBLE': 8,
|
| 917 |
+
'SIZEOF_FLOAT': 4,
|
| 918 |
+
'SIZEOF_FPOS_T': 16,
|
| 919 |
+
'SIZEOF_INT': 4,
|
| 920 |
+
'SIZEOF_LONG': 8,
|
| 921 |
+
'SIZEOF_LONG_DOUBLE': 16,
|
| 922 |
+
'SIZEOF_LONG_LONG': 8,
|
| 923 |
+
'SIZEOF_OFF_T': 8,
|
| 924 |
+
'SIZEOF_PID_T': 4,
|
| 925 |
+
'SIZEOF_PTHREAD_KEY_T': 4,
|
| 926 |
+
'SIZEOF_PTHREAD_T': 8,
|
| 927 |
+
'SIZEOF_SHORT': 2,
|
| 928 |
+
'SIZEOF_SIZE_T': 8,
|
| 929 |
+
'SIZEOF_TIME_T': 8,
|
| 930 |
+
'SIZEOF_UINTPTR_T': 8,
|
| 931 |
+
'SIZEOF_VOID_P': 8,
|
| 932 |
+
'SIZEOF_WCHAR_T': 4,
|
| 933 |
+
'SIZEOF__BOOL': 1,
|
| 934 |
+
'SOABI': 'cpython-310-x86_64-linux-gnu',
|
| 935 |
+
'SRCDIRS': 'Parser Objects Python Modules Modules/_io Programs',
|
| 936 |
+
'SRC_GDB_HOOKS': '/croot/python-split_1733933809325/work/Tools/gdb/libpython.py',
|
| 937 |
+
'STATIC_LIBPYTHON': 1,
|
| 938 |
+
'STDC_HEADERS': 1,
|
| 939 |
+
'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */",
|
| 940 |
+
'STRIPFLAG': '-s',
|
| 941 |
+
'SUBDIRS': '',
|
| 942 |
+
'SUBDIRSTOO': 'Include Lib Misc',
|
| 943 |
+
'SYSLIBS': '-lm',
|
| 944 |
+
'SYS_SELECT_WITH_SYS_TIME': 1,
|
| 945 |
+
'TCLTK_INCLUDES': '-I/root/envs/evalkit_tf446/include',
|
| 946 |
+
'TCLTK_LIBS': '-L/root/envs/evalkit_tf446/lib '
|
| 947 |
+
'-ltcl8.6 -ltk8.6',
|
| 948 |
+
'TESTOPTS': '',
|
| 949 |
+
'TESTPATH': '',
|
| 950 |
+
'TESTPYTHON': './python',
|
| 951 |
+
'TESTPYTHONOPTS': '',
|
| 952 |
+
'TESTRUNNER': './python '
|
| 953 |
+
'/croot/python-split_1733933809325/work/Tools/scripts/run_tests.py',
|
| 954 |
+
'TESTSUBDIRS': 'ctypes/test \\',
|
| 955 |
+
'TESTTIMEOUT': 1200,
|
| 956 |
+
'TEST_MODULES': 'yes',
|
| 957 |
+
'THREAD_STACK_SIZE': 0,
|
| 958 |
+
'TIMEMODULE_LIB': 0,
|
| 959 |
+
'TIME_WITH_SYS_TIME': 1,
|
| 960 |
+
'TM_IN_SYS_TIME': 0,
|
| 961 |
+
'TZPATH': '/root/envs/evalkit_tf446/share/zoneinfo:/root/envs/evalkit_tf446/share/tzinfo',
|
| 962 |
+
'UNICODE_DEPS': '\\',
|
| 963 |
+
'UNIVERSALSDK': '',
|
| 964 |
+
'UPDATE_FILE': '/croot/python-split_1733933809325/work/Tools/scripts/update_file.py',
|
| 965 |
+
'USE_COMPUTED_GOTOS': 1,
|
| 966 |
+
'VERSION': '3.10',
|
| 967 |
+
'VPATH': '/croot/python-split_1733933809325/work',
|
| 968 |
+
'WHEEL_PKG_DIR': '',
|
| 969 |
+
'WINDOW_HAS_FLAGS': 1,
|
| 970 |
+
'WITH_DECIMAL_CONTEXTVAR': 1,
|
| 971 |
+
'WITH_DOC_STRINGS': 1,
|
| 972 |
+
'WITH_DTRACE': 0,
|
| 973 |
+
'WITH_DYLD': 0,
|
| 974 |
+
'WITH_EDITLINE': 0,
|
| 975 |
+
'WITH_LIBINTL': 0,
|
| 976 |
+
'WITH_NEXT_FRAMEWORK': 0,
|
| 977 |
+
'WITH_PYMALLOC': 1,
|
| 978 |
+
'WITH_VALGRIND': 0,
|
| 979 |
+
'X87_DOUBLE_ROUNDING': 0,
|
| 980 |
+
'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax',
|
| 981 |
+
'abs_builddir': '/croot/python-split_1733933809325/work/build-static',
|
| 982 |
+
'abs_srcdir': '/croot/python-split_1733933809325/work',
|
| 983 |
+
'datarootdir': '/root/envs/evalkit_tf446/share',
|
| 984 |
+
'exec_prefix': '/root/envs/evalkit_tf446',
|
| 985 |
+
'prefix': '/root/envs/evalkit_tf446',
|
| 986 |
+
'srcdir': '/croot/python-split_1733933809325/work'}
|
evalkit_tf446/lib/python3.10/abc.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2007 Google, Inc. All Rights Reserved.
|
| 2 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 3 |
+
|
| 4 |
+
"""Abstract Base Classes (ABCs) according to PEP 3119."""
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def abstractmethod(funcobj):
    """Mark *funcobj* as abstract and return it unchanged.

    The marker only has an effect when the owning class uses ABCMeta (or a
    metaclass derived from it): such a class cannot be instantiated while any
    of its abstract methods remain unoverridden.  The decorated callable can
    still be invoked normally, e.g. through super().  Works on plain
    functions as well as on descriptors such as properties.

    Usage:

        class C(metaclass=ABCMeta):
            @abstractmethod
            def my_abstract_method(self, ...):
                ...
    """
    # The flag is all ABCMeta looks for when collecting __abstractmethods__.
    setattr(funcobj, '__isabstractmethod__', True)
    return funcobj
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class abstractclassmethod(classmethod):
    """A classmethod that is also flagged as abstract.

    Deprecated: stack 'classmethod' over 'abstractmethod' instead:

        class C(ABC):
            @classmethod
            @abstractmethod
            def my_abstract_classmethod(cls, ...):
                ...
    """

    # Lets ABCMeta detect the descriptor itself as abstract.
    __isabstractmethod__ = True

    def __init__(self, callable):
        # Flag the wrapped function too, mirroring @abstractmethod.
        setattr(callable, '__isabstractmethod__', True)
        super().__init__(callable)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class abstractstaticmethod(staticmethod):
    """A staticmethod that is also flagged as abstract.

    Deprecated: stack 'staticmethod' over 'abstractmethod' instead:

        class C(ABC):
            @staticmethod
            @abstractmethod
            def my_abstract_staticmethod(...):
                ...
    """

    # Lets ABCMeta detect the descriptor itself as abstract.
    __isabstractmethod__ = True

    def __init__(self, callable):
        # Flag the wrapped function too, mirroring @abstractmethod.
        setattr(callable, '__isabstractmethod__', True)
        super().__init__(callable)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class abstractproperty(property):
    """A property that is also flagged as abstract.

    Deprecated: stack 'property' over 'abstractmethod' instead:

        class C(ABC):
            @property
            @abstractmethod
            def my_abstract_property(self):
                ...
    """

    # The only addition over plain property: ABCMeta sees it as abstract.
    __isabstractmethod__ = True
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
try:
    from _abc import (get_cache_token, _abc_init, _abc_register,
                      _abc_instancecheck, _abc_subclasscheck, _get_dump,
                      _reset_registry, _reset_caches)
except ImportError:
    # No C accelerator available: fall back to the pure-Python metaclass
    # and make it advertise itself as living in 'abc'.
    from _py_abc import ABCMeta, get_cache_token
    ABCMeta.__module__ = 'abc'
else:
    class ABCMeta(type):
        """Metaclass for defining Abstract Base Classes (ABCs).

        Create an ABC by using this metaclass.  An ABC may be subclassed
        directly (acting as a mix-in), or unrelated concrete classes and
        unrelated ABCs may be registered as 'virtual subclasses'.  Virtual
        subclasses (and their descendants) satisfy issubclass()/isinstance()
        checks against the registering ABC, but do not gain the ABC in their
        MRO and cannot reach its method implementations, even via super().
        """

        def __new__(mcls, name, bases, namespace, **kwargs):
            cls = super().__new__(mcls, name, bases, namespace, **kwargs)
            # Computes __abstractmethods__ and sets up the C-side caches.
            _abc_init(cls)
            return cls

        def register(cls, subclass):
            """Register a virtual subclass of an ABC.

            Returns the subclass, to allow usage as a class decorator.
            """
            return _abc_register(cls, subclass)

        def __instancecheck__(cls, instance):
            """Override for isinstance(instance, cls)."""
            return _abc_instancecheck(cls, instance)

        def __subclasscheck__(cls, subclass):
            """Override for issubclass(subclass, cls)."""
            return _abc_subclasscheck(cls, subclass)

        def _dump_registry(cls, file=None):
            """Debug helper to print the ABC registry."""
            print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
            print(f"Inv. counter: {get_cache_token()}", file=file)
            registry, cache, neg_cache, neg_cache_version = _get_dump(cls)
            print(f"_abc_registry: {registry!r}", file=file)
            print(f"_abc_cache: {cache!r}", file=file)
            print(f"_abc_negative_cache: {neg_cache!r}", file=file)
            print(f"_abc_negative_cache_version: {neg_cache_version!r}",
                  file=file)

        def _abc_registry_clear(cls):
            """Clear the registry (for debugging or testing)."""
            _reset_registry(cls)

        def _abc_caches_clear(cls):
            """Clear the caches (for debugging or testing)."""
            _reset_caches(cls)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def update_abstractmethods(cls):
    """Recalculate the set of abstract methods of an abstract class.

    A method implemented on the class after its creation is not considered
    implemented until this function runs; likewise an abstract method added
    afterwards only becomes part of __abstractmethods__ after this call.
    Intended for class decorators that add methods to the subject class,
    and should run before the class is otherwise used.

    Returns cls, to allow usage as a class decorator.

    If cls is not an instance of ABCMeta, does nothing.
    """
    # The attribute check (rather than an isinstance check on the metaclass)
    # covers both the C and pure-Python ABCMeta implementations.
    if not hasattr(cls, '__abstractmethods__'):
        return cls

    def _is_abstract(obj):
        return getattr(obj, "__isabstractmethod__", False)

    # Abstract names inherited from parents that cls still hasn't implemented.
    inherited = {
        name
        for base in cls.__bases__
        for name in getattr(base, '__abstractmethods__', ())
        if _is_abstract(getattr(cls, name, None))
    }
    # Abstract methods declared (or newly added) directly on cls.
    declared = {name for name, attr in cls.__dict__.items()
                if _is_abstract(attr)}

    cls.__abstractmethods__ = frozenset(inherited | declared)
    return cls
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class ABC(metaclass=ABCMeta):
    """Convenience base class: inherit from ABC to get ABCMeta behavior
    without spelling out the metaclass.
    """
    # Keep instances of ABC subclasses free of an unnecessary __dict__
    # contributed by this helper base.
    __slots__ = ()
|
evalkit_tf446/lib/python3.10/argparse.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf446/lib/python3.10/ast.py
ADDED
|
@@ -0,0 +1,1709 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ast
|
| 3 |
+
~~~
|
| 4 |
+
|
| 5 |
+
The `ast` module helps Python applications to process trees of the Python
|
| 6 |
+
abstract syntax grammar. The abstract syntax itself might change with
|
| 7 |
+
each Python release; this module helps to find out programmatically what
|
| 8 |
+
the current grammar looks like and allows modifications of it.
|
| 9 |
+
|
| 10 |
+
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
|
| 11 |
+
a flag to the `compile()` builtin function or by using the `parse()`
|
| 12 |
+
function from this module. The result will be a tree of objects whose
|
| 13 |
+
classes all inherit from `ast.AST`.
|
| 14 |
+
|
| 15 |
+
A modified abstract syntax tree can be compiled into a Python code object
|
| 16 |
+
using the built-in `compile()` function.
|
| 17 |
+
|
| 18 |
+
Additionally various helper functions are provided that make working with
|
| 19 |
+
the trees simpler. The main intention of the helper functions and this
|
| 20 |
+
module in general is to provide an easy to use interface for libraries
|
| 21 |
+
that work tightly with the python syntax (template engines for example).
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
:copyright: Copyright 2008 by Armin Ronacher.
|
| 25 |
+
:license: Python License.
|
| 26 |
+
"""
|
| 27 |
+
import sys
|
| 28 |
+
from _ast import *
|
| 29 |
+
from contextlib import contextmanager, nullcontext
|
| 30 |
+
from enum import IntEnum, auto
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def parse(source, filename='<unknown>', mode='exec', *,
          type_comments=False, feature_version=None):
    """
    Parse the source into an AST node.
    Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
    Pass type_comments=True to get back type comments where the syntax allows.
    """
    flags = PyCF_ONLY_AST | (PyCF_TYPE_COMMENTS if type_comments else 0)
    if feature_version is None:
        # -1 tells the compiler to use the current grammar.
        feature_version = -1
    elif isinstance(feature_version, tuple):
        major, minor = feature_version  # Should be a 2-tuple.
        assert major == 3
        feature_version = minor
    # Otherwise feature_version should already be an int minor version for 3.x.
    return compile(source, filename, mode, flags,
                   _feature_version=feature_version)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def literal_eval(node_or_string):
    """
    Evaluate an expression node or a string containing only a Python
    expression.  Only the Python literal structures are accepted: strings,
    bytes, numbers, tuples, lists, dicts, sets, booleans, and None.

    Caution: A complex expression can overflow the C stack and cause a crash.
    """
    if isinstance(node_or_string, str):
        # Leading whitespace would otherwise trip the 'eval'-mode parser.
        node_or_string = parse(node_or_string.lstrip(" \t"), mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.body

    def _bad(node):
        msg = "malformed node or string"
        lno = getattr(node, 'lineno', None)
        if lno:
            msg += f' on line {lno}'
        raise ValueError(msg + f': {node!r}')

    def _num(node):
        # Exact type check deliberately excludes bool (a subclass of int).
        if isinstance(node, Constant) and type(node.value) in (int, float, complex):
            return node.value
        _bad(node)

    def _signed(node):
        if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
            operand = _num(node.operand)
            return +operand if isinstance(node.op, UAdd) else -operand
        return _num(node)

    def _conv(node):
        if isinstance(node, Constant):
            return node.value
        if isinstance(node, Tuple):
            return tuple(_conv(elt) for elt in node.elts)
        if isinstance(node, List):
            return [_conv(elt) for elt in node.elts]
        if isinstance(node, Set):
            return {_conv(elt) for elt in node.elts}
        if (isinstance(node, Call) and isinstance(node.func, Name)
                and node.func.id == 'set' and node.args == node.keywords == []):
            # 'set()' is the only call form allowed: the empty-set literal.
            return set()
        if isinstance(node, Dict):
            if len(node.keys) != len(node.values):
                # A ** unpacking entry leaves a hole in node.keys.
                _bad(node)
            return {_conv(k): _conv(v)
                    for k, v in zip(node.keys, node.values)}
        if isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)):
            # Only real +/- imaginary is allowed, to support complex literals.
            left = _signed(node.left)
            right = _num(node.right)
            if isinstance(left, (int, float)) and isinstance(right, complex):
                return left + right if isinstance(node.op, Add) else left - right
        return _signed(node)

    return _conv(node_or_string)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def dump(node, annotate_fields=True, include_attributes=False, *, indent=None):
|
| 114 |
+
"""
|
| 115 |
+
Return a formatted dump of the tree in node. This is mainly useful for
|
| 116 |
+
debugging purposes. If annotate_fields is true (by default),
|
| 117 |
+
the returned string will show the names and the values for fields.
|
| 118 |
+
If annotate_fields is false, the result string will be more compact by
|
| 119 |
+
omitting unambiguous field names. Attributes such as line
|
| 120 |
+
numbers and column offsets are not dumped by default. If this is wanted,
|
| 121 |
+
include_attributes can be set to true. If indent is a non-negative
|
| 122 |
+
integer or string, then the tree will be pretty-printed with that indent
|
| 123 |
+
level. None (the default) selects the single line representation.
|
| 124 |
+
"""
|
| 125 |
+
def _format(node, level=0):
|
| 126 |
+
if indent is not None:
|
| 127 |
+
level += 1
|
| 128 |
+
prefix = '\n' + indent * level
|
| 129 |
+
sep = ',\n' + indent * level
|
| 130 |
+
else:
|
| 131 |
+
prefix = ''
|
| 132 |
+
sep = ', '
|
| 133 |
+
if isinstance(node, AST):
|
| 134 |
+
cls = type(node)
|
| 135 |
+
args = []
|
| 136 |
+
allsimple = True
|
| 137 |
+
keywords = annotate_fields
|
| 138 |
+
for name in node._fields:
|
| 139 |
+
try:
|
| 140 |
+
value = getattr(node, name)
|
| 141 |
+
except AttributeError:
|
| 142 |
+
keywords = True
|
| 143 |
+
continue
|
| 144 |
+
if value is None and getattr(cls, name, ...) is None:
|
| 145 |
+
keywords = True
|
| 146 |
+
continue
|
| 147 |
+
value, simple = _format(value, level)
|
| 148 |
+
allsimple = allsimple and simple
|
| 149 |
+
if keywords:
|
| 150 |
+
args.append('%s=%s' % (name, value))
|
| 151 |
+
else:
|
| 152 |
+
args.append(value)
|
| 153 |
+
if include_attributes and node._attributes:
|
| 154 |
+
for name in node._attributes:
|
| 155 |
+
try:
|
| 156 |
+
value = getattr(node, name)
|
| 157 |
+
except AttributeError:
|
| 158 |
+
continue
|
| 159 |
+
if value is None and getattr(cls, name, ...) is None:
|
| 160 |
+
continue
|
| 161 |
+
value, simple = _format(value, level)
|
| 162 |
+
allsimple = allsimple and simple
|
| 163 |
+
args.append('%s=%s' % (name, value))
|
| 164 |
+
if allsimple and len(args) <= 3:
|
| 165 |
+
return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args
|
| 166 |
+
return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False
|
| 167 |
+
elif isinstance(node, list):
|
| 168 |
+
if not node:
|
| 169 |
+
return '[]', True
|
| 170 |
+
return '[%s%s]' % (prefix, sep.join(_format(x, level)[0] for x in node)), False
|
| 171 |
+
return repr(node), True
|
| 172 |
+
|
| 173 |
+
if not isinstance(node, AST):
|
| 174 |
+
raise TypeError('expected AST, got %r' % node.__class__.__name__)
|
| 175 |
+
if indent is not None and not isinstance(indent, str):
|
| 176 |
+
indent = ' ' * indent
|
| 177 |
+
return _format(node)[0]
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def copy_location(new_node, old_node):
|
| 181 |
+
"""
|
| 182 |
+
Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset`
|
| 183 |
+
attributes) from *old_node* to *new_node* if possible, and return *new_node*.
|
| 184 |
+
"""
|
| 185 |
+
for attr in 'lineno', 'col_offset', 'end_lineno', 'end_col_offset':
|
| 186 |
+
if attr in old_node._attributes and attr in new_node._attributes:
|
| 187 |
+
value = getattr(old_node, attr, None)
|
| 188 |
+
# end_lineno and end_col_offset are optional attributes, and they
|
| 189 |
+
# should be copied whether the value is None or not.
|
| 190 |
+
if value is not None or (
|
| 191 |
+
hasattr(old_node, attr) and attr.startswith("end_")
|
| 192 |
+
):
|
| 193 |
+
setattr(new_node, attr, value)
|
| 194 |
+
return new_node
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def fix_missing_locations(node):
|
| 198 |
+
"""
|
| 199 |
+
When you compile a node tree with compile(), the compiler expects lineno and
|
| 200 |
+
col_offset attributes for every node that supports them. This is rather
|
| 201 |
+
tedious to fill in for generated nodes, so this helper adds these attributes
|
| 202 |
+
recursively where not already set, by setting them to the values of the
|
| 203 |
+
parent node. It works recursively starting at *node*.
|
| 204 |
+
"""
|
| 205 |
+
def _fix(node, lineno, col_offset, end_lineno, end_col_offset):
|
| 206 |
+
if 'lineno' in node._attributes:
|
| 207 |
+
if not hasattr(node, 'lineno'):
|
| 208 |
+
node.lineno = lineno
|
| 209 |
+
else:
|
| 210 |
+
lineno = node.lineno
|
| 211 |
+
if 'end_lineno' in node._attributes:
|
| 212 |
+
if getattr(node, 'end_lineno', None) is None:
|
| 213 |
+
node.end_lineno = end_lineno
|
| 214 |
+
else:
|
| 215 |
+
end_lineno = node.end_lineno
|
| 216 |
+
if 'col_offset' in node._attributes:
|
| 217 |
+
if not hasattr(node, 'col_offset'):
|
| 218 |
+
node.col_offset = col_offset
|
| 219 |
+
else:
|
| 220 |
+
col_offset = node.col_offset
|
| 221 |
+
if 'end_col_offset' in node._attributes:
|
| 222 |
+
if getattr(node, 'end_col_offset', None) is None:
|
| 223 |
+
node.end_col_offset = end_col_offset
|
| 224 |
+
else:
|
| 225 |
+
end_col_offset = node.end_col_offset
|
| 226 |
+
for child in iter_child_nodes(node):
|
| 227 |
+
_fix(child, lineno, col_offset, end_lineno, end_col_offset)
|
| 228 |
+
_fix(node, 1, 0, 1, 0)
|
| 229 |
+
return node
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def increment_lineno(node, n=1):
|
| 233 |
+
"""
|
| 234 |
+
Increment the line number and end line number of each node in the tree
|
| 235 |
+
starting at *node* by *n*. This is useful to "move code" to a different
|
| 236 |
+
location in a file.
|
| 237 |
+
"""
|
| 238 |
+
for child in walk(node):
|
| 239 |
+
# TypeIgnore is a special case where lineno is not an attribute
|
| 240 |
+
# but rather a field of the node itself.
|
| 241 |
+
if isinstance(child, TypeIgnore):
|
| 242 |
+
child.lineno = getattr(child, 'lineno', 0) + n
|
| 243 |
+
continue
|
| 244 |
+
|
| 245 |
+
if 'lineno' in child._attributes:
|
| 246 |
+
child.lineno = getattr(child, 'lineno', 0) + n
|
| 247 |
+
if (
|
| 248 |
+
"end_lineno" in child._attributes
|
| 249 |
+
and (end_lineno := getattr(child, "end_lineno", 0)) is not None
|
| 250 |
+
):
|
| 251 |
+
child.end_lineno = end_lineno + n
|
| 252 |
+
return node
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def iter_fields(node):
|
| 256 |
+
"""
|
| 257 |
+
Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``
|
| 258 |
+
that is present on *node*.
|
| 259 |
+
"""
|
| 260 |
+
for field in node._fields:
|
| 261 |
+
try:
|
| 262 |
+
yield field, getattr(node, field)
|
| 263 |
+
except AttributeError:
|
| 264 |
+
pass
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def iter_child_nodes(node):
|
| 268 |
+
"""
|
| 269 |
+
Yield all direct child nodes of *node*, that is, all fields that are nodes
|
| 270 |
+
and all items of fields that are lists of nodes.
|
| 271 |
+
"""
|
| 272 |
+
for name, field in iter_fields(node):
|
| 273 |
+
if isinstance(field, AST):
|
| 274 |
+
yield field
|
| 275 |
+
elif isinstance(field, list):
|
| 276 |
+
for item in field:
|
| 277 |
+
if isinstance(item, AST):
|
| 278 |
+
yield item
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def get_docstring(node, clean=True):
|
| 282 |
+
"""
|
| 283 |
+
Return the docstring for the given node or None if no docstring can
|
| 284 |
+
be found. If the node provided does not have docstrings a TypeError
|
| 285 |
+
will be raised.
|
| 286 |
+
|
| 287 |
+
If *clean* is `True`, all tabs are expanded to spaces and any whitespace
|
| 288 |
+
that can be uniformly removed from the second line onwards is removed.
|
| 289 |
+
"""
|
| 290 |
+
if not isinstance(node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)):
|
| 291 |
+
raise TypeError("%r can't have docstrings" % node.__class__.__name__)
|
| 292 |
+
if not(node.body and isinstance(node.body[0], Expr)):
|
| 293 |
+
return None
|
| 294 |
+
node = node.body[0].value
|
| 295 |
+
if isinstance(node, Str):
|
| 296 |
+
text = node.s
|
| 297 |
+
elif isinstance(node, Constant) and isinstance(node.value, str):
|
| 298 |
+
text = node.value
|
| 299 |
+
else:
|
| 300 |
+
return None
|
| 301 |
+
if clean:
|
| 302 |
+
import inspect
|
| 303 |
+
text = inspect.cleandoc(text)
|
| 304 |
+
return text
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def _splitlines_no_ff(source):
|
| 308 |
+
"""Split a string into lines ignoring form feed and other chars.
|
| 309 |
+
|
| 310 |
+
This mimics how the Python parser splits source code.
|
| 311 |
+
"""
|
| 312 |
+
idx = 0
|
| 313 |
+
lines = []
|
| 314 |
+
next_line = ''
|
| 315 |
+
while idx < len(source):
|
| 316 |
+
c = source[idx]
|
| 317 |
+
next_line += c
|
| 318 |
+
idx += 1
|
| 319 |
+
# Keep \r\n together
|
| 320 |
+
if c == '\r' and idx < len(source) and source[idx] == '\n':
|
| 321 |
+
next_line += '\n'
|
| 322 |
+
idx += 1
|
| 323 |
+
if c in '\r\n':
|
| 324 |
+
lines.append(next_line)
|
| 325 |
+
next_line = ''
|
| 326 |
+
|
| 327 |
+
if next_line:
|
| 328 |
+
lines.append(next_line)
|
| 329 |
+
return lines
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def _pad_whitespace(source):
|
| 333 |
+
r"""Replace all chars except '\f\t' in a line with spaces."""
|
| 334 |
+
result = ''
|
| 335 |
+
for c in source:
|
| 336 |
+
if c in '\f\t':
|
| 337 |
+
result += c
|
| 338 |
+
else:
|
| 339 |
+
result += ' '
|
| 340 |
+
return result
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def get_source_segment(source, node, *, padded=False):
|
| 344 |
+
"""Get source code segment of the *source* that generated *node*.
|
| 345 |
+
|
| 346 |
+
If some location information (`lineno`, `end_lineno`, `col_offset`,
|
| 347 |
+
or `end_col_offset`) is missing, return None.
|
| 348 |
+
|
| 349 |
+
If *padded* is `True`, the first line of a multi-line statement will
|
| 350 |
+
be padded with spaces to match its original position.
|
| 351 |
+
"""
|
| 352 |
+
try:
|
| 353 |
+
if node.end_lineno is None or node.end_col_offset is None:
|
| 354 |
+
return None
|
| 355 |
+
lineno = node.lineno - 1
|
| 356 |
+
end_lineno = node.end_lineno - 1
|
| 357 |
+
col_offset = node.col_offset
|
| 358 |
+
end_col_offset = node.end_col_offset
|
| 359 |
+
except AttributeError:
|
| 360 |
+
return None
|
| 361 |
+
|
| 362 |
+
lines = _splitlines_no_ff(source)
|
| 363 |
+
if end_lineno == lineno:
|
| 364 |
+
return lines[lineno].encode()[col_offset:end_col_offset].decode()
|
| 365 |
+
|
| 366 |
+
if padded:
|
| 367 |
+
padding = _pad_whitespace(lines[lineno].encode()[:col_offset].decode())
|
| 368 |
+
else:
|
| 369 |
+
padding = ''
|
| 370 |
+
|
| 371 |
+
first = padding + lines[lineno].encode()[col_offset:].decode()
|
| 372 |
+
last = lines[end_lineno].encode()[:end_col_offset].decode()
|
| 373 |
+
lines = lines[lineno+1:end_lineno]
|
| 374 |
+
|
| 375 |
+
lines.insert(0, first)
|
| 376 |
+
lines.append(last)
|
| 377 |
+
return ''.join(lines)
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
def walk(node):
|
| 381 |
+
"""
|
| 382 |
+
Recursively yield all descendant nodes in the tree starting at *node*
|
| 383 |
+
(including *node* itself), in no specified order. This is useful if you
|
| 384 |
+
only want to modify nodes in place and don't care about the context.
|
| 385 |
+
"""
|
| 386 |
+
from collections import deque
|
| 387 |
+
todo = deque([node])
|
| 388 |
+
while todo:
|
| 389 |
+
node = todo.popleft()
|
| 390 |
+
todo.extend(iter_child_nodes(node))
|
| 391 |
+
yield node
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
class NodeVisitor(object):
|
| 395 |
+
"""
|
| 396 |
+
A node visitor base class that walks the abstract syntax tree and calls a
|
| 397 |
+
visitor function for every node found. This function may return a value
|
| 398 |
+
which is forwarded by the `visit` method.
|
| 399 |
+
|
| 400 |
+
This class is meant to be subclassed, with the subclass adding visitor
|
| 401 |
+
methods.
|
| 402 |
+
|
| 403 |
+
Per default the visitor functions for the nodes are ``'visit_'`` +
|
| 404 |
+
class name of the node. So a `TryFinally` node visit function would
|
| 405 |
+
be `visit_TryFinally`. This behavior can be changed by overriding
|
| 406 |
+
the `visit` method. If no visitor function exists for a node
|
| 407 |
+
(return value `None`) the `generic_visit` visitor is used instead.
|
| 408 |
+
|
| 409 |
+
Don't use the `NodeVisitor` if you want to apply changes to nodes during
|
| 410 |
+
traversing. For this a special visitor exists (`NodeTransformer`) that
|
| 411 |
+
allows modifications.
|
| 412 |
+
"""
|
| 413 |
+
|
| 414 |
+
def visit(self, node):
|
| 415 |
+
"""Visit a node."""
|
| 416 |
+
method = 'visit_' + node.__class__.__name__
|
| 417 |
+
visitor = getattr(self, method, self.generic_visit)
|
| 418 |
+
return visitor(node)
|
| 419 |
+
|
| 420 |
+
def generic_visit(self, node):
|
| 421 |
+
"""Called if no explicit visitor function exists for a node."""
|
| 422 |
+
for field, value in iter_fields(node):
|
| 423 |
+
if isinstance(value, list):
|
| 424 |
+
for item in value:
|
| 425 |
+
if isinstance(item, AST):
|
| 426 |
+
self.visit(item)
|
| 427 |
+
elif isinstance(value, AST):
|
| 428 |
+
self.visit(value)
|
| 429 |
+
|
| 430 |
+
def visit_Constant(self, node):
|
| 431 |
+
value = node.value
|
| 432 |
+
type_name = _const_node_type_names.get(type(value))
|
| 433 |
+
if type_name is None:
|
| 434 |
+
for cls, name in _const_node_type_names.items():
|
| 435 |
+
if isinstance(value, cls):
|
| 436 |
+
type_name = name
|
| 437 |
+
break
|
| 438 |
+
if type_name is not None:
|
| 439 |
+
method = 'visit_' + type_name
|
| 440 |
+
try:
|
| 441 |
+
visitor = getattr(self, method)
|
| 442 |
+
except AttributeError:
|
| 443 |
+
pass
|
| 444 |
+
else:
|
| 445 |
+
import warnings
|
| 446 |
+
warnings.warn(f"{method} is deprecated; add visit_Constant",
|
| 447 |
+
DeprecationWarning, 2)
|
| 448 |
+
return visitor(node)
|
| 449 |
+
return self.generic_visit(node)
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
class NodeTransformer(NodeVisitor):
|
| 453 |
+
"""
|
| 454 |
+
A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
|
| 455 |
+
allows modification of nodes.
|
| 456 |
+
|
| 457 |
+
The `NodeTransformer` will walk the AST and use the return value of the
|
| 458 |
+
visitor methods to replace or remove the old node. If the return value of
|
| 459 |
+
the visitor method is ``None``, the node will be removed from its location,
|
| 460 |
+
otherwise it is replaced with the return value. The return value may be the
|
| 461 |
+
original node in which case no replacement takes place.
|
| 462 |
+
|
| 463 |
+
Here is an example transformer that rewrites all occurrences of name lookups
|
| 464 |
+
(``foo``) to ``data['foo']``::
|
| 465 |
+
|
| 466 |
+
class RewriteName(NodeTransformer):
|
| 467 |
+
|
| 468 |
+
def visit_Name(self, node):
|
| 469 |
+
return Subscript(
|
| 470 |
+
value=Name(id='data', ctx=Load()),
|
| 471 |
+
slice=Constant(value=node.id),
|
| 472 |
+
ctx=node.ctx
|
| 473 |
+
)
|
| 474 |
+
|
| 475 |
+
Keep in mind that if the node you're operating on has child nodes you must
|
| 476 |
+
either transform the child nodes yourself or call the :meth:`generic_visit`
|
| 477 |
+
method for the node first.
|
| 478 |
+
|
| 479 |
+
For nodes that were part of a collection of statements (that applies to all
|
| 480 |
+
statement nodes), the visitor may also return a list of nodes rather than
|
| 481 |
+
just a single node.
|
| 482 |
+
|
| 483 |
+
Usually you use the transformer like this::
|
| 484 |
+
|
| 485 |
+
node = YourTransformer().visit(node)
|
| 486 |
+
"""
|
| 487 |
+
|
| 488 |
+
def generic_visit(self, node):
|
| 489 |
+
for field, old_value in iter_fields(node):
|
| 490 |
+
if isinstance(old_value, list):
|
| 491 |
+
new_values = []
|
| 492 |
+
for value in old_value:
|
| 493 |
+
if isinstance(value, AST):
|
| 494 |
+
value = self.visit(value)
|
| 495 |
+
if value is None:
|
| 496 |
+
continue
|
| 497 |
+
elif not isinstance(value, AST):
|
| 498 |
+
new_values.extend(value)
|
| 499 |
+
continue
|
| 500 |
+
new_values.append(value)
|
| 501 |
+
old_value[:] = new_values
|
| 502 |
+
elif isinstance(old_value, AST):
|
| 503 |
+
new_node = self.visit(old_value)
|
| 504 |
+
if new_node is None:
|
| 505 |
+
delattr(node, field)
|
| 506 |
+
else:
|
| 507 |
+
setattr(node, field, new_node)
|
| 508 |
+
return node
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
# If the ast module is loaded more than once, only add deprecated methods once
|
| 512 |
+
if not hasattr(Constant, 'n'):
|
| 513 |
+
# The following code is for backward compatibility.
|
| 514 |
+
# It will be removed in future.
|
| 515 |
+
|
| 516 |
+
def _getter(self):
|
| 517 |
+
"""Deprecated. Use value instead."""
|
| 518 |
+
return self.value
|
| 519 |
+
|
| 520 |
+
def _setter(self, value):
|
| 521 |
+
self.value = value
|
| 522 |
+
|
| 523 |
+
Constant.n = property(_getter, _setter)
|
| 524 |
+
Constant.s = property(_getter, _setter)
|
| 525 |
+
|
| 526 |
+
class _ABC(type):
|
| 527 |
+
|
| 528 |
+
def __init__(cls, *args):
|
| 529 |
+
cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead"""
|
| 530 |
+
|
| 531 |
+
def __instancecheck__(cls, inst):
|
| 532 |
+
if not isinstance(inst, Constant):
|
| 533 |
+
return False
|
| 534 |
+
if cls in _const_types:
|
| 535 |
+
try:
|
| 536 |
+
value = inst.value
|
| 537 |
+
except AttributeError:
|
| 538 |
+
return False
|
| 539 |
+
else:
|
| 540 |
+
return (
|
| 541 |
+
isinstance(value, _const_types[cls]) and
|
| 542 |
+
not isinstance(value, _const_types_not.get(cls, ()))
|
| 543 |
+
)
|
| 544 |
+
return type.__instancecheck__(cls, inst)
|
| 545 |
+
|
| 546 |
+
def _new(cls, *args, **kwargs):
|
| 547 |
+
for key in kwargs:
|
| 548 |
+
if key not in cls._fields:
|
| 549 |
+
# arbitrary keyword arguments are accepted
|
| 550 |
+
continue
|
| 551 |
+
pos = cls._fields.index(key)
|
| 552 |
+
if pos < len(args):
|
| 553 |
+
raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}")
|
| 554 |
+
if cls in _const_types:
|
| 555 |
+
return Constant(*args, **kwargs)
|
| 556 |
+
return Constant.__new__(cls, *args, **kwargs)
|
| 557 |
+
|
| 558 |
+
class Num(Constant, metaclass=_ABC):
|
| 559 |
+
_fields = ('n',)
|
| 560 |
+
__new__ = _new
|
| 561 |
+
|
| 562 |
+
class Str(Constant, metaclass=_ABC):
|
| 563 |
+
_fields = ('s',)
|
| 564 |
+
__new__ = _new
|
| 565 |
+
|
| 566 |
+
class Bytes(Constant, metaclass=_ABC):
|
| 567 |
+
_fields = ('s',)
|
| 568 |
+
__new__ = _new
|
| 569 |
+
|
| 570 |
+
class NameConstant(Constant, metaclass=_ABC):
|
| 571 |
+
__new__ = _new
|
| 572 |
+
|
| 573 |
+
class Ellipsis(Constant, metaclass=_ABC):
|
| 574 |
+
_fields = ()
|
| 575 |
+
|
| 576 |
+
def __new__(cls, *args, **kwargs):
|
| 577 |
+
if cls is Ellipsis:
|
| 578 |
+
return Constant(..., *args, **kwargs)
|
| 579 |
+
return Constant.__new__(cls, *args, **kwargs)
|
| 580 |
+
|
| 581 |
+
_const_types = {
|
| 582 |
+
Num: (int, float, complex),
|
| 583 |
+
Str: (str,),
|
| 584 |
+
Bytes: (bytes,),
|
| 585 |
+
NameConstant: (type(None), bool),
|
| 586 |
+
Ellipsis: (type(...),),
|
| 587 |
+
}
|
| 588 |
+
_const_types_not = {
|
| 589 |
+
Num: (bool,),
|
| 590 |
+
}
|
| 591 |
+
|
| 592 |
+
_const_node_type_names = {
|
| 593 |
+
bool: 'NameConstant', # should be before int
|
| 594 |
+
type(None): 'NameConstant',
|
| 595 |
+
int: 'Num',
|
| 596 |
+
float: 'Num',
|
| 597 |
+
complex: 'Num',
|
| 598 |
+
str: 'Str',
|
| 599 |
+
bytes: 'Bytes',
|
| 600 |
+
type(...): 'Ellipsis',
|
| 601 |
+
}
|
| 602 |
+
|
| 603 |
+
class slice(AST):
|
| 604 |
+
"""Deprecated AST node class."""
|
| 605 |
+
|
| 606 |
+
class Index(slice):
|
| 607 |
+
"""Deprecated AST node class. Use the index value directly instead."""
|
| 608 |
+
def __new__(cls, value, **kwargs):
|
| 609 |
+
return value
|
| 610 |
+
|
| 611 |
+
class ExtSlice(slice):
|
| 612 |
+
"""Deprecated AST node class. Use ast.Tuple instead."""
|
| 613 |
+
def __new__(cls, dims=(), **kwargs):
|
| 614 |
+
return Tuple(list(dims), Load(), **kwargs)
|
| 615 |
+
|
| 616 |
+
# If the ast module is loaded more than once, only add deprecated methods once
|
| 617 |
+
if not hasattr(Tuple, 'dims'):
|
| 618 |
+
# The following code is for backward compatibility.
|
| 619 |
+
# It will be removed in future.
|
| 620 |
+
|
| 621 |
+
def _dims_getter(self):
|
| 622 |
+
"""Deprecated. Use elts instead."""
|
| 623 |
+
return self.elts
|
| 624 |
+
|
| 625 |
+
def _dims_setter(self, value):
|
| 626 |
+
self.elts = value
|
| 627 |
+
|
| 628 |
+
Tuple.dims = property(_dims_getter, _dims_setter)
|
| 629 |
+
|
| 630 |
+
class Suite(mod):
|
| 631 |
+
"""Deprecated AST node class. Unused in Python 3."""
|
| 632 |
+
|
| 633 |
+
class AugLoad(expr_context):
|
| 634 |
+
"""Deprecated AST node class. Unused in Python 3."""
|
| 635 |
+
|
| 636 |
+
class AugStore(expr_context):
|
| 637 |
+
"""Deprecated AST node class. Unused in Python 3."""
|
| 638 |
+
|
| 639 |
+
class Param(expr_context):
|
| 640 |
+
"""Deprecated AST node class. Unused in Python 3."""
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
# Large float and imaginary literals get turned into infinities in the AST.
|
| 644 |
+
# We unparse those infinities to INFSTR.
|
| 645 |
+
_INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
|
| 646 |
+
|
| 647 |
+
class _Precedence(IntEnum):
|
| 648 |
+
"""Precedence table that originated from python grammar."""
|
| 649 |
+
|
| 650 |
+
TUPLE = auto()
|
| 651 |
+
YIELD = auto() # 'yield', 'yield from'
|
| 652 |
+
TEST = auto() # 'if'-'else', 'lambda'
|
| 653 |
+
OR = auto() # 'or'
|
| 654 |
+
AND = auto() # 'and'
|
| 655 |
+
NOT = auto() # 'not'
|
| 656 |
+
CMP = auto() # '<', '>', '==', '>=', '<=', '!=',
|
| 657 |
+
# 'in', 'not in', 'is', 'is not'
|
| 658 |
+
EXPR = auto()
|
| 659 |
+
BOR = EXPR # '|'
|
| 660 |
+
BXOR = auto() # '^'
|
| 661 |
+
BAND = auto() # '&'
|
| 662 |
+
SHIFT = auto() # '<<', '>>'
|
| 663 |
+
ARITH = auto() # '+', '-'
|
| 664 |
+
TERM = auto() # '*', '@', '/', '%', '//'
|
| 665 |
+
FACTOR = auto() # unary '+', '-', '~'
|
| 666 |
+
POWER = auto() # '**'
|
| 667 |
+
AWAIT = auto() # 'await'
|
| 668 |
+
ATOM = auto()
|
| 669 |
+
|
| 670 |
+
def next(self):
|
| 671 |
+
try:
|
| 672 |
+
return self.__class__(self + 1)
|
| 673 |
+
except ValueError:
|
| 674 |
+
return self
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
_SINGLE_QUOTES = ("'", '"')
|
| 678 |
+
_MULTI_QUOTES = ('"""', "'''")
|
| 679 |
+
_ALL_QUOTES = (*_SINGLE_QUOTES, *_MULTI_QUOTES)
|
| 680 |
+
|
| 681 |
+
class _Unparser(NodeVisitor):
|
| 682 |
+
"""Methods in this class recursively traverse an AST and
|
| 683 |
+
output source code for the abstract syntax; original formatting
|
| 684 |
+
is disregarded."""
|
| 685 |
+
|
| 686 |
+
def __init__(self, *, _avoid_backslashes=False):
|
| 687 |
+
self._source = []
|
| 688 |
+
self._buffer = []
|
| 689 |
+
self._precedences = {}
|
| 690 |
+
self._type_ignores = {}
|
| 691 |
+
self._indent = 0
|
| 692 |
+
self._avoid_backslashes = _avoid_backslashes
|
| 693 |
+
|
| 694 |
+
def interleave(self, inter, f, seq):
|
| 695 |
+
"""Call f on each item in seq, calling inter() in between."""
|
| 696 |
+
seq = iter(seq)
|
| 697 |
+
try:
|
| 698 |
+
f(next(seq))
|
| 699 |
+
except StopIteration:
|
| 700 |
+
pass
|
| 701 |
+
else:
|
| 702 |
+
for x in seq:
|
| 703 |
+
inter()
|
| 704 |
+
f(x)
|
| 705 |
+
|
| 706 |
+
def items_view(self, traverser, items):
|
| 707 |
+
"""Traverse and separate the given *items* with a comma and append it to
|
| 708 |
+
the buffer. If *items* is a single item sequence, a trailing comma
|
| 709 |
+
will be added."""
|
| 710 |
+
if len(items) == 1:
|
| 711 |
+
traverser(items[0])
|
| 712 |
+
self.write(",")
|
| 713 |
+
else:
|
| 714 |
+
self.interleave(lambda: self.write(", "), traverser, items)
|
| 715 |
+
|
| 716 |
+
def maybe_newline(self):
|
| 717 |
+
"""Adds a newline if it isn't the start of generated source"""
|
| 718 |
+
if self._source:
|
| 719 |
+
self.write("\n")
|
| 720 |
+
|
| 721 |
+
def fill(self, text=""):
|
| 722 |
+
"""Indent a piece of text and append it, according to the current
|
| 723 |
+
indentation level"""
|
| 724 |
+
self.maybe_newline()
|
| 725 |
+
self.write(" " * self._indent + text)
|
| 726 |
+
|
| 727 |
+
def write(self, text):
|
| 728 |
+
"""Append a piece of text"""
|
| 729 |
+
self._source.append(text)
|
| 730 |
+
|
| 731 |
+
def buffer_writer(self, text):
|
| 732 |
+
self._buffer.append(text)
|
| 733 |
+
|
| 734 |
+
@property
|
| 735 |
+
def buffer(self):
|
| 736 |
+
value = "".join(self._buffer)
|
| 737 |
+
self._buffer.clear()
|
| 738 |
+
return value
|
| 739 |
+
|
| 740 |
+
@contextmanager
|
| 741 |
+
def block(self, *, extra = None):
|
| 742 |
+
"""A context manager for preparing the source for blocks. It adds
|
| 743 |
+
the character':', increases the indentation on enter and decreases
|
| 744 |
+
the indentation on exit. If *extra* is given, it will be directly
|
| 745 |
+
appended after the colon character.
|
| 746 |
+
"""
|
| 747 |
+
self.write(":")
|
| 748 |
+
if extra:
|
| 749 |
+
self.write(extra)
|
| 750 |
+
self._indent += 1
|
| 751 |
+
yield
|
| 752 |
+
self._indent -= 1
|
| 753 |
+
|
| 754 |
+
@contextmanager
|
| 755 |
+
def delimit(self, start, end):
|
| 756 |
+
"""A context manager for preparing the source for expressions. It adds
|
| 757 |
+
*start* to the buffer and enters, after exit it adds *end*."""
|
| 758 |
+
|
| 759 |
+
self.write(start)
|
| 760 |
+
yield
|
| 761 |
+
self.write(end)
|
| 762 |
+
|
| 763 |
+
def delimit_if(self, start, end, condition):
|
| 764 |
+
if condition:
|
| 765 |
+
return self.delimit(start, end)
|
| 766 |
+
else:
|
| 767 |
+
return nullcontext()
|
| 768 |
+
|
| 769 |
+
def require_parens(self, precedence, node):
|
| 770 |
+
"""Shortcut to adding precedence related parens"""
|
| 771 |
+
return self.delimit_if("(", ")", self.get_precedence(node) > precedence)
|
| 772 |
+
|
| 773 |
+
def get_precedence(self, node):
|
| 774 |
+
return self._precedences.get(node, _Precedence.TEST)
|
| 775 |
+
|
| 776 |
+
def set_precedence(self, precedence, *nodes):
|
| 777 |
+
for node in nodes:
|
| 778 |
+
self._precedences[node] = precedence
|
| 779 |
+
|
| 780 |
+
def get_raw_docstring(self, node):
|
| 781 |
+
"""If a docstring node is found in the body of the *node* parameter,
|
| 782 |
+
return that docstring node, None otherwise.
|
| 783 |
+
|
| 784 |
+
Logic mirrored from ``_PyAST_GetDocString``."""
|
| 785 |
+
if not isinstance(
|
| 786 |
+
node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)
|
| 787 |
+
) or len(node.body) < 1:
|
| 788 |
+
return None
|
| 789 |
+
node = node.body[0]
|
| 790 |
+
if not isinstance(node, Expr):
|
| 791 |
+
return None
|
| 792 |
+
node = node.value
|
| 793 |
+
if isinstance(node, Constant) and isinstance(node.value, str):
|
| 794 |
+
return node
|
| 795 |
+
|
| 796 |
+
def get_type_comment(self, node):
|
| 797 |
+
comment = self._type_ignores.get(node.lineno) or node.type_comment
|
| 798 |
+
if comment is not None:
|
| 799 |
+
return f" # type: {comment}"
|
| 800 |
+
|
| 801 |
+
def traverse(self, node):
|
| 802 |
+
if isinstance(node, list):
|
| 803 |
+
for item in node:
|
| 804 |
+
self.traverse(item)
|
| 805 |
+
else:
|
| 806 |
+
super().visit(node)
|
| 807 |
+
|
| 808 |
+
# Note: as visit() resets the output text, do NOT rely on
|
| 809 |
+
# NodeVisitor.generic_visit to handle any nodes (as it calls back in to
|
| 810 |
+
# the subclass visit() method, which resets self._source to an empty list)
|
| 811 |
+
def visit(self, node):
|
| 812 |
+
"""Outputs a source code string that, if converted back to an ast
|
| 813 |
+
(using ast.parse) will generate an AST equivalent to *node*"""
|
| 814 |
+
self._source = []
|
| 815 |
+
self.traverse(node)
|
| 816 |
+
return "".join(self._source)
|
| 817 |
+
|
| 818 |
+
def _write_docstring_and_traverse_body(self, node):
|
| 819 |
+
if (docstring := self.get_raw_docstring(node)):
|
| 820 |
+
self._write_docstring(docstring)
|
| 821 |
+
self.traverse(node.body[1:])
|
| 822 |
+
else:
|
| 823 |
+
self.traverse(node.body)
|
| 824 |
+
|
| 825 |
+
def visit_Module(self, node):
|
| 826 |
+
self._type_ignores = {
|
| 827 |
+
ignore.lineno: f"ignore{ignore.tag}"
|
| 828 |
+
for ignore in node.type_ignores
|
| 829 |
+
}
|
| 830 |
+
self._write_docstring_and_traverse_body(node)
|
| 831 |
+
self._type_ignores.clear()
|
| 832 |
+
|
| 833 |
+
def visit_FunctionType(self, node):
|
| 834 |
+
with self.delimit("(", ")"):
|
| 835 |
+
self.interleave(
|
| 836 |
+
lambda: self.write(", "), self.traverse, node.argtypes
|
| 837 |
+
)
|
| 838 |
+
|
| 839 |
+
self.write(" -> ")
|
| 840 |
+
self.traverse(node.returns)
|
| 841 |
+
|
| 842 |
+
def visit_Expr(self, node):
|
| 843 |
+
self.fill()
|
| 844 |
+
self.set_precedence(_Precedence.YIELD, node.value)
|
| 845 |
+
self.traverse(node.value)
|
| 846 |
+
|
| 847 |
+
def visit_NamedExpr(self, node):
|
| 848 |
+
with self.require_parens(_Precedence.TUPLE, node):
|
| 849 |
+
self.set_precedence(_Precedence.ATOM, node.target, node.value)
|
| 850 |
+
self.traverse(node.target)
|
| 851 |
+
self.write(" := ")
|
| 852 |
+
self.traverse(node.value)
|
| 853 |
+
|
| 854 |
+
def visit_Import(self, node):
|
| 855 |
+
self.fill("import ")
|
| 856 |
+
self.interleave(lambda: self.write(", "), self.traverse, node.names)
|
| 857 |
+
|
| 858 |
+
def visit_ImportFrom(self, node):
|
| 859 |
+
self.fill("from ")
|
| 860 |
+
self.write("." * (node.level or 0))
|
| 861 |
+
if node.module:
|
| 862 |
+
self.write(node.module)
|
| 863 |
+
self.write(" import ")
|
| 864 |
+
self.interleave(lambda: self.write(", "), self.traverse, node.names)
|
| 865 |
+
|
| 866 |
+
def visit_Assign(self, node):
|
| 867 |
+
self.fill()
|
| 868 |
+
for target in node.targets:
|
| 869 |
+
self.traverse(target)
|
| 870 |
+
self.write(" = ")
|
| 871 |
+
self.traverse(node.value)
|
| 872 |
+
if type_comment := self.get_type_comment(node):
|
| 873 |
+
self.write(type_comment)
|
| 874 |
+
|
| 875 |
+
def visit_AugAssign(self, node):
|
| 876 |
+
self.fill()
|
| 877 |
+
self.traverse(node.target)
|
| 878 |
+
self.write(" " + self.binop[node.op.__class__.__name__] + "= ")
|
| 879 |
+
self.traverse(node.value)
|
| 880 |
+
|
| 881 |
+
def visit_AnnAssign(self, node):
|
| 882 |
+
self.fill()
|
| 883 |
+
with self.delimit_if("(", ")", not node.simple and isinstance(node.target, Name)):
|
| 884 |
+
self.traverse(node.target)
|
| 885 |
+
self.write(": ")
|
| 886 |
+
self.traverse(node.annotation)
|
| 887 |
+
if node.value:
|
| 888 |
+
self.write(" = ")
|
| 889 |
+
self.traverse(node.value)
|
| 890 |
+
|
| 891 |
+
def visit_Return(self, node):
|
| 892 |
+
self.fill("return")
|
| 893 |
+
if node.value:
|
| 894 |
+
self.write(" ")
|
| 895 |
+
self.traverse(node.value)
|
| 896 |
+
|
| 897 |
+
def visit_Pass(self, node):
|
| 898 |
+
self.fill("pass")
|
| 899 |
+
|
| 900 |
+
def visit_Break(self, node):
|
| 901 |
+
self.fill("break")
|
| 902 |
+
|
| 903 |
+
def visit_Continue(self, node):
|
| 904 |
+
self.fill("continue")
|
| 905 |
+
|
| 906 |
+
def visit_Delete(self, node):
|
| 907 |
+
self.fill("del ")
|
| 908 |
+
self.interleave(lambda: self.write(", "), self.traverse, node.targets)
|
| 909 |
+
|
| 910 |
+
def visit_Assert(self, node):
|
| 911 |
+
self.fill("assert ")
|
| 912 |
+
self.traverse(node.test)
|
| 913 |
+
if node.msg:
|
| 914 |
+
self.write(", ")
|
| 915 |
+
self.traverse(node.msg)
|
| 916 |
+
|
| 917 |
+
def visit_Global(self, node):
|
| 918 |
+
self.fill("global ")
|
| 919 |
+
self.interleave(lambda: self.write(", "), self.write, node.names)
|
| 920 |
+
|
| 921 |
+
def visit_Nonlocal(self, node):
|
| 922 |
+
self.fill("nonlocal ")
|
| 923 |
+
self.interleave(lambda: self.write(", "), self.write, node.names)
|
| 924 |
+
|
| 925 |
+
def visit_Await(self, node):
|
| 926 |
+
with self.require_parens(_Precedence.AWAIT, node):
|
| 927 |
+
self.write("await")
|
| 928 |
+
if node.value:
|
| 929 |
+
self.write(" ")
|
| 930 |
+
self.set_precedence(_Precedence.ATOM, node.value)
|
| 931 |
+
self.traverse(node.value)
|
| 932 |
+
|
| 933 |
+
def visit_Yield(self, node):
|
| 934 |
+
with self.require_parens(_Precedence.YIELD, node):
|
| 935 |
+
self.write("yield")
|
| 936 |
+
if node.value:
|
| 937 |
+
self.write(" ")
|
| 938 |
+
self.set_precedence(_Precedence.ATOM, node.value)
|
| 939 |
+
self.traverse(node.value)
|
| 940 |
+
|
| 941 |
+
def visit_YieldFrom(self, node):
|
| 942 |
+
with self.require_parens(_Precedence.YIELD, node):
|
| 943 |
+
self.write("yield from ")
|
| 944 |
+
if not node.value:
|
| 945 |
+
raise ValueError("Node can't be used without a value attribute.")
|
| 946 |
+
self.set_precedence(_Precedence.ATOM, node.value)
|
| 947 |
+
self.traverse(node.value)
|
| 948 |
+
|
| 949 |
+
def visit_Raise(self, node):
|
| 950 |
+
self.fill("raise")
|
| 951 |
+
if not node.exc:
|
| 952 |
+
if node.cause:
|
| 953 |
+
raise ValueError(f"Node can't use cause without an exception.")
|
| 954 |
+
return
|
| 955 |
+
self.write(" ")
|
| 956 |
+
self.traverse(node.exc)
|
| 957 |
+
if node.cause:
|
| 958 |
+
self.write(" from ")
|
| 959 |
+
self.traverse(node.cause)
|
| 960 |
+
|
| 961 |
+
def visit_Try(self, node):
|
| 962 |
+
self.fill("try")
|
| 963 |
+
with self.block():
|
| 964 |
+
self.traverse(node.body)
|
| 965 |
+
for ex in node.handlers:
|
| 966 |
+
self.traverse(ex)
|
| 967 |
+
if node.orelse:
|
| 968 |
+
self.fill("else")
|
| 969 |
+
with self.block():
|
| 970 |
+
self.traverse(node.orelse)
|
| 971 |
+
if node.finalbody:
|
| 972 |
+
self.fill("finally")
|
| 973 |
+
with self.block():
|
| 974 |
+
self.traverse(node.finalbody)
|
| 975 |
+
|
| 976 |
+
def visit_ExceptHandler(self, node):
|
| 977 |
+
self.fill("except")
|
| 978 |
+
if node.type:
|
| 979 |
+
self.write(" ")
|
| 980 |
+
self.traverse(node.type)
|
| 981 |
+
if node.name:
|
| 982 |
+
self.write(" as ")
|
| 983 |
+
self.write(node.name)
|
| 984 |
+
with self.block():
|
| 985 |
+
self.traverse(node.body)
|
| 986 |
+
|
| 987 |
+
def visit_ClassDef(self, node):
|
| 988 |
+
self.maybe_newline()
|
| 989 |
+
for deco in node.decorator_list:
|
| 990 |
+
self.fill("@")
|
| 991 |
+
self.traverse(deco)
|
| 992 |
+
self.fill("class " + node.name)
|
| 993 |
+
with self.delimit_if("(", ")", condition = node.bases or node.keywords):
|
| 994 |
+
comma = False
|
| 995 |
+
for e in node.bases:
|
| 996 |
+
if comma:
|
| 997 |
+
self.write(", ")
|
| 998 |
+
else:
|
| 999 |
+
comma = True
|
| 1000 |
+
self.traverse(e)
|
| 1001 |
+
for e in node.keywords:
|
| 1002 |
+
if comma:
|
| 1003 |
+
self.write(", ")
|
| 1004 |
+
else:
|
| 1005 |
+
comma = True
|
| 1006 |
+
self.traverse(e)
|
| 1007 |
+
|
| 1008 |
+
with self.block():
|
| 1009 |
+
self._write_docstring_and_traverse_body(node)
|
| 1010 |
+
|
| 1011 |
+
def visit_FunctionDef(self, node):
|
| 1012 |
+
self._function_helper(node, "def")
|
| 1013 |
+
|
| 1014 |
+
def visit_AsyncFunctionDef(self, node):
|
| 1015 |
+
self._function_helper(node, "async def")
|
| 1016 |
+
|
| 1017 |
+
def _function_helper(self, node, fill_suffix):
|
| 1018 |
+
self.maybe_newline()
|
| 1019 |
+
for deco in node.decorator_list:
|
| 1020 |
+
self.fill("@")
|
| 1021 |
+
self.traverse(deco)
|
| 1022 |
+
def_str = fill_suffix + " " + node.name
|
| 1023 |
+
self.fill(def_str)
|
| 1024 |
+
with self.delimit("(", ")"):
|
| 1025 |
+
self.traverse(node.args)
|
| 1026 |
+
if node.returns:
|
| 1027 |
+
self.write(" -> ")
|
| 1028 |
+
self.traverse(node.returns)
|
| 1029 |
+
with self.block(extra=self.get_type_comment(node)):
|
| 1030 |
+
self._write_docstring_and_traverse_body(node)
|
| 1031 |
+
|
| 1032 |
+
def visit_For(self, node):
|
| 1033 |
+
self._for_helper("for ", node)
|
| 1034 |
+
|
| 1035 |
+
def visit_AsyncFor(self, node):
|
| 1036 |
+
self._for_helper("async for ", node)
|
| 1037 |
+
|
| 1038 |
+
def _for_helper(self, fill, node):
|
| 1039 |
+
self.fill(fill)
|
| 1040 |
+
self.traverse(node.target)
|
| 1041 |
+
self.write(" in ")
|
| 1042 |
+
self.traverse(node.iter)
|
| 1043 |
+
with self.block(extra=self.get_type_comment(node)):
|
| 1044 |
+
self.traverse(node.body)
|
| 1045 |
+
if node.orelse:
|
| 1046 |
+
self.fill("else")
|
| 1047 |
+
with self.block():
|
| 1048 |
+
self.traverse(node.orelse)
|
| 1049 |
+
|
| 1050 |
+
def visit_If(self, node):
|
| 1051 |
+
self.fill("if ")
|
| 1052 |
+
self.traverse(node.test)
|
| 1053 |
+
with self.block():
|
| 1054 |
+
self.traverse(node.body)
|
| 1055 |
+
# collapse nested ifs into equivalent elifs.
|
| 1056 |
+
while node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], If):
|
| 1057 |
+
node = node.orelse[0]
|
| 1058 |
+
self.fill("elif ")
|
| 1059 |
+
self.traverse(node.test)
|
| 1060 |
+
with self.block():
|
| 1061 |
+
self.traverse(node.body)
|
| 1062 |
+
# final else
|
| 1063 |
+
if node.orelse:
|
| 1064 |
+
self.fill("else")
|
| 1065 |
+
with self.block():
|
| 1066 |
+
self.traverse(node.orelse)
|
| 1067 |
+
|
| 1068 |
+
def visit_While(self, node):
|
| 1069 |
+
self.fill("while ")
|
| 1070 |
+
self.traverse(node.test)
|
| 1071 |
+
with self.block():
|
| 1072 |
+
self.traverse(node.body)
|
| 1073 |
+
if node.orelse:
|
| 1074 |
+
self.fill("else")
|
| 1075 |
+
with self.block():
|
| 1076 |
+
self.traverse(node.orelse)
|
| 1077 |
+
|
| 1078 |
+
def visit_With(self, node):
|
| 1079 |
+
self.fill("with ")
|
| 1080 |
+
self.interleave(lambda: self.write(", "), self.traverse, node.items)
|
| 1081 |
+
with self.block(extra=self.get_type_comment(node)):
|
| 1082 |
+
self.traverse(node.body)
|
| 1083 |
+
|
| 1084 |
+
def visit_AsyncWith(self, node):
|
| 1085 |
+
self.fill("async with ")
|
| 1086 |
+
self.interleave(lambda: self.write(", "), self.traverse, node.items)
|
| 1087 |
+
with self.block(extra=self.get_type_comment(node)):
|
| 1088 |
+
self.traverse(node.body)
|
| 1089 |
+
|
| 1090 |
+
def _str_literal_helper(
|
| 1091 |
+
self, string, *, quote_types=_ALL_QUOTES, escape_special_whitespace=False
|
| 1092 |
+
):
|
| 1093 |
+
"""Helper for writing string literals, minimizing escapes.
|
| 1094 |
+
Returns the tuple (string literal to write, possible quote types).
|
| 1095 |
+
"""
|
| 1096 |
+
def escape_char(c):
|
| 1097 |
+
# \n and \t are non-printable, but we only escape them if
|
| 1098 |
+
# escape_special_whitespace is True
|
| 1099 |
+
if not escape_special_whitespace and c in "\n\t":
|
| 1100 |
+
return c
|
| 1101 |
+
# Always escape backslashes and other non-printable characters
|
| 1102 |
+
if c == "\\" or not c.isprintable():
|
| 1103 |
+
return c.encode("unicode_escape").decode("ascii")
|
| 1104 |
+
return c
|
| 1105 |
+
|
| 1106 |
+
escaped_string = "".join(map(escape_char, string))
|
| 1107 |
+
possible_quotes = quote_types
|
| 1108 |
+
if "\n" in escaped_string:
|
| 1109 |
+
possible_quotes = [q for q in possible_quotes if q in _MULTI_QUOTES]
|
| 1110 |
+
possible_quotes = [q for q in possible_quotes if q not in escaped_string]
|
| 1111 |
+
if not possible_quotes:
|
| 1112 |
+
# If there aren't any possible_quotes, fallback to using repr
|
| 1113 |
+
# on the original string. Try to use a quote from quote_types,
|
| 1114 |
+
# e.g., so that we use triple quotes for docstrings.
|
| 1115 |
+
string = repr(string)
|
| 1116 |
+
quote = next((q for q in quote_types if string[0] in q), string[0])
|
| 1117 |
+
return string[1:-1], [quote]
|
| 1118 |
+
if escaped_string:
|
| 1119 |
+
# Sort so that we prefer '''"''' over """\""""
|
| 1120 |
+
possible_quotes.sort(key=lambda q: q[0] == escaped_string[-1])
|
| 1121 |
+
# If we're using triple quotes and we'd need to escape a final
|
| 1122 |
+
# quote, escape it
|
| 1123 |
+
if possible_quotes[0][0] == escaped_string[-1]:
|
| 1124 |
+
assert len(possible_quotes[0]) == 3
|
| 1125 |
+
escaped_string = escaped_string[:-1] + "\\" + escaped_string[-1]
|
| 1126 |
+
return escaped_string, possible_quotes
|
| 1127 |
+
|
| 1128 |
+
def _write_str_avoiding_backslashes(self, string, *, quote_types=_ALL_QUOTES):
|
| 1129 |
+
"""Write string literal value with a best effort attempt to avoid backslashes."""
|
| 1130 |
+
string, quote_types = self._str_literal_helper(string, quote_types=quote_types)
|
| 1131 |
+
quote_type = quote_types[0]
|
| 1132 |
+
self.write(f"{quote_type}{string}{quote_type}")
|
| 1133 |
+
|
| 1134 |
+
def visit_JoinedStr(self, node):
|
| 1135 |
+
self.write("f")
|
| 1136 |
+
if self._avoid_backslashes:
|
| 1137 |
+
self._fstring_JoinedStr(node, self.buffer_writer)
|
| 1138 |
+
self._write_str_avoiding_backslashes(self.buffer)
|
| 1139 |
+
return
|
| 1140 |
+
|
| 1141 |
+
# If we don't need to avoid backslashes globally (i.e., we only need
|
| 1142 |
+
# to avoid them inside FormattedValues), it's cosmetically preferred
|
| 1143 |
+
# to use escaped whitespace. That is, it's preferred to use backslashes
|
| 1144 |
+
# for cases like: f"{x}\n". To accomplish this, we keep track of what
|
| 1145 |
+
# in our buffer corresponds to FormattedValues and what corresponds to
|
| 1146 |
+
# Constant parts of the f-string, and allow escapes accordingly.
|
| 1147 |
+
buffer = []
|
| 1148 |
+
for value in node.values:
|
| 1149 |
+
meth = getattr(self, "_fstring_" + type(value).__name__)
|
| 1150 |
+
meth(value, self.buffer_writer)
|
| 1151 |
+
buffer.append((self.buffer, isinstance(value, Constant)))
|
| 1152 |
+
new_buffer = []
|
| 1153 |
+
quote_types = _ALL_QUOTES
|
| 1154 |
+
for value, is_constant in buffer:
|
| 1155 |
+
# Repeatedly narrow down the list of possible quote_types
|
| 1156 |
+
value, quote_types = self._str_literal_helper(
|
| 1157 |
+
value, quote_types=quote_types,
|
| 1158 |
+
escape_special_whitespace=is_constant
|
| 1159 |
+
)
|
| 1160 |
+
new_buffer.append(value)
|
| 1161 |
+
value = "".join(new_buffer)
|
| 1162 |
+
quote_type = quote_types[0]
|
| 1163 |
+
self.write(f"{quote_type}{value}{quote_type}")
|
| 1164 |
+
|
| 1165 |
+
def visit_FormattedValue(self, node):
|
| 1166 |
+
self.write("f")
|
| 1167 |
+
self._fstring_FormattedValue(node, self.buffer_writer)
|
| 1168 |
+
self._write_str_avoiding_backslashes(self.buffer)
|
| 1169 |
+
|
| 1170 |
+
def _fstring_JoinedStr(self, node, write):
|
| 1171 |
+
for value in node.values:
|
| 1172 |
+
meth = getattr(self, "_fstring_" + type(value).__name__)
|
| 1173 |
+
meth(value, write)
|
| 1174 |
+
|
| 1175 |
+
def _fstring_Constant(self, node, write):
|
| 1176 |
+
if not isinstance(node.value, str):
|
| 1177 |
+
raise ValueError("Constants inside JoinedStr should be a string.")
|
| 1178 |
+
value = node.value.replace("{", "{{").replace("}", "}}")
|
| 1179 |
+
write(value)
|
| 1180 |
+
|
| 1181 |
+
def _fstring_FormattedValue(self, node, write):
|
| 1182 |
+
write("{")
|
| 1183 |
+
unparser = type(self)(_avoid_backslashes=True)
|
| 1184 |
+
unparser.set_precedence(_Precedence.TEST.next(), node.value)
|
| 1185 |
+
expr = unparser.visit(node.value)
|
| 1186 |
+
if expr.startswith("{"):
|
| 1187 |
+
write(" ") # Separate pair of opening brackets as "{ {"
|
| 1188 |
+
if "\\" in expr:
|
| 1189 |
+
raise ValueError("Unable to avoid backslash in f-string expression part")
|
| 1190 |
+
write(expr)
|
| 1191 |
+
if node.conversion != -1:
|
| 1192 |
+
conversion = chr(node.conversion)
|
| 1193 |
+
if conversion not in "sra":
|
| 1194 |
+
raise ValueError("Unknown f-string conversion.")
|
| 1195 |
+
write(f"!{conversion}")
|
| 1196 |
+
if node.format_spec:
|
| 1197 |
+
write(":")
|
| 1198 |
+
meth = getattr(self, "_fstring_" + type(node.format_spec).__name__)
|
| 1199 |
+
meth(node.format_spec, write)
|
| 1200 |
+
write("}")
|
| 1201 |
+
|
| 1202 |
+
def visit_Name(self, node):
    """Write a bare identifier."""
    self.write(node.id)
|
| 1204 |
+
|
| 1205 |
+
def _write_docstring(self, node):
|
| 1206 |
+
self.fill()
|
| 1207 |
+
if node.kind == "u":
|
| 1208 |
+
self.write("u")
|
| 1209 |
+
self._write_str_avoiding_backslashes(node.value, quote_types=_MULTI_QUOTES)
|
| 1210 |
+
|
| 1211 |
+
def _write_constant(self, value):
|
| 1212 |
+
if isinstance(value, (float, complex)):
|
| 1213 |
+
# Substitute overflowing decimal literal for AST infinities,
|
| 1214 |
+
# and inf - inf for NaNs.
|
| 1215 |
+
self.write(
|
| 1216 |
+
repr(value)
|
| 1217 |
+
.replace("inf", _INFSTR)
|
| 1218 |
+
.replace("nan", f"({_INFSTR}-{_INFSTR})")
|
| 1219 |
+
)
|
| 1220 |
+
elif self._avoid_backslashes and isinstance(value, str):
|
| 1221 |
+
self._write_str_avoiding_backslashes(value)
|
| 1222 |
+
else:
|
| 1223 |
+
self.write(repr(value))
|
| 1224 |
+
|
| 1225 |
+
def visit_Constant(self, node):
    """Write a constant: tuples, Ellipsis, and scalar values each get a form."""
    value = node.value
    if isinstance(value, tuple):
        with self.delimit("(", ")"):
            self.items_view(self._write_constant, value)
        return
    if value is ...:
        self.write("...")
        return
    if node.kind == "u":
        self.write("u")
    self._write_constant(value)
|
| 1236 |
+
|
| 1237 |
+
def visit_List(self, node):
|
| 1238 |
+
with self.delimit("[", "]"):
|
| 1239 |
+
self.interleave(lambda: self.write(", "), self.traverse, node.elts)
|
| 1240 |
+
|
| 1241 |
+
def visit_ListComp(self, node):
|
| 1242 |
+
with self.delimit("[", "]"):
|
| 1243 |
+
self.traverse(node.elt)
|
| 1244 |
+
for gen in node.generators:
|
| 1245 |
+
self.traverse(gen)
|
| 1246 |
+
|
| 1247 |
+
def visit_GeneratorExp(self, node):
|
| 1248 |
+
with self.delimit("(", ")"):
|
| 1249 |
+
self.traverse(node.elt)
|
| 1250 |
+
for gen in node.generators:
|
| 1251 |
+
self.traverse(gen)
|
| 1252 |
+
|
| 1253 |
+
def visit_SetComp(self, node):
|
| 1254 |
+
with self.delimit("{", "}"):
|
| 1255 |
+
self.traverse(node.elt)
|
| 1256 |
+
for gen in node.generators:
|
| 1257 |
+
self.traverse(gen)
|
| 1258 |
+
|
| 1259 |
+
def visit_DictComp(self, node):
|
| 1260 |
+
with self.delimit("{", "}"):
|
| 1261 |
+
self.traverse(node.key)
|
| 1262 |
+
self.write(": ")
|
| 1263 |
+
self.traverse(node.value)
|
| 1264 |
+
for gen in node.generators:
|
| 1265 |
+
self.traverse(gen)
|
| 1266 |
+
|
| 1267 |
+
def visit_comprehension(self, node):
|
| 1268 |
+
if node.is_async:
|
| 1269 |
+
self.write(" async for ")
|
| 1270 |
+
else:
|
| 1271 |
+
self.write(" for ")
|
| 1272 |
+
self.set_precedence(_Precedence.TUPLE, node.target)
|
| 1273 |
+
self.traverse(node.target)
|
| 1274 |
+
self.write(" in ")
|
| 1275 |
+
self.set_precedence(_Precedence.TEST.next(), node.iter, *node.ifs)
|
| 1276 |
+
self.traverse(node.iter)
|
| 1277 |
+
for if_clause in node.ifs:
|
| 1278 |
+
self.write(" if ")
|
| 1279 |
+
self.traverse(if_clause)
|
| 1280 |
+
|
| 1281 |
+
def visit_IfExp(self, node):
|
| 1282 |
+
with self.require_parens(_Precedence.TEST, node):
|
| 1283 |
+
self.set_precedence(_Precedence.TEST.next(), node.body, node.test)
|
| 1284 |
+
self.traverse(node.body)
|
| 1285 |
+
self.write(" if ")
|
| 1286 |
+
self.traverse(node.test)
|
| 1287 |
+
self.write(" else ")
|
| 1288 |
+
self.set_precedence(_Precedence.TEST, node.orelse)
|
| 1289 |
+
self.traverse(node.orelse)
|
| 1290 |
+
|
| 1291 |
+
def visit_Set(self, node):
    """Write a set literal; the empty set needs a special spelling."""
    if not node.elts:
        # `{}` would parse as a dict literal, and the name `set` may be
        # shadowed, so the empty set is spelled as unpacking an empty tuple.
        self.write('{*()}')
        return
    with self.delimit("{", "}"):
        self.interleave(lambda: self.write(", "), self.traverse, node.elts)
|
| 1299 |
+
|
| 1300 |
+
def visit_Dict(self, node):
|
| 1301 |
+
def write_key_value_pair(k, v):
|
| 1302 |
+
self.traverse(k)
|
| 1303 |
+
self.write(": ")
|
| 1304 |
+
self.traverse(v)
|
| 1305 |
+
|
| 1306 |
+
def write_item(item):
|
| 1307 |
+
k, v = item
|
| 1308 |
+
if k is None:
|
| 1309 |
+
# for dictionary unpacking operator in dicts {**{'y': 2}}
|
| 1310 |
+
# see PEP 448 for details
|
| 1311 |
+
self.write("**")
|
| 1312 |
+
self.set_precedence(_Precedence.EXPR, v)
|
| 1313 |
+
self.traverse(v)
|
| 1314 |
+
else:
|
| 1315 |
+
write_key_value_pair(k, v)
|
| 1316 |
+
|
| 1317 |
+
with self.delimit("{", "}"):
|
| 1318 |
+
self.interleave(
|
| 1319 |
+
lambda: self.write(", "), write_item, zip(node.keys, node.values)
|
| 1320 |
+
)
|
| 1321 |
+
|
| 1322 |
+
def visit_Tuple(self, node):
|
| 1323 |
+
with self.delimit("(", ")"):
|
| 1324 |
+
self.items_view(self.traverse, node.elts)
|
| 1325 |
+
|
| 1326 |
+
unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
|
| 1327 |
+
unop_precedence = {
|
| 1328 |
+
"not": _Precedence.NOT,
|
| 1329 |
+
"~": _Precedence.FACTOR,
|
| 1330 |
+
"+": _Precedence.FACTOR,
|
| 1331 |
+
"-": _Precedence.FACTOR,
|
| 1332 |
+
}
|
| 1333 |
+
|
| 1334 |
+
def visit_UnaryOp(self, node):
|
| 1335 |
+
operator = self.unop[node.op.__class__.__name__]
|
| 1336 |
+
operator_precedence = self.unop_precedence[operator]
|
| 1337 |
+
with self.require_parens(operator_precedence, node):
|
| 1338 |
+
self.write(operator)
|
| 1339 |
+
# factor prefixes (+, -, ~) shouldn't be seperated
|
| 1340 |
+
# from the value they belong, (e.g: +1 instead of + 1)
|
| 1341 |
+
if operator_precedence is not _Precedence.FACTOR:
|
| 1342 |
+
self.write(" ")
|
| 1343 |
+
self.set_precedence(operator_precedence, node.operand)
|
| 1344 |
+
self.traverse(node.operand)
|
| 1345 |
+
|
| 1346 |
+
binop = {
|
| 1347 |
+
"Add": "+",
|
| 1348 |
+
"Sub": "-",
|
| 1349 |
+
"Mult": "*",
|
| 1350 |
+
"MatMult": "@",
|
| 1351 |
+
"Div": "/",
|
| 1352 |
+
"Mod": "%",
|
| 1353 |
+
"LShift": "<<",
|
| 1354 |
+
"RShift": ">>",
|
| 1355 |
+
"BitOr": "|",
|
| 1356 |
+
"BitXor": "^",
|
| 1357 |
+
"BitAnd": "&",
|
| 1358 |
+
"FloorDiv": "//",
|
| 1359 |
+
"Pow": "**",
|
| 1360 |
+
}
|
| 1361 |
+
|
| 1362 |
+
binop_precedence = {
|
| 1363 |
+
"+": _Precedence.ARITH,
|
| 1364 |
+
"-": _Precedence.ARITH,
|
| 1365 |
+
"*": _Precedence.TERM,
|
| 1366 |
+
"@": _Precedence.TERM,
|
| 1367 |
+
"/": _Precedence.TERM,
|
| 1368 |
+
"%": _Precedence.TERM,
|
| 1369 |
+
"<<": _Precedence.SHIFT,
|
| 1370 |
+
">>": _Precedence.SHIFT,
|
| 1371 |
+
"|": _Precedence.BOR,
|
| 1372 |
+
"^": _Precedence.BXOR,
|
| 1373 |
+
"&": _Precedence.BAND,
|
| 1374 |
+
"//": _Precedence.TERM,
|
| 1375 |
+
"**": _Precedence.POWER,
|
| 1376 |
+
}
|
| 1377 |
+
|
| 1378 |
+
binop_rassoc = frozenset(("**",))
|
| 1379 |
+
def visit_BinOp(self, node):
|
| 1380 |
+
operator = self.binop[node.op.__class__.__name__]
|
| 1381 |
+
operator_precedence = self.binop_precedence[operator]
|
| 1382 |
+
with self.require_parens(operator_precedence, node):
|
| 1383 |
+
if operator in self.binop_rassoc:
|
| 1384 |
+
left_precedence = operator_precedence.next()
|
| 1385 |
+
right_precedence = operator_precedence
|
| 1386 |
+
else:
|
| 1387 |
+
left_precedence = operator_precedence
|
| 1388 |
+
right_precedence = operator_precedence.next()
|
| 1389 |
+
|
| 1390 |
+
self.set_precedence(left_precedence, node.left)
|
| 1391 |
+
self.traverse(node.left)
|
| 1392 |
+
self.write(f" {operator} ")
|
| 1393 |
+
self.set_precedence(right_precedence, node.right)
|
| 1394 |
+
self.traverse(node.right)
|
| 1395 |
+
|
| 1396 |
+
cmpops = {
|
| 1397 |
+
"Eq": "==",
|
| 1398 |
+
"NotEq": "!=",
|
| 1399 |
+
"Lt": "<",
|
| 1400 |
+
"LtE": "<=",
|
| 1401 |
+
"Gt": ">",
|
| 1402 |
+
"GtE": ">=",
|
| 1403 |
+
"Is": "is",
|
| 1404 |
+
"IsNot": "is not",
|
| 1405 |
+
"In": "in",
|
| 1406 |
+
"NotIn": "not in",
|
| 1407 |
+
}
|
| 1408 |
+
|
| 1409 |
+
def visit_Compare(self, node):
|
| 1410 |
+
with self.require_parens(_Precedence.CMP, node):
|
| 1411 |
+
self.set_precedence(_Precedence.CMP.next(), node.left, *node.comparators)
|
| 1412 |
+
self.traverse(node.left)
|
| 1413 |
+
for o, e in zip(node.ops, node.comparators):
|
| 1414 |
+
self.write(" " + self.cmpops[o.__class__.__name__] + " ")
|
| 1415 |
+
self.traverse(e)
|
| 1416 |
+
|
| 1417 |
+
boolops = {"And": "and", "Or": "or"}
|
| 1418 |
+
boolop_precedence = {"and": _Precedence.AND, "or": _Precedence.OR}
|
| 1419 |
+
|
| 1420 |
+
def visit_BoolOp(self, node):
|
| 1421 |
+
operator = self.boolops[node.op.__class__.__name__]
|
| 1422 |
+
operator_precedence = self.boolop_precedence[operator]
|
| 1423 |
+
|
| 1424 |
+
def increasing_level_traverse(node):
|
| 1425 |
+
nonlocal operator_precedence
|
| 1426 |
+
operator_precedence = operator_precedence.next()
|
| 1427 |
+
self.set_precedence(operator_precedence, node)
|
| 1428 |
+
self.traverse(node)
|
| 1429 |
+
|
| 1430 |
+
with self.require_parens(operator_precedence, node):
|
| 1431 |
+
s = f" {operator} "
|
| 1432 |
+
self.interleave(lambda: self.write(s), increasing_level_traverse, node.values)
|
| 1433 |
+
|
| 1434 |
+
def visit_Attribute(self, node):
|
| 1435 |
+
self.set_precedence(_Precedence.ATOM, node.value)
|
| 1436 |
+
self.traverse(node.value)
|
| 1437 |
+
# Special case: 3.__abs__() is a syntax error, so if node.value
|
| 1438 |
+
# is an integer literal then we need to either parenthesize
|
| 1439 |
+
# it or add an extra space to get 3 .__abs__().
|
| 1440 |
+
if isinstance(node.value, Constant) and isinstance(node.value.value, int):
|
| 1441 |
+
self.write(" ")
|
| 1442 |
+
self.write(".")
|
| 1443 |
+
self.write(node.attr)
|
| 1444 |
+
|
| 1445 |
+
def visit_Call(self, node):
|
| 1446 |
+
self.set_precedence(_Precedence.ATOM, node.func)
|
| 1447 |
+
self.traverse(node.func)
|
| 1448 |
+
with self.delimit("(", ")"):
|
| 1449 |
+
comma = False
|
| 1450 |
+
for e in node.args:
|
| 1451 |
+
if comma:
|
| 1452 |
+
self.write(", ")
|
| 1453 |
+
else:
|
| 1454 |
+
comma = True
|
| 1455 |
+
self.traverse(e)
|
| 1456 |
+
for e in node.keywords:
|
| 1457 |
+
if comma:
|
| 1458 |
+
self.write(", ")
|
| 1459 |
+
else:
|
| 1460 |
+
comma = True
|
| 1461 |
+
self.traverse(e)
|
| 1462 |
+
|
| 1463 |
+
def visit_Subscript(self, node):
|
| 1464 |
+
def is_simple_tuple(slice_value):
|
| 1465 |
+
# when unparsing a non-empty tuple, the parentheses can be safely
|
| 1466 |
+
# omitted if there aren't any elements that explicitly requires
|
| 1467 |
+
# parentheses (such as starred expressions).
|
| 1468 |
+
return (
|
| 1469 |
+
isinstance(slice_value, Tuple)
|
| 1470 |
+
and slice_value.elts
|
| 1471 |
+
and not any(isinstance(elt, Starred) for elt in slice_value.elts)
|
| 1472 |
+
)
|
| 1473 |
+
|
| 1474 |
+
self.set_precedence(_Precedence.ATOM, node.value)
|
| 1475 |
+
self.traverse(node.value)
|
| 1476 |
+
with self.delimit("[", "]"):
|
| 1477 |
+
if is_simple_tuple(node.slice):
|
| 1478 |
+
self.items_view(self.traverse, node.slice.elts)
|
| 1479 |
+
else:
|
| 1480 |
+
self.traverse(node.slice)
|
| 1481 |
+
|
| 1482 |
+
def visit_Starred(self, node):
    """Write a starred (unpacking) expression."""
    value = node.value
    self.set_precedence(_Precedence.EXPR, value)
    self.write("*")
    self.traverse(value)
|
| 1486 |
+
|
| 1487 |
+
def visit_Ellipsis(self, node):
    """Write the literal ``...`` (legacy Ellipsis node)."""
    self.write("...")
|
| 1489 |
+
|
| 1490 |
+
def visit_Slice(self, node):
    """Write a slice as ``lower:upper[:step]`` with absent parts omitted."""
    lower, upper, step = node.lower, node.upper, node.step
    if lower:
        self.traverse(lower)
    self.write(":")
    if upper:
        self.traverse(upper)
    if step:
        self.write(":")
        self.traverse(step)
|
| 1499 |
+
|
| 1500 |
+
def visit_Match(self, node):
|
| 1501 |
+
self.fill("match ")
|
| 1502 |
+
self.traverse(node.subject)
|
| 1503 |
+
with self.block():
|
| 1504 |
+
for case in node.cases:
|
| 1505 |
+
self.traverse(case)
|
| 1506 |
+
|
| 1507 |
+
def visit_arg(self, node):
|
| 1508 |
+
self.write(node.arg)
|
| 1509 |
+
if node.annotation:
|
| 1510 |
+
self.write(": ")
|
| 1511 |
+
self.traverse(node.annotation)
|
| 1512 |
+
|
| 1513 |
+
def visit_arguments(self, node):
|
| 1514 |
+
first = True
|
| 1515 |
+
# normal arguments
|
| 1516 |
+
all_args = node.posonlyargs + node.args
|
| 1517 |
+
defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults
|
| 1518 |
+
for index, elements in enumerate(zip(all_args, defaults), 1):
|
| 1519 |
+
a, d = elements
|
| 1520 |
+
if first:
|
| 1521 |
+
first = False
|
| 1522 |
+
else:
|
| 1523 |
+
self.write(", ")
|
| 1524 |
+
self.traverse(a)
|
| 1525 |
+
if d:
|
| 1526 |
+
self.write("=")
|
| 1527 |
+
self.traverse(d)
|
| 1528 |
+
if index == len(node.posonlyargs):
|
| 1529 |
+
self.write(", /")
|
| 1530 |
+
|
| 1531 |
+
# varargs, or bare '*' if no varargs but keyword-only arguments present
|
| 1532 |
+
if node.vararg or node.kwonlyargs:
|
| 1533 |
+
if first:
|
| 1534 |
+
first = False
|
| 1535 |
+
else:
|
| 1536 |
+
self.write(", ")
|
| 1537 |
+
self.write("*")
|
| 1538 |
+
if node.vararg:
|
| 1539 |
+
self.write(node.vararg.arg)
|
| 1540 |
+
if node.vararg.annotation:
|
| 1541 |
+
self.write(": ")
|
| 1542 |
+
self.traverse(node.vararg.annotation)
|
| 1543 |
+
|
| 1544 |
+
# keyword-only arguments
|
| 1545 |
+
if node.kwonlyargs:
|
| 1546 |
+
for a, d in zip(node.kwonlyargs, node.kw_defaults):
|
| 1547 |
+
self.write(", ")
|
| 1548 |
+
self.traverse(a)
|
| 1549 |
+
if d:
|
| 1550 |
+
self.write("=")
|
| 1551 |
+
self.traverse(d)
|
| 1552 |
+
|
| 1553 |
+
# kwargs
|
| 1554 |
+
if node.kwarg:
|
| 1555 |
+
if first:
|
| 1556 |
+
first = False
|
| 1557 |
+
else:
|
| 1558 |
+
self.write(", ")
|
| 1559 |
+
self.write("**" + node.kwarg.arg)
|
| 1560 |
+
if node.kwarg.annotation:
|
| 1561 |
+
self.write(": ")
|
| 1562 |
+
self.traverse(node.kwarg.annotation)
|
| 1563 |
+
|
| 1564 |
+
def visit_keyword(self, node):
    """Write a call keyword: ``name=value``, or ``**value`` for unpacking."""
    arg = node.arg
    if arg is None:
        self.write("**")
    else:
        self.write(arg)
        self.write("=")
    self.traverse(node.value)
|
| 1571 |
+
|
| 1572 |
+
def visit_Lambda(self, node):
|
| 1573 |
+
with self.require_parens(_Precedence.TEST, node):
|
| 1574 |
+
self.write("lambda ")
|
| 1575 |
+
self.traverse(node.args)
|
| 1576 |
+
self.write(": ")
|
| 1577 |
+
self.set_precedence(_Precedence.TEST, node.body)
|
| 1578 |
+
self.traverse(node.body)
|
| 1579 |
+
|
| 1580 |
+
def visit_alias(self, node):
    """Write an import alias: ``name`` or ``name as asname``."""
    self.write(node.name)
    alias = node.asname
    if alias:
        self.write(" as " + alias)
|
| 1584 |
+
|
| 1585 |
+
def visit_withitem(self, node):
    """Write one ``with`` item: context expression plus optional ``as`` target."""
    self.traverse(node.context_expr)
    target = node.optional_vars
    if target:
        self.write(" as ")
        self.traverse(target)
|
| 1590 |
+
|
| 1591 |
+
def visit_match_case(self, node):
|
| 1592 |
+
self.fill("case ")
|
| 1593 |
+
self.traverse(node.pattern)
|
| 1594 |
+
if node.guard:
|
| 1595 |
+
self.write(" if ")
|
| 1596 |
+
self.traverse(node.guard)
|
| 1597 |
+
with self.block():
|
| 1598 |
+
self.traverse(node.body)
|
| 1599 |
+
|
| 1600 |
+
def visit_MatchValue(self, node):
|
| 1601 |
+
self.traverse(node.value)
|
| 1602 |
+
|
| 1603 |
+
def visit_MatchSingleton(self, node):
|
| 1604 |
+
self._write_constant(node.value)
|
| 1605 |
+
|
| 1606 |
+
def visit_MatchSequence(self, node):
|
| 1607 |
+
with self.delimit("[", "]"):
|
| 1608 |
+
self.interleave(
|
| 1609 |
+
lambda: self.write(", "), self.traverse, node.patterns
|
| 1610 |
+
)
|
| 1611 |
+
|
| 1612 |
+
def visit_MatchStar(self, node):
    """Write a ``*name`` capture pattern; an anonymous star becomes ``*_``."""
    target = node.name if node.name is not None else "_"
    self.write(f"*{target}")
|
| 1617 |
+
|
| 1618 |
+
def visit_MatchMapping(self, node):
|
| 1619 |
+
def write_key_pattern_pair(pair):
|
| 1620 |
+
k, p = pair
|
| 1621 |
+
self.traverse(k)
|
| 1622 |
+
self.write(": ")
|
| 1623 |
+
self.traverse(p)
|
| 1624 |
+
|
| 1625 |
+
with self.delimit("{", "}"):
|
| 1626 |
+
keys = node.keys
|
| 1627 |
+
self.interleave(
|
| 1628 |
+
lambda: self.write(", "),
|
| 1629 |
+
write_key_pattern_pair,
|
| 1630 |
+
zip(keys, node.patterns, strict=True),
|
| 1631 |
+
)
|
| 1632 |
+
rest = node.rest
|
| 1633 |
+
if rest is not None:
|
| 1634 |
+
if keys:
|
| 1635 |
+
self.write(", ")
|
| 1636 |
+
self.write(f"**{rest}")
|
| 1637 |
+
|
| 1638 |
+
def visit_MatchClass(self, node):
|
| 1639 |
+
self.set_precedence(_Precedence.ATOM, node.cls)
|
| 1640 |
+
self.traverse(node.cls)
|
| 1641 |
+
with self.delimit("(", ")"):
|
| 1642 |
+
patterns = node.patterns
|
| 1643 |
+
self.interleave(
|
| 1644 |
+
lambda: self.write(", "), self.traverse, patterns
|
| 1645 |
+
)
|
| 1646 |
+
attrs = node.kwd_attrs
|
| 1647 |
+
if attrs:
|
| 1648 |
+
def write_attr_pattern(pair):
|
| 1649 |
+
attr, pattern = pair
|
| 1650 |
+
self.write(f"{attr}=")
|
| 1651 |
+
self.traverse(pattern)
|
| 1652 |
+
|
| 1653 |
+
if patterns:
|
| 1654 |
+
self.write(", ")
|
| 1655 |
+
self.interleave(
|
| 1656 |
+
lambda: self.write(", "),
|
| 1657 |
+
write_attr_pattern,
|
| 1658 |
+
zip(attrs, node.kwd_patterns, strict=True),
|
| 1659 |
+
)
|
| 1660 |
+
|
| 1661 |
+
def visit_MatchAs(self, node):
|
| 1662 |
+
name = node.name
|
| 1663 |
+
pattern = node.pattern
|
| 1664 |
+
if name is None:
|
| 1665 |
+
self.write("_")
|
| 1666 |
+
elif pattern is None:
|
| 1667 |
+
self.write(node.name)
|
| 1668 |
+
else:
|
| 1669 |
+
with self.require_parens(_Precedence.TEST, node):
|
| 1670 |
+
self.set_precedence(_Precedence.BOR, node.pattern)
|
| 1671 |
+
self.traverse(node.pattern)
|
| 1672 |
+
self.write(f" as {node.name}")
|
| 1673 |
+
|
| 1674 |
+
def visit_MatchOr(self, node):
|
| 1675 |
+
with self.require_parens(_Precedence.BOR, node):
|
| 1676 |
+
self.set_precedence(_Precedence.BOR.next(), *node.patterns)
|
| 1677 |
+
self.interleave(lambda: self.write(" | "), self.traverse, node.patterns)
|
| 1678 |
+
|
| 1679 |
+
def unparse(ast_obj):
    """Unparse *ast_obj* back into a string of equivalent source code."""
    return _Unparser().visit(ast_obj)
|
| 1682 |
+
|
| 1683 |
+
|
| 1684 |
+
def main():
    """Command-line entry point: parse a source file and pretty-print its AST."""
    import argparse

    cli = argparse.ArgumentParser(prog='python -m ast')
    cli.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
                     default='-',
                     help='the file to parse; defaults to stdin')
    cli.add_argument('-m', '--mode', default='exec',
                     choices=('exec', 'single', 'eval', 'func_type'),
                     help='specify what kind of code must be parsed')
    # store_false with default True: passing the flag disables type comments.
    cli.add_argument('--no-type-comments', default=True, action='store_false',
                     help="don't add information about type comments")
    cli.add_argument('-a', '--include-attributes', action='store_true',
                     help='include attributes such as line numbers and '
                          'column offsets')
    cli.add_argument('-i', '--indent', type=int, default=3,
                     help='indentation of nodes (number of spaces)')
    args = cli.parse_args()

    with args.infile as infile:
        source = infile.read()
    tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
    print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
|
| 1707 |
+
|
| 1708 |
+
if __name__ == '__main__':
|
| 1709 |
+
main()
|
evalkit_tf446/lib/python3.10/binhex.py
ADDED
|
@@ -0,0 +1,502 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Macintosh binhex compression/decompression.
|
| 2 |
+
|
| 3 |
+
easy interface:
|
| 4 |
+
binhex(inputfilename, outputfilename)
|
| 5 |
+
hexbin(inputfilename, outputfilename)
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
#
|
| 9 |
+
# Jack Jansen, CWI, August 1995.
|
| 10 |
+
#
|
| 11 |
+
# The module is supposed to be as compatible as possible. Especially the
|
| 12 |
+
# easy interface should work "as expected" on any platform.
|
| 13 |
+
# XXXX Note: currently, textfiles appear in mac-form on all platforms.
|
| 14 |
+
# We seem to lack a simple character-translate in python.
|
| 15 |
+
# (we should probably use ISO-Latin-1 on all but the mac platform).
|
| 16 |
+
# XXXX The simple routines are too simple: they expect to hold the complete
|
| 17 |
+
# files in-core. Should be fixed.
|
| 18 |
+
# XXXX It would be nice to handle AppleDouble format on unix
|
| 19 |
+
# (for servers serving macs).
|
| 20 |
+
# XXXX I don't understand what happens when you get 0x90 times the same byte on
|
| 21 |
+
# input. The resulting code (xx 90 90) would appear to be interpreted as an
|
| 22 |
+
# escaped *value* of 0x90. All coders I've seen appear to ignore this nicety...
|
| 23 |
+
#
|
| 24 |
+
import binascii
|
| 25 |
+
import contextlib
|
| 26 |
+
import io
|
| 27 |
+
import os
|
| 28 |
+
import struct
|
| 29 |
+
import warnings
|
| 30 |
+
|
| 31 |
+
warnings.warn('the binhex module is deprecated', DeprecationWarning,
|
| 32 |
+
stacklevel=2)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
__all__ = ["binhex","hexbin","Error"]
|
| 36 |
+
|
| 37 |
+
class Error(Exception):
    """Raised for any binhex encoding or decoding problem."""
|
| 39 |
+
|
| 40 |
+
# States (what have we written)
|
| 41 |
+
_DID_HEADER = 0
|
| 42 |
+
_DID_DATA = 1
|
| 43 |
+
|
| 44 |
+
# Various constants
|
| 45 |
+
REASONABLY_LARGE = 32768 # Minimal amount we pass the rle-coder
|
| 46 |
+
LINELEN = 64
|
| 47 |
+
RUNCHAR = b"\x90"
|
| 48 |
+
|
| 49 |
+
#
|
| 50 |
+
# This code is no longer byte-order dependent
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class FInfo:
    """Mac-style file metadata: type/creator four-char codes plus flags."""

    def __init__(self):
        self.Type = '????'      # file type code (unknown by default)
        self.Creator = '????'   # creator application code
        self.Flags = 0          # Finder flags


def getfileinfo(name):
    """Return (basename, FInfo, data-fork size, resource-fork size).

    A file whose first 512 bytes contain no NUL byte is classified as
    'TEXT'.  The resource-fork size is always 0 on non-Mac platforms.
    """
    info = FInfo()
    with io.open(name, 'rb') as fp:
        # Quick check for textfile: sniff the first block for binary data.
        head = fp.read(512)
        if 0 not in head:
            info.Type = 'TEXT'
        fp.seek(0, 2)           # jump to EOF to learn the data-fork size
        dsize = fp.tell()
    _dir, basename = os.path.split(name)
    basename = basename.replace(':', '-', 1)
    return basename, info, dsize, 0
|
| 71 |
+
|
| 72 |
+
class openrsrc:
    """Stub resource-fork file for platforms that have no resource forks."""

    def __init__(self, *args):
        """Accept and ignore any open()-style arguments."""

    def read(self, *args):
        """Resource forks are always empty here."""
        return b''

    def write(self, *args):
        """Silently discard the data."""

    def close(self):
        """Nothing to release."""
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# DeprecationWarning is already emitted on "import binhex". There is no need
|
| 87 |
+
# to repeat the warning at each call to deprecated binascii functions.
|
| 88 |
+
@contextlib.contextmanager
def _ignore_deprecation_warning():
    """Suppress DeprecationWarning inside the with-block.

    "import binhex" already warned once; the deprecated binascii hqx
    helpers called throughout this module should not warn again on
    every use.
    """
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', '', DeprecationWarning)
        yield
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class _Hqxcoderengine:
    """Write data to the coder in 3-byte chunks"""

    def __init__(self, ofp):
        self.ofp = ofp          # underlying binary output file
        self.data = b''         # raw bytes not yet encoded (< 3 bytes)
        self.hqxdata = b''      # encoded bytes not yet written out
        # The first output line is one column short: the opening ':' of
        # the binhex stream already occupies a position on that line.
        self.linelen = LINELEN - 1

    def write(self, data):
        self.data = self.data + data
        datalen = len(self.data)
        # b2a_hqx wants whole 3-byte groups; carry the remainder forward.
        todo = (datalen // 3) * 3
        data = self.data[:todo]
        self.data = self.data[todo:]
        if not data:
            return
        with _ignore_deprecation_warning():
            self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
        self._flush(0)

    def _flush(self, force):
        # Emit as many complete CR-terminated lines as are buffered; when
        # *force* is true (closing) also emit the partial last line plus
        # the terminating ':'.
        first = 0
        while first <= len(self.hqxdata) - self.linelen:
            last = first + self.linelen
            self.ofp.write(self.hqxdata[first:last] + b'\r')
            self.linelen = LINELEN      # later lines get the full width
            first = last
        self.hqxdata = self.hqxdata[first:]
        if force:
            self.ofp.write(self.hqxdata + b':\r')

    def close(self):
        # Encode any leftover (< 3 byte) tail, flush everything, and close
        # the downstream file.
        if self.data:
            with _ignore_deprecation_warning():
                self.hqxdata = self.hqxdata + binascii.b2a_hqx(self.data)
        self._flush(1)
        self.ofp.close()
        del self.ofp
|
| 134 |
+
|
| 135 |
+
class _Rlecoderengine:
    """Write data to the RLE-coder in suitably large chunks"""

    def __init__(self, ofp):
        self.ofp = ofp      # downstream writer (an _Hqxcoderengine)
        self.data = b''     # pending bytes, buffered so runs are not split

    def write(self, data):
        self.data = self.data + data
        if len(self.data) < REASONABLY_LARGE:
            return          # keep buffering so RLE sees long runs intact
        # NOTE(review): a byte run that straddles a flush boundary is
        # encoded as two separate runs -- valid output, just suboptimal.
        with _ignore_deprecation_warning():
            rledata = binascii.rlecode_hqx(self.data)
        self.ofp.write(rledata)
        self.data = b''

    def close(self):
        # Flush any remaining buffered bytes, then close downstream.
        if self.data:
            with _ignore_deprecation_warning():
                rledata = binascii.rlecode_hqx(self.data)
            self.ofp.write(rledata)
        self.ofp.close()
        del self.ofp
|
| 158 |
+
|
| 159 |
+
class BinHex:
    """Encoder writing one file's header, data fork and resource fork
    as a binhex4 stream to *ofp* (a filename or binary file object).
    """

    def __init__(self, name_finfo_dlen_rlen, ofp):
        name, finfo, dlen, rlen = name_finfo_dlen_rlen
        close_on_error = False
        if isinstance(ofp, str):
            ofname = ofp
            ofp = io.open(ofname, 'wb')
            close_on_error = True   # we own the file only if we opened it
        try:
            ofp.write(b'(This file must be converted with BinHex 4.0)\r\r:')
            hqxer = _Hqxcoderengine(ofp)
            # Payload is RLE-compressed first, then hqx-encoded.
            self.ofp = _Rlecoderengine(hqxer)
            self.crc = 0
            if finfo is None:
                finfo = FInfo()
            self.dlen = dlen    # data-fork bytes still expected
            self.rlen = rlen    # resource-fork bytes still expected
            self._writeinfo(name, finfo)
            self.state = _DID_HEADER
        except:
            if close_on_error:
                ofp.close()
            raise

    def _writeinfo(self, name, finfo):
        """Emit the header record: name, type/creator, flags, fork sizes."""
        nl = len(name)
        if nl > 63:
            raise Error('Filename too long')
        d = bytes([nl]) + name.encode("latin-1") + b'\0'
        tp, cr = finfo.Type, finfo.Creator
        if isinstance(tp, str):
            tp = tp.encode("latin-1")
        if isinstance(cr, str):
            cr = cr.encode("latin-1")
        d2 = tp + cr

        # Force all structs to be packed with big-endian
        d3 = struct.pack('>h', finfo.Flags)
        d4 = struct.pack('>ii', self.dlen, self.rlen)
        info = d + d2 + d3 + d4
        self._write(info)
        self._writecrc()

    def _write(self, data):
        # Every payload byte also feeds the running CRC.
        self.crc = binascii.crc_hqx(data, self.crc)
        self.ofp.write(data)

    def _writecrc(self):
        """Append the 16-bit CRC of the preceding section and reset it."""
        # XXXX Should this be here??
        # self.crc = binascii.crc_hqx('\0\0', self.crc)
        if self.crc < 0:
            fmt = '>h'
        else:
            fmt = '>H'
        self.ofp.write(struct.pack(fmt, self.crc))
        self.crc = 0

    def write(self, data):
        """Write *data* to the data fork (only valid after the header)."""
        if self.state != _DID_HEADER:
            raise Error('Writing data at the wrong time')
        self.dlen = self.dlen - len(data)
        self._write(data)

    def close_data(self):
        """Finish the data fork: verify its size and write its CRC."""
        if self.dlen != 0:
            # Bug fix: report the data-fork discrepancy (dlen); the
            # original reported the unrelated self.rlen here.
            raise Error('Incorrect data size, diff=%r' % (self.dlen,))
        self._writecrc()
        self.state = _DID_DATA

    def write_rsrc(self, data):
        """Write *data* to the resource fork, closing the data fork first
        if the caller has not done so."""
        if self.state < _DID_DATA:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Writing resource data at the wrong time')
        self.rlen = self.rlen - len(data)
        self._write(data)

    def close(self):
        """Finish the resource fork and close the whole encoder chain.

        Safe to call more than once.
        """
        if self.state is None:
            return
        try:
            if self.state < _DID_DATA:
                self.close_data()
            if self.state != _DID_DATA:
                raise Error('Close at the wrong time')
            if self.rlen != 0:
                raise Error("Incorrect resource-datasize, diff=%r" % (self.rlen,))
            self._writecrc()
        finally:
            # Always tear down, even if the size checks above raised.
            self.state = None
            ofp = self.ofp
            del self.ofp
            ofp.close()
|
| 252 |
+
|
| 253 |
+
def binhex(inp, out):
    """binhex(infilename, outfilename): create binhex-encoded copy of a file"""
    finfo = getfileinfo(inp)
    ofp = BinHex(finfo, out)

    with io.open(inp, 'rb') as ifp:
        # XXXX Do textfile translation on non-mac systems
        while True:
            d = ifp.read(128000)    # stream the data fork in large chunks
            if not d: break
            ofp.write(d)
        ofp.close_data()

    # Resource fork: openrsrc is a stub (always empty) off the Mac.
    ifp = openrsrc(inp, 'rb')
    while True:
        d = ifp.read(128000)
        if not d: break
        ofp.write_rsrc(d)
    ofp.close()
    ifp.close()
|
| 273 |
+
|
| 274 |
+
class _Hqxdecoderengine:
    """Read data via the decoder in 4-byte chunks"""

    def __init__(self, ifp):
        self.ifp = ifp
        self.eof = 0    # set once a2b_hqx sees the stream-terminating ':'

    def read(self, totalwtd):
        """Read at least wtd bytes (or until EOF)"""
        decdata = b''
        wtd = totalwtd
        #
        # The loop here is convoluted, since we don't really know how
        # much to decode: there may be newlines in the incoming data.
        while wtd > 0:
            if self.eof: return decdata
            # 4 encoded characters yield 3 decoded bytes.
            wtd = ((wtd + 2) // 3) * 4
            data = self.ifp.read(wtd)
            #
            # Next problem: there may not be a complete number of
            # bytes in what we pass to a2b. Solve by yet another
            # loop: extend the input one byte at a time until the
            # decoder accepts it.
            #
            while True:
                try:
                    with _ignore_deprecation_warning():
                        decdatacur, self.eof = binascii.a2b_hqx(data)
                    break
                except binascii.Incomplete:
                    pass
                newdata = self.ifp.read(1)
                if not newdata:
                    raise Error('Premature EOF on binhex file')
                data = data + newdata
            decdata = decdata + decdatacur
            wtd = totalwtd - len(decdata)
            if not decdata and not self.eof:
                raise Error('Premature EOF on binhex file')
        return decdata

    def close(self):
        self.ifp.close()
|
| 316 |
+
|
| 317 |
+
class _Rledecoderengine:
    """Read data via the RLE-coder"""

    def __init__(self, ifp):
        self.ifp = ifp
        self.pre_buffer = b''    # hqx-decoded bytes not yet RLE-expanded
        self.post_buffer = b''   # fully decoded bytes ready to hand out
        self.eof = 0

    def read(self, wtd):
        """Return up to *wtd* decoded bytes (fewer only at EOF)."""
        if wtd > len(self.post_buffer):
            self._fill(wtd - len(self.post_buffer))
        rv = self.post_buffer[:wtd]
        self.post_buffer = self.post_buffer[wtd:]
        return rv

    def _fill(self, wtd):
        # Over-read by 4 bytes so the tail inspection below always has
        # enough context to spot a partial RLE escape sequence.
        self.pre_buffer = self.pre_buffer + self.ifp.read(wtd + 4)
        if self.ifp.eof:
            # No more input coming: whatever we have must be complete.
            with _ignore_deprecation_warning():
                self.post_buffer = self.post_buffer + \
                    binascii.rledecode_hqx(self.pre_buffer)
            self.pre_buffer = b''
            return

        #
        # Obfuscated code ahead. We have to take care that we don't
        # end up with an orphaned RUNCHAR later on. So, we keep a couple
        # of bytes in the buffer, depending on what the end of
        # the buffer looks like:
        # '\220\0\220' - Keep 3 bytes: repeated \220 (escaped as \220\0)
        # '?\220' - Keep 2 bytes: repeated something-else
        # '\220\0' - Escaped \220: Keep 2 bytes.
        # '?\220?' - Complete repeat sequence: decode all
        # otherwise: keep 1 byte.
        #
        mark = len(self.pre_buffer)
        if self.pre_buffer[-3:] == RUNCHAR + b'\0' + RUNCHAR:
            mark = mark - 3
        elif self.pre_buffer[-1:] == RUNCHAR:
            mark = mark - 2
        elif self.pre_buffer[-2:] == RUNCHAR + b'\0':
            mark = mark - 2
        elif self.pre_buffer[-2:-1] == RUNCHAR:
            pass # Decode all
        else:
            mark = mark - 1

        # Decode everything up to *mark*; the held-back tail is prepended
        # to the next chunk read.
        with _ignore_deprecation_warning():
            self.post_buffer = self.post_buffer + \
                binascii.rledecode_hqx(self.pre_buffer[:mark])
        self.pre_buffer = self.pre_buffer[mark:]

    def close(self):
        self.ifp.close()
|
| 372 |
+
|
| 373 |
+
class HexBin:
    """Decoder for a binhex4 stream: header, data fork, resource fork."""

    def __init__(self, ifp):
        if isinstance(ifp, str):
            ifp = io.open(ifp, 'rb')
        #
        # Find initial colon.
        #
        while True:
            ch = ifp.read(1)
            if not ch:
                raise Error("No binhex data found")
            # Cater for \r\n terminated lines (which show up as \n\r, hence
            # all lines start with \r)
            if ch == b'\r':
                continue
            if ch == b':':
                break

        # Decoding pipeline: hqx-decode first, then RLE-expand.
        hqxifp = _Hqxdecoderengine(ifp)
        self.ifp = _Rledecoderengine(hqxifp)
        self.crc = 0
        self._readheader()

    def _read(self, count):
        """Read *count* decoded bytes, folding them into the running CRC.

        (Renamed parameter: the original shadowed the builtin ``len``.)
        """
        data = self.ifp.read(count)
        self.crc = binascii.crc_hqx(data, self.crc)
        return data

    def _checkcrc(self):
        """Compare the stream's 16-bit CRC against the running CRC."""
        filecrc = struct.unpack('>h', self.ifp.read(2))[0] & 0xffff
        #self.crc = binascii.crc_hqx('\0\0', self.crc)
        # XXXX Is this needed??
        self.crc = self.crc & 0xffff
        if filecrc != self.crc:
            raise Error('CRC error, computed %x, read %x'
                        % (self.crc, filecrc))
        self.crc = 0

    def _readheader(self):
        # Header layout: 1-byte name length, name, NUL, 4-byte type,
        # 4-byte creator, 2-byte flags, 4-byte data size, 4-byte rsrc size.
        # (Locals renamed: the original shadowed builtins len/type.)
        namelen = self._read(1)
        fname = self._read(ord(namelen))
        rest = self._read(1 + 4 + 4 + 2 + 4 + 4)
        self._checkcrc()

        ftype = rest[1:5]
        creator = rest[5:9]
        flags = struct.unpack('>h', rest[9:11])[0]
        self.dlen = struct.unpack('>l', rest[11:15])[0]
        self.rlen = struct.unpack('>l', rest[15:19])[0]

        self.FName = fname
        self.FInfo = FInfo()
        self.FInfo.Creator = creator
        self.FInfo.Type = ftype
        self.FInfo.Flags = flags

        self.state = _DID_HEADER

    def read(self, *n):
        """Read up to n[0] data-fork bytes (all remaining if omitted)."""
        if self.state != _DID_HEADER:
            raise Error('Read data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.dlen)
        else:
            n = self.dlen
        rv = b''
        while len(rv) < n:
            rv = rv + self._read(n-len(rv))
        self.dlen = self.dlen - n
        return rv

    def close_data(self):
        """Skip any unread data-fork bytes and verify the fork CRC."""
        if self.state != _DID_HEADER:
            raise Error('close_data at wrong time')
        if self.dlen:
            dummy = self._read(self.dlen)
        self._checkcrc()
        self.state = _DID_DATA

    def read_rsrc(self, *n):
        """Read up to n[0] resource-fork bytes (all remaining if omitted),
        closing the data fork first if needed."""
        if self.state == _DID_HEADER:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Read resource data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.rlen)
        else:
            n = self.rlen
        self.rlen = self.rlen - n
        return self._read(n)

    def close(self):
        """Drain the resource fork, check its CRC, and close the input.

        Safe to call more than once.
        """
        if self.state is None:
            return
        try:
            if self.rlen:
                dummy = self.read_rsrc(self.rlen)
            self._checkcrc()
        finally:
            self.state = None
            self.ifp.close()
|
| 476 |
+
|
| 477 |
+
def hexbin(inp, out):
    """hexbin(infilename, outfilename) - Decode binhexed file"""
    ifp = HexBin(inp)
    finfo = ifp.FInfo
    if not out:
        out = ifp.FName     # fall back to the name stored in the header

    with io.open(out, 'wb') as ofp:
        # XXXX Do translation on non-mac systems
        while True:
            d = ifp.read(128000)    # stream the data fork in large chunks
            if not d: break
            ofp.write(d)
    ifp.close_data()

    # Only create a resource-fork file if there is actual resource data.
    d = ifp.read_rsrc(128000)
    if d:
        ofp = openrsrc(out, 'wb')
        ofp.write(d)
        while True:
            d = ifp.read_rsrc(128000)
            if not d: break
            ofp.write(d)
        ofp.close()

    ifp.close()
|
evalkit_tf446/lib/python3.10/bz2.py
ADDED
|
@@ -0,0 +1,344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Interface to the libbzip2 compression library.
|
| 2 |
+
|
| 3 |
+
This module provides a file interface, classes for incremental
|
| 4 |
+
(de)compression, and functions for one-shot (de)compression.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor",
|
| 8 |
+
"open", "compress", "decompress"]
|
| 9 |
+
|
| 10 |
+
__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
|
| 11 |
+
|
| 12 |
+
from builtins import open as _builtin_open
|
| 13 |
+
import io
|
| 14 |
+
import os
|
| 15 |
+
import _compression
|
| 16 |
+
|
| 17 |
+
from _bz2 import BZ2Compressor, BZ2Decompressor
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
_MODE_CLOSED = 0
|
| 21 |
+
_MODE_READ = 1
|
| 22 |
+
# Value 2 no longer used
|
| 23 |
+
_MODE_WRITE = 3
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class BZ2File(_compression.BaseStream):

    """A file object providing transparent bzip2 (de)compression.

    A BZ2File can act as a wrapper for an existing file object, or refer
    directly to a named file on disk.

    Note that BZ2File provides a *binary* file interface - data read is
    returned as bytes, and data to be written should be given as bytes.
    """

    def __init__(self, filename, mode="r", *, compresslevel=9):
        """Open a bzip2-compressed file.

        If filename is a str, bytes, or PathLike object, it gives the
        name of the file to be opened. Otherwise, it should be a file
        object, which will be used to read or write the compressed data.

        mode can be 'r' for reading (default), 'w' for (over)writing,
        'x' for creating exclusively, or 'a' for appending. These can
        equivalently be given as 'rb', 'wb', 'xb', and 'ab'.

        If mode is 'w', 'x' or 'a', compresslevel can be a number between 1
        and 9 specifying the level of compression: 1 produces the least
        compression, and 9 (default) produces the most compression.

        If mode is 'r', the input file may be the concatenation of
        multiple compressed streams.
        """
        # Initialize to safe defaults so close() works even if we raise
        # before the object is fully constructed.
        self._fp = None
        self._closefp = False
        self._mode = _MODE_CLOSED

        if not (1 <= compresslevel <= 9):
            raise ValueError("compresslevel must be between 1 and 9")

        if mode in ("", "r", "rb"):
            mode = "rb"
            mode_code = _MODE_READ
        elif mode in ("w", "wb"):
            mode = "wb"
            mode_code = _MODE_WRITE
            self._compressor = BZ2Compressor(compresslevel)
        elif mode in ("x", "xb"):
            mode = "xb"
            mode_code = _MODE_WRITE
            self._compressor = BZ2Compressor(compresslevel)
        elif mode in ("a", "ab"):
            mode = "ab"
            mode_code = _MODE_WRITE
            self._compressor = BZ2Compressor(compresslevel)
        else:
            raise ValueError("Invalid mode: %r" % (mode,))

        if isinstance(filename, (str, bytes, os.PathLike)):
            self._fp = _builtin_open(filename, mode)
            self._closefp = True    # we opened it, so we must close it
            self._mode = mode_code
        elif hasattr(filename, "read") or hasattr(filename, "write"):
            self._fp = filename     # caller-owned file object
            self._mode = mode_code
        else:
            raise TypeError("filename must be a str, bytes, file or PathLike object")

        if self._mode == _MODE_READ:
            raw = _compression.DecompressReader(self._fp,
                BZ2Decompressor, trailing_error=OSError)
            self._buffer = io.BufferedReader(raw)
        else:
            self._pos = 0   # uncompressed bytes written so far (for tell())

    def close(self):
        """Flush and close the file.

        May be called more than once without error. Once the file is
        closed, any other operation on it will raise a ValueError.
        """
        if self._mode == _MODE_CLOSED:
            return
        try:
            if self._mode == _MODE_READ:
                self._buffer.close()
            elif self._mode == _MODE_WRITE:
                self._fp.write(self._compressor.flush())
                self._compressor = None
        finally:
            # Close the underlying file (if owned) even when flushing
            # failed, and always reset our state.
            try:
                if self._closefp:
                    self._fp.close()
            finally:
                self._fp = None
                self._closefp = False
                self._mode = _MODE_CLOSED
                self._buffer = None

    @property
    def closed(self):
        """True if this file is closed."""
        return self._mode == _MODE_CLOSED

    def fileno(self):
        """Return the file descriptor for the underlying file."""
        self._check_not_closed()
        return self._fp.fileno()

    def seekable(self):
        """Return whether the file supports seeking."""
        return self.readable() and self._buffer.seekable()

    def readable(self):
        """Return whether the file was opened for reading."""
        self._check_not_closed()
        return self._mode == _MODE_READ

    def writable(self):
        """Return whether the file was opened for writing."""
        self._check_not_closed()
        return self._mode == _MODE_WRITE

    def peek(self, n=0):
        """Return buffered data without advancing the file position.

        Always returns at least one byte of data, unless at EOF.
        The exact number of bytes returned is unspecified.
        """
        self._check_can_read()
        # Relies on the undocumented fact that BufferedReader.peek()
        # always returns at least one byte (except at EOF), independent
        # of the value of n
        return self._buffer.peek(n)

    def read(self, size=-1):
        """Read up to size uncompressed bytes from the file.

        If size is negative or omitted, read until EOF is reached.
        Returns b'' if the file is already at EOF.
        """
        self._check_can_read()
        return self._buffer.read(size)

    def read1(self, size=-1):
        """Read up to size uncompressed bytes, while trying to avoid
        making multiple reads from the underlying stream. Reads up to a
        buffer's worth of data if size is negative.

        Returns b'' if the file is at EOF.
        """
        self._check_can_read()
        if size < 0:
            size = io.DEFAULT_BUFFER_SIZE
        return self._buffer.read1(size)

    def readinto(self, b):
        """Read bytes into b.

        Returns the number of bytes read (0 for EOF).
        """
        self._check_can_read()
        return self._buffer.readinto(b)

    def readline(self, size=-1):
        """Read a line of uncompressed bytes from the file.

        The terminating newline (if present) is retained. If size is
        non-negative, no more than size bytes will be read (in which
        case the line may be incomplete). Returns b'' if already at EOF.
        """
        if not isinstance(size, int):
            if not hasattr(size, "__index__"):
                raise TypeError("Integer argument expected")
            size = size.__index__()
        self._check_can_read()
        return self._buffer.readline(size)

    def readlines(self, size=-1):
        """Read a list of lines of uncompressed bytes from the file.

        size can be specified to control the number of lines read: no
        further lines will be read once the total size of the lines read
        so far equals or exceeds size.
        """
        if not isinstance(size, int):
            if not hasattr(size, "__index__"):
                raise TypeError("Integer argument expected")
            size = size.__index__()
        self._check_can_read()
        return self._buffer.readlines(size)

    def write(self, data):
        """Write a byte string to the file.

        Returns the number of uncompressed bytes written, which is
        always the length of data in bytes. Note that due to buffering,
        the file on disk may not reflect the data written until close()
        is called.
        """
        self._check_can_write()
        if isinstance(data, (bytes, bytearray)):
            length = len(data)
        else:
            # accept any data that supports the buffer protocol
            data = memoryview(data)
            length = data.nbytes

        compressed = self._compressor.compress(data)
        self._fp.write(compressed)
        self._pos += length
        return length

    def writelines(self, seq):
        """Write a sequence of byte strings to the file.

        Returns the number of uncompressed bytes written.
        seq can be any iterable yielding byte strings.

        Line separators are not added between the written byte strings.
        """
        return _compression.BaseStream.writelines(self, seq)

    def seek(self, offset, whence=io.SEEK_SET):
        """Change the file position.

        The new position is specified by offset, relative to the
        position indicated by whence. Values for whence are:

            0: start of stream (default); offset must not be negative
            1: current stream position
            2: end of stream; offset must not be positive

        Returns the new file position.

        Note that seeking is emulated, so depending on the parameters,
        this operation may be extremely slow.
        """
        self._check_can_seek()
        return self._buffer.seek(offset, whence)

    def tell(self):
        """Return the current file position."""
        self._check_not_closed()
        if self._mode == _MODE_READ:
            return self._buffer.tell()
        return self._pos
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def open(filename, mode="rb", compresslevel=9,
|
| 272 |
+
encoding=None, errors=None, newline=None):
|
| 273 |
+
"""Open a bzip2-compressed file in binary or text mode.
|
| 274 |
+
|
| 275 |
+
The filename argument can be an actual filename (a str, bytes, or
|
| 276 |
+
PathLike object), or an existing file object to read from or write
|
| 277 |
+
to.
|
| 278 |
+
|
| 279 |
+
The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or
|
| 280 |
+
"ab" for binary mode, or "rt", "wt", "xt" or "at" for text mode.
|
| 281 |
+
The default mode is "rb", and the default compresslevel is 9.
|
| 282 |
+
|
| 283 |
+
For binary mode, this function is equivalent to the BZ2File
|
| 284 |
+
constructor: BZ2File(filename, mode, compresslevel). In this case,
|
| 285 |
+
the encoding, errors and newline arguments must not be provided.
|
| 286 |
+
|
| 287 |
+
For text mode, a BZ2File object is created, and wrapped in an
|
| 288 |
+
io.TextIOWrapper instance with the specified encoding, error
|
| 289 |
+
handling behavior, and line ending(s).
|
| 290 |
+
|
| 291 |
+
"""
|
| 292 |
+
if "t" in mode:
|
| 293 |
+
if "b" in mode:
|
| 294 |
+
raise ValueError("Invalid mode: %r" % (mode,))
|
| 295 |
+
else:
|
| 296 |
+
if encoding is not None:
|
| 297 |
+
raise ValueError("Argument 'encoding' not supported in binary mode")
|
| 298 |
+
if errors is not None:
|
| 299 |
+
raise ValueError("Argument 'errors' not supported in binary mode")
|
| 300 |
+
if newline is not None:
|
| 301 |
+
raise ValueError("Argument 'newline' not supported in binary mode")
|
| 302 |
+
|
| 303 |
+
bz_mode = mode.replace("t", "")
|
| 304 |
+
binary_file = BZ2File(filename, bz_mode, compresslevel=compresslevel)
|
| 305 |
+
|
| 306 |
+
if "t" in mode:
|
| 307 |
+
encoding = io.text_encoding(encoding)
|
| 308 |
+
return io.TextIOWrapper(binary_file, encoding, errors, newline)
|
| 309 |
+
else:
|
| 310 |
+
return binary_file
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def compress(data, compresslevel=9):
    """Compress *data* in one shot and return the bzip2 stream as bytes.

    compresslevel, if given, must be a number between 1 and 9.
    For incremental compression, use a BZ2Compressor object instead.
    """
    compressor = BZ2Compressor(compresslevel)
    body = compressor.compress(data)
    return body + compressor.flush()
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def decompress(data):
    """Decompress *data*, which may hold several concatenated bzip2
    streams, and return the joined plaintext.

    For incremental decompression, use a BZ2Decompressor object instead.
    """
    chunks = []
    remaining = data
    while remaining:
        decomp = BZ2Decompressor()
        try:
            chunk = decomp.decompress(remaining)
        except OSError:
            if not chunks:
                raise   # first stream is invalid: report it
            break       # trailing junk after valid streams is ignored
        chunks.append(chunk)
        if not decomp.eof:
            raise ValueError("Compressed data ended before the "
                             "end-of-stream marker was reached")
        remaining = decomp.unused_data
    return b"".join(chunks)
|
evalkit_tf446/lib/python3.10/cProfile.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/bin/env python3
|
| 2 |
+
|
| 3 |
+
"""Python interface for the 'lsprof' profiler.
|
| 4 |
+
Compatible with the 'profile' module.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["run", "runctx", "Profile"]
|
| 8 |
+
|
| 9 |
+
import _lsprof
|
| 10 |
+
import io
|
| 11 |
+
import profile as _pyprofile
|
| 12 |
+
|
| 13 |
+
# ____________________________________________________________
|
| 14 |
+
# Simple interface
|
| 15 |
+
|
| 16 |
+
def run(statement, filename=None, sort=-1):
    # Delegate to the shared driver in the pure-Python 'profile' module,
    # parameterized with this module's C-accelerated Profile class.
    # (The docstring is copied from _pyprofile.run below.)
    runner = _pyprofile._Utils(Profile)
    return runner.run(statement, filename, sort)
|
| 18 |
+
|
| 19 |
+
def runctx(statement, globals, locals, filename=None, sort=-1):
    # Same shared driver as run(), but the statement executes in the
    # caller-supplied global/local namespaces.
    runner = _pyprofile._Utils(Profile)
    return runner.runctx(statement, globals, locals, filename, sort)
|
| 22 |
+
|
| 23 |
+
# Reuse the docstrings from the pure-Python 'profile' module so both
# profiler front ends stay documented identically.
run.__doc__ = _pyprofile.run.__doc__
runctx.__doc__ = _pyprofile.runctx.__doc__
|
| 25 |
+
|
| 26 |
+
# ____________________________________________________________
|
| 27 |
+
|
| 28 |
+
class Profile(_lsprof.Profiler):
    """Profile(timer=None, timeunit=None, subcalls=True, builtins=True)

    Builds a profiler object using the specified timer function.
    The default timer is a fast built-in one based on real time.
    For custom timer functions returning integers, timeunit can
    be a float specifying a scale (i.e. how long each integer unit
    is, in seconds).
    """

    # Most of the functionality is in the base class.
    # This subclass only adds convenient and backward-compatible methods.

    def print_stats(self, sort=-1):
        """Stop collecting and print the accumulated stats to stdout."""
        import pstats
        pstats.Stats(self).strip_dirs().sort_stats(sort).print_stats()

    def dump_stats(self, file):
        """Stop collecting and write the stats dict to *file* (a path),
        marshalled in the format pstats.Stats can load."""
        import marshal
        with open(file, 'wb') as f:
            self.create_stats()
            marshal.dump(self.stats, f)

    def create_stats(self):
        """Stop the profiler and populate self.stats from the raw entries."""
        self.disable()
        self.snapshot_stats()

    def snapshot_stats(self):
        """Convert _lsprof's raw entry list into the pstats dict format:
        {func_key: (cc, nc, tt, ct, callers)}."""
        entries = self.getstats()
        self.stats = {}
        # Map id(code) -> callers dict so the second pass can attach
        # per-caller subcall totals to the right entry.
        callersdicts = {}
        # call information
        for entry in entries:
            func = label(entry.code)
            nc = entry.callcount         # ncalls column of pstats (before '/')
            cc = nc - entry.reccallcount # ncalls column of pstats (after '/')
            tt = entry.inlinetime        # tottime column of pstats
            ct = entry.totaltime         # cumtime column of pstats
            callers = {}
            callersdicts[id(entry.code)] = callers
            self.stats[func] = cc, nc, tt, ct, callers
        # subcall information
        for entry in entries:
            if entry.calls:
                func = label(entry.code)
                for subentry in entry.calls:
                    try:
                        callers = callersdicts[id(subentry.code)]
                    except KeyError:
                        continue
                    nc = subentry.callcount
                    cc = nc - subentry.reccallcount
                    tt = subentry.inlinetime
                    ct = subentry.totaltime
                    # Accumulate if this caller already has an entry (the
                    # same callee can appear under several code objects).
                    if func in callers:
                        prev = callers[func]
                        nc += prev[0]
                        cc += prev[1]
                        tt += prev[2]
                        ct += prev[3]
                    callers[func] = nc, cc, tt, ct

    # The following two methods can be called by clients to use
    # a profiler to profile a statement, given as a string.

    def run(self, cmd):
        """Profile *cmd* (a string) in __main__'s namespace."""
        import __main__
        dict = __main__.__dict__
        return self.runctx(cmd, dict, dict)

    def runctx(self, cmd, globals, locals):
        """Profile *cmd* executed via exec() in the given namespaces."""
        self.enable()
        try:
            exec(cmd, globals, locals)
        finally:
            self.disable()
        return self

    # This method is more useful to profile a single function call.
    def runcall(self, func, /, *args, **kw):
        """Profile a single call of *func* and return its result."""
        self.enable()
        try:
            return func(*args, **kw)
        finally:
            self.disable()

    def __enter__(self):
        # Context-manager support: profiling is active inside the block.
        self.enable()
        return self

    def __exit__(self, *exc_info):
        self.disable()
|
| 120 |
+
|
| 121 |
+
# ____________________________________________________________
|
| 122 |
+
|
| 123 |
+
def label(code):
    """Map a code object (or a builtin's name string) to the
    (filename, lineno, funcname) key used by pstats."""
    if isinstance(code, str):
        return ('~', 0, code)    # built-in functions ('~' sorts at the end)
    return (code.co_filename, code.co_firstlineno, code.co_name)
|
| 128 |
+
|
| 129 |
+
# ____________________________________________________________
|
| 130 |
+
|
| 131 |
+
def main():
    """Command-line entry point (``python -m cProfile``): profile a script
    or a library module and print or save the stats."""
    import os
    import sys
    import runpy
    import pstats
    from optparse import OptionParser
    usage = "cProfile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
    parser = OptionParser(usage=usage)
    # Stop option parsing at the first positional arg so flags after the
    # script name are passed through to the profiled program.
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile",
        help="Save stats to <outfile>", default=None)
    parser.add_option('-s', '--sort', dest="sort",
        help="Sort order when printing to stdout, based on pstats.Stats class",
        default=-1,
        choices=sorted(pstats.Stats.sort_arg_dict_default))
    parser.add_option('-m', dest="module", action="store_true",
        help="Profile a library module", default=False)

    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)

    (options, args) = parser.parse_args()
    # Make the profiled program see only its own arguments in sys.argv.
    sys.argv[:] = args

    # The script that we're profiling may chdir, so capture the absolute path
    # to the output file at startup.
    if options.outfile is not None:
        options.outfile = os.path.abspath(options.outfile)

    if len(args) > 0:
        if options.module:
            # Profile "python -m <module>" semantics via runpy.
            code = "run_module(modname, run_name='__main__')"
            globs = {
                'run_module': runpy.run_module,
                'modname': args[0]
            }
        else:
            progname = args[0]
            # Mimic normal script execution: script's directory on sys.path.
            sys.path.insert(0, os.path.dirname(progname))
            with io.open_code(progname) as fp:
                code = compile(fp.read(), progname, 'exec')
            globs = {
                '__file__': progname,
                '__name__': '__main__',
                '__package__': None,
                '__cached__': None,
            }
        try:
            runctx(code, globs, None, options.outfile, options.sort)
        except BrokenPipeError as exc:
            # Prevent "Exception ignored" during interpreter shutdown.
            sys.stdout = None
            sys.exit(exc.errno)
    else:
        parser.print_usage()
    return parser
|
| 188 |
+
|
| 189 |
+
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
    main()
|
evalkit_tf446/lib/python3.10/cgitb.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""More comprehensive traceback formatting for Python scripts.
|
| 2 |
+
|
| 3 |
+
To enable this module, do:
|
| 4 |
+
|
| 5 |
+
import cgitb; cgitb.enable()
|
| 6 |
+
|
| 7 |
+
at the top of your script. The optional arguments to enable() are:
|
| 8 |
+
|
| 9 |
+
display - if true, tracebacks are displayed in the web browser
|
| 10 |
+
logdir - if set, tracebacks are written to files in this directory
|
| 11 |
+
context - number of lines of source code to show for each stack frame
|
| 12 |
+
format - 'text' or 'html' controls the output format
|
| 13 |
+
|
| 14 |
+
By default, tracebacks are displayed but not saved, the context is 5 lines
|
| 15 |
+
and the output format is 'html' (for backwards compatibility with the
|
| 16 |
+
original use of this module)
|
| 17 |
+
|
| 18 |
+
Alternatively, if you have caught an exception and want cgitb to display it
|
| 19 |
+
for you, call cgitb.handler(). The optional argument to handler() is a
|
| 20 |
+
3-item tuple (etype, evalue, etb) just like the value of sys.exc_info().
|
| 21 |
+
The default handler displays output as HTML.
|
| 22 |
+
|
| 23 |
+
"""
|
| 24 |
+
import inspect
|
| 25 |
+
import keyword
|
| 26 |
+
import linecache
|
| 27 |
+
import os
|
| 28 |
+
import pydoc
|
| 29 |
+
import sys
|
| 30 |
+
import tempfile
|
| 31 |
+
import time
|
| 32 |
+
import tokenize
|
| 33 |
+
import traceback
|
| 34 |
+
|
| 35 |
+
def reset():
    """Return a string that resets the CGI and browser to a known state."""
    # The "<!--: spam" prefix hides the Content-Type header inside an HTML
    # comment for browsers that have already started rendering, and the
    # barrage of close tags unwinds whatever markup the failing script
    # left open.
    payload = '''<!--: spam
Content-Type: text/html

<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> -->
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> -->
</font> </font> </font> </script> </object> </blockquote> </pre>
</table> </table> </table> </table> </table> </font> </font> </font>'''
    return payload
|
| 44 |
+
|
| 45 |
+
__UNDEF__ = [] # a special sentinel object
|
| 46 |
+
def small(text):
    """Wrap *text* in <small> tags; any falsy input yields ''."""
    return '<small>' + text + '</small>' if text else ''
|
| 51 |
+
|
| 52 |
+
def strong(text):
    """Wrap *text* in <strong> tags; any falsy input yields ''."""
    return '<strong>' + text + '</strong>' if text else ''
|
| 57 |
+
|
| 58 |
+
def grey(text):
    """Render *text* in a grey font; any falsy input yields ''."""
    return '<font color="#909090">' + text + '</font>' if text else ''
|
| 63 |
+
|
| 64 |
+
def lookup(name, frame, locals):
    """Find the value for a given name in the given environment.

    Returns a (scope, value) pair where scope is 'local', 'global',
    'builtin', or None; value is the __UNDEF__ sentinel when the name
    cannot be resolved.
    """
    if name in locals:
        return 'local', locals[name]
    if name in frame.f_globals:
        return 'global', frame.f_globals[name]
    if '__builtins__' in frame.f_globals:
        builtins = frame.f_globals['__builtins__']
        # __builtins__ may be either the builtins module or its dict,
        # depending on how the frame's module was executed.
        if type(builtins) is dict:
            if name in builtins:
                return 'builtin', builtins[name]
        elif hasattr(builtins, name):
            return 'builtin', getattr(builtins, name)
    return None, __UNDEF__
|
| 79 |
+
|
| 80 |
+
def scanvars(reader, frame, locals):
    """Scan one logical line of Python and look up values of variables used.

    *reader* is a readline-style callable; returns a list of
    (name, where, value) triples, where dotted attributes appear as
    ('obj.attr', 'obj.', value).
    """
    vars, lasttoken, parent, prefix, value = [], None, None, '', __UNDEF__
    for ttype, token, start, end, line in tokenize.generate_tokens(reader):
        # A NEWLINE token ends the logical line (NL/comments do not).
        if ttype == tokenize.NEWLINE: break
        if ttype == tokenize.NAME and token not in keyword.kwlist:
            if lasttoken == '.':
                # Attribute access: resolve against the previously seen value.
                if parent is not __UNDEF__:
                    value = getattr(parent, token, __UNDEF__)
                vars.append((prefix + token, prefix, value))
            else:
                where, value = lookup(token, frame, locals)
                vars.append((token, where, value))
        elif token == '.':
            # Extend the dotted prefix; 'value' becomes the parent object.
            prefix += lasttoken + '.'
            parent = value
        else:
            # Any other token resets the attribute-chain state.
            parent, prefix = None, ''
        lasttoken = token
    return vars
|
| 100 |
+
|
| 101 |
+
def html(einfo, context=5):
    """Return a nice HTML document describing a given traceback.

    *einfo* is a (type, value, traceback) triple as from sys.exc_info();
    *context* is the number of source lines shown per stack frame.
    """
    # NOTE(review): several HTML literals below look whitespace-collapsed
    # (e.g. '&nbsp;' entities rendered as plain spaces upstream) — verify
    # against the canonical cgitb source before relying on exact output.
    etype, evalue, etb = einfo
    if isinstance(etype, type):
        etype = etype.__name__
    pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
    date = time.ctime(time.time())
    head = '<body bgcolor="#f0f0f8">' + pydoc.html.heading(
        '<big><big>%s</big></big>' %
        strong(pydoc.html.escape(str(etype))),
        '#ffffff', '#6622aa', pyver + '<br>' + date) + '''
<p>A problem occurred in a Python script. Here is the sequence of
function calls leading up to the error, in the order they occurred.</p>'''

    indent = '<tt>' + small(' ' * 5) + ' </tt>'
    frames = []
    records = inspect.getinnerframes(etb, context)
    for frame, file, lnum, func, lines, index in records:
        if file:
            file = os.path.abspath(file)
            link = '<a href="file://%s">%s</a>' % (file, pydoc.html.escape(file))
        else:
            file = link = '?'
        args, varargs, varkw, locals = inspect.getargvalues(frame)
        call = ''
        if func != '?':
            call = 'in ' + strong(pydoc.html.escape(func))
            # Module-level frames have no argument list worth showing.
            if func != "<module>":
                call += inspect.formatargvalues(args, varargs, varkw, locals,
                    formatvalue=lambda value: '=' + pydoc.html.repr(value))

        highlight = {}
        # reader() feeds scanvars() one source line at a time, recording
        # which line numbers were consumed so they can be highlighted.
        def reader(lnum=[lnum]):
            highlight[lnum[0]] = 1
            try: return linecache.getline(file, lnum[0])
            finally: lnum[0] += 1
        vars = scanvars(reader, frame, locals)

        rows = ['<tr><td bgcolor="#d8bbff">%s%s %s</td></tr>' %
                ('<big> </big>', link, call)]
        if index is not None:
            # lnum - index is the line number of the first context line.
            i = lnum - index
            for line in lines:
                num = small(' ' * (5-len(str(i))) + str(i)) + ' '
                if i in highlight:
                    line = '<tt>=>%s%s</tt>' % (num, pydoc.html.preformat(line))
                    rows.append('<tr><td bgcolor="#ffccee">%s</td></tr>' % line)
                else:
                    line = '<tt>  %s%s</tt>' % (num, pydoc.html.preformat(line))
                    rows.append('<tr><td>%s</td></tr>' % grey(line))
                i += 1

        # Summarize the variables referenced on the highlighted line(s).
        done, dump = {}, []
        for name, where, value in vars:
            if name in done: continue
            done[name] = 1
            if value is not __UNDEF__:
                if where in ('global', 'builtin'):
                    name = ('<em>%s</em> ' % where) + strong(name)
                elif where == 'local':
                    name = strong(name)
                else:
                    name = where + strong(name.split('.')[-1])
                dump.append('%s = %s' % (name, pydoc.html.repr(value)))
            else:
                dump.append(name + ' <em>undefined</em>')

        rows.append('<tr><td>%s</td></tr>' % small(grey(', '.join(dump))))
        frames.append('''
<table width="100%%" cellspacing=0 cellpadding=0 border=0>
%s</table>''' % '\n'.join(rows))

    # Exception header plus its public attributes.
    exception = ['<p>%s: %s' % (strong(pydoc.html.escape(str(etype))),
                                pydoc.html.escape(str(evalue)))]
    for name in dir(evalue):
        if name[:1] == '_': continue
        value = pydoc.html.repr(getattr(evalue, name))
        exception.append('\n<br>%s%s =\n%s' % (indent, name, value))

    # Append the plain traceback in an HTML comment so non-browser readers
    # still get the usual Python traceback.
    return head + ''.join(frames) + ''.join(exception) + '''


<!-- The above is a description of an error in a Python program, formatted
for a web browser because the 'cgitb' module was enabled. In case you
are not reading this in a web browser, here is the original traceback:

%s
-->
''' % pydoc.html.escape(
        ''.join(traceback.format_exception(etype, evalue, etb)))
|
| 191 |
+
|
| 192 |
+
def text(einfo, context=5):
    """Return a plain text document describing a given traceback.

    Mirrors html() but emits plain text; *einfo* is a (type, value,
    traceback) triple, *context* the number of source lines per frame.
    """
    etype, evalue, etb = einfo
    if isinstance(etype, type):
        etype = etype.__name__
    pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
    date = time.ctime(time.time())
    head = "%s\n%s\n%s\n" % (str(etype), pyver, date) + '''
A problem occurred in a Python script. Here is the sequence of
function calls leading up to the error, in the order they occurred.
'''
    frames = []
    records = inspect.getinnerframes(etb, context)
    for frame, file, lnum, func, lines, index in records:
        file = file and os.path.abspath(file) or '?'
        args, varargs, varkw, locals = inspect.getargvalues(frame)
        call = ''
        if func != '?':
            call = 'in ' + func
            # Module-level frames have no argument list worth showing.
            if func != "<module>":
                call += inspect.formatargvalues(args, varargs, varkw, locals,
                    formatvalue=lambda value: '=' + pydoc.text.repr(value))

        highlight = {}
        # reader() feeds scanvars() one source line at a time, recording
        # which line numbers were consumed.
        def reader(lnum=[lnum]):
            highlight[lnum[0]] = 1
            try: return linecache.getline(file, lnum[0])
            finally: lnum[0] += 1
        vars = scanvars(reader, frame, locals)

        rows = [' %s %s' % (file, call)]
        if index is not None:
            # lnum - index is the line number of the first context line.
            i = lnum - index
            for line in lines:
                num = '%5d ' % i
                rows.append(num+line.rstrip())
                i += 1

        # Summarize the variables referenced on the scanned line(s).
        done, dump = {}, []
        for name, where, value in vars:
            if name in done: continue
            done[name] = 1
            if value is not __UNDEF__:
                if where == 'global': name = 'global ' + name
                elif where != 'local': name = where + name.split('.')[-1]
                dump.append('%s = %s' % (name, pydoc.text.repr(value)))
            else:
                dump.append(name + ' undefined')

        rows.append('\n'.join(dump))
        frames.append('\n%s\n' % '\n'.join(rows))

    # Exception header plus its attributes (no underscore filter here,
    # unlike html()).
    exception = ['%s: %s' % (str(etype), str(evalue))]
    for name in dir(evalue):
        value = pydoc.text.repr(getattr(evalue, name))
        exception.append('\n%s%s = %s' % (" "*4, name, value))

    return head + ''.join(frames) + ''.join(exception) + '''

The above is a description of an error in a Python program. Here is
the original traceback:

%s
''' % ''.join(traceback.format_exception(etype, evalue, etb))
|
| 257 |
+
|
| 258 |
+
class Hook:
    """A hook to replace sys.excepthook that shows tracebacks in HTML."""

    def __init__(self, display=1, logdir=None, context=5, file=None,
                 format="html"):
        self.display = display          # send tracebacks to browser if true
        self.logdir = logdir            # log tracebacks to files if not None
        self.context = context          # number of source code lines per frame
        self.file = file or sys.stdout  # place to send the output
        self.format = format            # 'html' or 'text'

    def __call__(self, etype, evalue, etb):
        # Matches the sys.excepthook signature.
        self.handle((etype, evalue, etb))

    def handle(self, info=None):
        """Format and emit the given (or current) exception info."""
        info = info or sys.exc_info()
        if self.format == "html":
            self.file.write(reset())

        formatter = (self.format=="html") and html or text
        plain = False
        try:
            doc = formatter(info, self.context)
        except:                         # just in case something goes wrong
            # Deliberately broad: a failure here must never mask the
            # original exception, so fall back to the stock traceback.
            doc = ''.join(traceback.format_exception(*info))
            plain = True

        if self.display:
            if plain:
                doc = pydoc.html.escape(doc)
                self.file.write('<pre>' + doc + '</pre>\n')
            else:
                self.file.write(doc + '\n')
        else:
            self.file.write('<p>A problem occurred in a Python script.\n')

        if self.logdir is not None:
            # Persist the report to a unique file; tell the user where.
            suffix = ['.txt', '.html'][self.format=="html"]
            (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir)

            try:
                with os.fdopen(fd, 'w') as file:
                    file.write(doc)
                msg = '%s contains the description of this error.' % path
            except:
                # Best-effort logging; report the failure but keep going.
                msg = 'Tried to save traceback to %s, but failed.' % path

            if self.format == 'html':
                self.file.write('<p>%s</p>\n' % msg)
            else:
                self.file.write(msg + '\n')
        try:
            self.file.flush()
        except: pass
|
| 312 |
+
|
| 313 |
+
# Module-level convenience: cgitb.handler(info) formats the current (or
# given) exception with default Hook settings, bound at import time.
handler = Hook().handle
|
| 314 |
+
def enable(display=1, logdir=None, context=5, format="html"):
    """Install an exception handler that formats tracebacks as HTML.

    The optional argument 'display' can be set to 0 to suppress sending the
    traceback to the browser, and 'logdir' can be set to a directory to cause
    tracebacks to be written to files there."""
    hook = Hook(display=display, logdir=logdir,
                context=context, format=format)
    sys.excepthook = hook
|
evalkit_tf446/lib/python3.10/chunk.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Simple class to read IFF chunks.
|
| 2 |
+
|
| 3 |
+
An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File
|
| 4 |
+
Format)) has the following structure:
|
| 5 |
+
|
| 6 |
+
+----------------+
|
| 7 |
+
| ID (4 bytes) |
|
| 8 |
+
+----------------+
|
| 9 |
+
| size (4 bytes) |
|
| 10 |
+
+----------------+
|
| 11 |
+
| data |
|
| 12 |
+
| ... |
|
| 13 |
+
+----------------+
|
| 14 |
+
|
| 15 |
+
The ID is a 4-byte string which identifies the type of chunk.
|
| 16 |
+
|
| 17 |
+
The size field (a 32-bit value, encoded using big-endian byte order)
|
| 18 |
+
gives the size of the whole chunk, including the 8-byte header.
|
| 19 |
+
|
| 20 |
+
Usually an IFF-type file consists of one or more chunks. The proposed
|
| 21 |
+
usage of the Chunk class defined here is to instantiate an instance at
|
| 22 |
+
the start of each chunk and read from the instance until it reaches
|
| 23 |
+
the end, after which a new instance can be instantiated. At the end
|
| 24 |
+
of the file, creating a new instance will fail with an EOFError
|
| 25 |
+
exception.
|
| 26 |
+
|
| 27 |
+
Usage:
|
| 28 |
+
while True:
|
| 29 |
+
try:
|
| 30 |
+
chunk = Chunk(file)
|
| 31 |
+
except EOFError:
|
| 32 |
+
break
|
| 33 |
+
chunktype = chunk.getname()
|
| 34 |
+
while True:
|
| 35 |
+
data = chunk.read(nbytes)
|
| 36 |
+
if not data:
|
| 37 |
+
pass
|
| 38 |
+
# do something with data
|
| 39 |
+
|
| 40 |
+
The interface is file-like. The implemented methods are:
|
| 41 |
+
read, close, seek, tell, isatty.
|
| 42 |
+
Extra methods are: skip() (called by close, skips to the end of the chunk),
|
| 43 |
+
getname() (returns the name (ID) of the chunk)
|
| 44 |
+
|
| 45 |
+
The __init__ method has one required argument, a file-like object
|
| 46 |
+
(including a chunk instance), and one optional argument, a flag which
|
| 47 |
+
specifies whether or not chunks are aligned on 2-byte boundaries. The
|
| 48 |
+
default is 1, i.e. aligned.
|
| 49 |
+
"""
|
| 50 |
+
|
| 51 |
+
class Chunk:
    """File-like reader for a single IFF chunk (see the module docstring
    for the chunk layout and usage pattern)."""

    def __init__(self, file, align=True, bigendian=True, inclheader=False):
        # 'file' may itself be a Chunk instance, allowing nested chunks.
        import struct
        self.closed = False
        self.align = align      # whether to align to word (2-byte) boundaries
        if bigendian:
            strflag = '>'
        else:
            strflag = '<'
        self.file = file
        self.chunkname = file.read(4)
        if len(self.chunkname) < 4:
            # Fewer than 4 ID bytes: no further chunk, signal end of file.
            raise EOFError
        try:
            self.chunksize = struct.unpack_from(strflag+'L', file.read(4))[0]
        except struct.error:
            # Truncated size field is also treated as end of file.
            raise EOFError from None
        if inclheader:
            self.chunksize = self.chunksize - 8 # subtract header
        self.size_read = 0      # bytes of this chunk consumed so far
        try:
            self.offset = self.file.tell()
        except (AttributeError, OSError):
            # Underlying stream has no usable tell(); fall back to purely
            # sequential reads in seek()/skip().
            self.seekable = False
        else:
            self.seekable = True

    def getname(self):
        """Return the name (ID) of the current chunk."""
        return self.chunkname

    def getsize(self):
        """Return the size of the current chunk."""
        return self.chunksize

    def close(self):
        # Skips to the end of the chunk so the underlying file is
        # positioned at the next chunk; never closes the underlying file.
        if not self.closed:
            try:
                self.skip()
            finally:
                self.closed = True

    def isatty(self):
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return False

    def seek(self, pos, whence=0):
        """Seek to specified position into the chunk.
        Default position is 0 (start of chunk).
        If the file is not seekable, this will result in an error.
        """

        if self.closed:
            raise ValueError("I/O operation on closed file")
        if not self.seekable:
            raise OSError("cannot seek")
        if whence == 1:
            pos = pos + self.size_read
        elif whence == 2:
            pos = pos + self.chunksize
        if pos < 0 or pos > self.chunksize:
            # Historical behavior: out-of-range positions raise
            # RuntimeError rather than ValueError.
            raise RuntimeError
        self.file.seek(self.offset + pos, 0)
        self.size_read = pos

    def tell(self):
        # Position is relative to the start of the chunk data.
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return self.size_read

    def read(self, size=-1):
        """Read at most size bytes from the chunk.
        If size is omitted or negative, read until the end
        of the chunk.
        """

        if self.closed:
            raise ValueError("I/O operation on closed file")
        if self.size_read >= self.chunksize:
            return b''
        if size < 0:
            size = self.chunksize - self.size_read
        # Never read past the end of this chunk's data.
        if size > self.chunksize - self.size_read:
            size = self.chunksize - self.size_read
        data = self.file.read(size)
        self.size_read = self.size_read + len(data)
        # After consuming an odd-sized chunk, swallow the pad byte so the
        # file is positioned at the next chunk header.
        if self.size_read == self.chunksize and \
           self.align and \
           (self.chunksize & 1):
            dummy = self.file.read(1)
            self.size_read = self.size_read + len(dummy)
        return data

    def skip(self):
        """Skip the rest of the chunk.
        If you are not interested in the contents of the chunk,
        this method should be called so that the file points to
        the start of the next chunk.
        """

        if self.closed:
            raise ValueError("I/O operation on closed file")
        if self.seekable:
            try:
                n = self.chunksize - self.size_read
                # maybe fix alignment
                if self.align and (self.chunksize & 1):
                    n = n + 1
                self.file.seek(n, 1)
                self.size_read = self.size_read + n
                return
            except OSError:
                # Seek failed at runtime; fall through to reading.
                pass
        while self.size_read < self.chunksize:
            n = min(8192, self.chunksize - self.size_read)
            dummy = self.read(n)
            if not dummy:
                raise EOFError
|
evalkit_tf446/lib/python3.10/cmd.py
ADDED
|
@@ -0,0 +1,401 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A generic class to build line-oriented command interpreters.
|
| 2 |
+
|
| 3 |
+
Interpreters constructed with this class obey the following conventions:
|
| 4 |
+
|
| 5 |
+
1. End of file on input is processed as the command 'EOF'.
|
| 6 |
+
2. A command is parsed out of each line by collecting the prefix composed
|
| 7 |
+
of characters in the identchars member.
|
| 8 |
+
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
|
| 9 |
+
is passed a single argument consisting of the remainder of the line.
|
| 10 |
+
4. Typing an empty line repeats the last command. (Actually, it calls the
|
| 11 |
+
method `emptyline', which may be overridden in a subclass.)
|
| 12 |
+
5. There is a predefined `help' method. Given an argument `topic', it
|
| 13 |
+
calls the command `help_topic'. With no arguments, it lists all topics
|
| 14 |
+
with defined help_ functions, broken into up to three topics; documented
|
| 15 |
+
commands, miscellaneous help topics, and undocumented commands.
|
| 16 |
+
6. The command '?' is a synonym for `help'. The command '!' is a synonym
|
| 17 |
+
for `shell', if a do_shell method exists.
|
| 18 |
+
7. If completion is enabled, completing commands will be done automatically,
|
| 19 |
+
and completing of commands args is done by calling complete_foo() with
|
| 20 |
+
arguments text, line, begidx, endidx. text is string we are matching
|
| 21 |
+
against, all returned matches must begin with it. line is the current
|
| 22 |
+
input line (lstripped), begidx and endidx are the beginning and end
|
| 23 |
+
indexes of the text being matched, which could be used to provide
|
| 24 |
+
different completion depending upon which position the argument is in.
|
| 25 |
+
|
| 26 |
+
The `default' method may be overridden to intercept commands for which there
|
| 27 |
+
is no do_ method.
|
| 28 |
+
|
| 29 |
+
The `completedefault' method may be overridden to intercept completions for
|
| 30 |
+
commands that have no complete_ method.
|
| 31 |
+
|
| 32 |
+
The data member `self.ruler' sets the character used to draw separator lines
|
| 33 |
+
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
|
| 34 |
+
|
| 35 |
+
If the value of `self.intro' is nonempty when the cmdloop method is called,
|
| 36 |
+
it is printed out on interpreter startup. This value may be overridden
|
| 37 |
+
via an optional argument to the cmdloop() method.
|
| 38 |
+
|
| 39 |
+
The data members `self.doc_header', `self.misc_header', and
|
| 40 |
+
`self.undoc_header' set the headers used for the help function's
|
| 41 |
+
listings of documented functions, miscellaneous topics, and undocumented
|
| 42 |
+
functions respectively.
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
import string, sys
|
| 46 |
+
|
| 47 |
+
__all__ = ["Cmd"]
|
| 48 |
+
|
| 49 |
+
PROMPT = '(Cmd) '
|
| 50 |
+
IDENTCHARS = string.ascii_letters + string.digits + '_'
|
| 51 |
+
|
| 52 |
+
class Cmd:
    """A simple framework for writing line-oriented command interpreters.

    These are often useful for test harnesses, administrative tools, and
    prototypes that will later be wrapped in a more sophisticated interface.

    A Cmd instance or subclass instance is a line-oriented interpreter
    framework.  There is no good reason to instantiate Cmd itself; rather,
    it's useful as a superclass of an interpreter class you define yourself
    in order to inherit Cmd's methods and encapsulate action methods.

    """
    # Class-level defaults; subclasses typically override several of these.
    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
    misc_header = "Miscellaneous help topics:"
    undoc_header = "Undocumented commands:"
    nohelp = "*** No help on %s"
    use_rawinput = 1

    def __init__(self, completekey='tab', stdin=None, stdout=None):
        """Instantiate a line-oriented interpreter framework.

        The optional argument 'completekey' is the readline name of a
        completion key; it defaults to the Tab key. If completekey is
        not None and the readline module is available, command completion
        is done automatically. The optional arguments stdin and stdout
        specify alternate input and output file objects; if not specified,
        sys.stdin and sys.stdout are used.

        """
        if stdin is not None:
            self.stdin = stdin
        else:
            self.stdin = sys.stdin
        if stdout is not None:
            self.stdout = stdout
        else:
            self.stdout = sys.stdout
        self.cmdqueue = []
        self.completekey = completekey

    def cmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.

        """

        self.preloop()
        # Install our completer into readline (best effort); the previous
        # completer is saved and restored in the finally block below.
        if self.use_rawinput and self.completekey:
            try:
                import readline
                self.old_completer = readline.get_completer()
                readline.set_completer(self.complete)
                readline.parse_and_bind(self.completekey+": complete")
            except ImportError:
                pass
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                # Queued commands take precedence over interactive input.
                if self.cmdqueue:
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            line = input(self.prompt)
                        except EOFError:
                            line = 'EOF'
                    else:
                        self.stdout.write(self.prompt)
                        self.stdout.flush()
                        line = self.stdin.readline()
                        if not len(line):
                            # readline() returning '' means end of file.
                            line = 'EOF'
                        else:
                            line = line.rstrip('\r\n')
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            if self.use_rawinput and self.completekey:
                try:
                    import readline
                    readline.set_completer(self.old_completer)
                except ImportError:
                    pass


    def precmd(self, line):
        """Hook method executed just before the command line is
        interpreted, but after the input prompt is generated and issued.

        """
        return line

    def postcmd(self, stop, line):
        """Hook method executed just after a command dispatch is finished."""
        return stop

    def preloop(self):
        """Hook method executed once when the cmdloop() method is called."""
        pass

    def postloop(self):
        """Hook method executed once when the cmdloop() method is about to
        return.

        """
        pass

    def parseline(self, line):
        """Parse the line into a command name and a string containing
        the arguments.  Returns a tuple containing (command, args, line).
        'command' and 'args' may be None if the line couldn't be parsed.
        """
        line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            # '?' is a synonym for 'help'.
            line = 'help ' + line[1:]
        elif line[0] == '!':
            # '!' is a synonym for 'shell', but only if do_shell exists.
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        i, n = 0, len(line)
        while i < n and line[i] in self.identchars: i = i+1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line

    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.

        This may be overridden, but should not normally need to be;
        see the precmd() and postcmd() methods for useful execution hooks.
        The return value is a flag indicating whether interpretation of
        commands by the interpreter should stop.

        """
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        if line == 'EOF' :
            # Don't let an implicit EOF become the "repeat last command".
            self.lastcmd = ''
        if cmd == '':
            return self.default(line)
        else:
            try:
                func = getattr(self, 'do_' + cmd)
            except AttributeError:
                return self.default(line)
            return func(arg)

    def emptyline(self):
        """Called when an empty line is entered in response to the prompt.

        If this method is not overridden, it repeats the last nonempty
        command entered.

        """
        if self.lastcmd:
            return self.onecmd(self.lastcmd)

    def default(self, line):
        """Called on an input line when the command prefix is not recognized.

        If this method is not overridden, it prints an error message and
        returns.

        """
        self.stdout.write('*** Unknown syntax: %s\n'%line)

    def completedefault(self, *ignored):
        """Method called to complete an input line when no command-specific
        complete_*() method is available.

        By default, it returns an empty list.

        """
        return []

    def completenames(self, text, *ignored):
        # Complete against command names: strip the 'do_' prefix from
        # matching method names.
        dotext = 'do_'+text
        return [a[3:] for a in self.get_names() if a.startswith(dotext)]

    def complete(self, text, state):
        """Return the next possible completion for 'text'.

        If a command has not been entered, then complete against command list.
        Otherwise try to call complete_<command> to get list of completions.
        """
        if state == 0:
            # First call for this text: compute all matches once, then
            # successive states index into the cached list.
            import readline
            origline = readline.get_line_buffer()
            line = origline.lstrip()
            stripped = len(origline) - len(line)
            begidx = readline.get_begidx() - stripped
            endidx = readline.get_endidx() - stripped
            if begidx>0:
                cmd, args, foo = self.parseline(line)
                if cmd == '':
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, 'complete_' + cmd)
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            return self.completion_matches[state]
        except IndexError:
            return None

    def get_names(self):
        # This method used to pull in base class attributes
        # at a time dir() didn't do it yet.
        return dir(self.__class__)

    def complete_help(self, *args):
        # Help completion offers both command names and help-only topics.
        commands = set(self.completenames(*args))
        topics = set(a[5:] for a in self.get_names()
                     if a.startswith('help_' + args[0]))
        return list(commands | topics)

    def do_help(self, arg):
        'List available commands with "help" or detailed help with "help cmd".'
        if arg:
            # XXX check arg syntax
            try:
                func = getattr(self, 'help_' + arg)
            except AttributeError:
                # No help_<arg> method; fall back to do_<arg>'s docstring.
                try:
                    doc=getattr(self, 'do_' + arg).__doc__
                    if doc:
                        self.stdout.write("%s\n"%str(doc))
                        return
                except AttributeError:
                    pass
                self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
                return
            func()
        else:
            names = self.get_names()
            cmds_doc = []
            cmds_undoc = []
            help = {}
            for name in names:
                if name[:5] == 'help_':
                    help[name[5:]]=1
            names.sort()
            # There can be duplicates if routines overridden
            prevname = ''
            for name in names:
                if name[:3] == 'do_':
                    if name == prevname:
                        continue
                    prevname = name
                    cmd=name[3:]
                    if cmd in help:
                        cmds_doc.append(cmd)
                        del help[cmd]
                    elif getattr(self, name).__doc__:
                        cmds_doc.append(cmd)
                    else:
                        cmds_undoc.append(cmd)
            self.stdout.write("%s\n"%str(self.doc_leader))
            self.print_topics(self.doc_header,   cmds_doc,   15,80)
            self.print_topics(self.misc_header,  list(help.keys()),15,80)
            self.print_topics(self.undoc_header, cmds_undoc, 15,80)

    def print_topics(self, header, cmds, cmdlen, maxcol):
        if cmds:
            self.stdout.write("%s\n"%str(header))
            if self.ruler:
                self.stdout.write("%s\n"%str(self.ruler * len(header)))
            self.columnize(cmds, maxcol-1)
            self.stdout.write("\n")

    def columnize(self, list, displaywidth=80):
        """Display a list of strings as a compact set of columns.

        Each column is only as wide as necessary.
        Columns are separated by two spaces (one was not legible enough).
        """
        if not list:
            self.stdout.write("<empty>\n")
            return

        nonstrings = [i for i in range(len(list))
                        if not isinstance(list[i], str)]
        if nonstrings:
            raise TypeError("list[i] not a string for i in %s"
                            % ", ".join(map(str, nonstrings)))
        size = len(list)
        if size == 1:
            self.stdout.write('%s\n'%str(list[0]))
            return
        # Try every row count from 1 upwards
        for nrows in range(1, len(list)):
            ncols = (size+nrows-1) // nrows
            colwidths = []
            totwidth = -2
            for col in range(ncols):
                colwidth = 0
                for row in range(nrows):
                    i = row + nrows*col
                    if i >= size:
                        break
                    x = list[i]
                    colwidth = max(colwidth, len(x))
                colwidths.append(colwidth)
                totwidth += colwidth + 2
                if totwidth > displaywidth:
                    break
            if totwidth <= displaywidth:
                break
        else:
            # Nothing fit: fall back to a single column.
            nrows = len(list)
            ncols = 1
            colwidths = [0]
        for row in range(nrows):
            texts = []
            for col in range(ncols):
                i = row + nrows*col
                if i >= size:
                    x = ""
                else:
                    x = list[i]
                texts.append(x)
            while texts and not texts[-1]:
                del texts[-1]
            for col in range(len(texts)):
                texts[col] = texts[col].ljust(colwidths[col])
            self.stdout.write("%s\n"%str("  ".join(texts)))
|
evalkit_tf446/lib/python3.10/code.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities needed to emulate Python's interactive interpreter.
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
from codeop import CommandCompiler, compile_command
|
| 11 |
+
|
| 12 |
+
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
|
| 13 |
+
"compile_command"]
|
| 14 |
+
|
| 15 |
+
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).

    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.

        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One of several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.

        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.

        """
        try:
            exec(code, self.locals)
        except SystemExit:
            raise
        except:
            self.showtraceback()

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.

        """
        type, value, tb = sys.exc_info()
        # Mirror the interpreter's convention of recording the most
        # recent exception on sys.last_*.
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value.args
            except ValueError:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        if sys.excepthook is sys.__excepthook__:
            lines = traceback.format_exception_only(type, value)
            self.write(''.join(lines))
        else:
            # If someone has set sys.excepthook, we let that take precedence
            # over self.write
            sys.excepthook(type, value, tb)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.

        """
        sys.last_type, sys.last_value, last_tb = ei = sys.exc_info()
        sys.last_traceback = last_tb
        try:
            lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next)
            if sys.excepthook is sys.__excepthook__:
                self.write(''.join(lines))
            else:
                # If someone has set sys.excepthook, we let that take precedence
                # over self.write
                sys.excepthook(ei[0], ei[1], last_tb)
        finally:
            # Break reference cycles through the traceback objects.
            last_tb = ei = None

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.

        """
        sys.stderr.write(data)
| 161 |
+
|
| 162 |
+
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.

    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.

    """

    def __init__(self, locals=None, filename="<console>"):
        """Constructor.

        The optional locals argument will be passed to the
        InteractiveInterpreter base class.

        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.

        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Reset the input buffer."""
        self.buffer = []

    def interact(self, banner=None, exitmsg=None):
        """Closely emulate the interactive Python console.

        The optional banner argument specifies the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).

        The optional exitmsg argument specifies the exit message
        printed when exiting. Pass the empty string to suppress
        printing an exit message. If exitmsg is not given or None,
        a default message is printed.

        """
        # Ensure the standard prompts exist, as in a real interactive run.
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        elif banner:
            self.write("%s\n" % str(banner))
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C cancels the current (possibly multi-line) input.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0
        if exitmsg is None:
            self.write('now exiting %s...\n' % self.__class__.__name__)
        elif exitmsg != '':
            self.write('%s\n' % exitmsg)

    def push(self, line):
        """Push a line to the interpreter.

        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).

        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more

    def raw_input(self, prompt=""):
        """Write a prompt and read a line.

        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.

        The base implementation uses the built-in function
        input(); a subclass may replace this with a different
        implementation.

        """
        return input(prompt)
|
| 278 |
+
def interact(banner=None, readfunc=None, local=None, exitmsg=None):
    """Closely emulate the interactive Python interpreter.

    This is a backwards compatible interface to the InteractiveConsole
    class.  When readfunc is not specified, it attempts to import the
    readline module to enable GNU readline if it is available.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()
    exitmsg -- passed to InteractiveConsole.interact()

    """
    console = InteractiveConsole(local)
    if readfunc is None:
        # Best effort: importing readline enables GNU line editing for
        # the built-in input() used by the default raw_input().
        try:
            import readline
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner, exitmsg)
|
| 303 |
+
|
| 304 |
+
if __name__ == "__main__":
    # Running this module as a script starts an emulated interactive
    # interpreter on the current terminal.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-q', action='store_true',
                       help="don't print version and copyright messages")
    args = parser.parse_args()
    # Suppress the banner when -q is given or the interpreter itself
    # was started in quiet mode (python -q).
    if args.q or sys.flags.quiet:
        banner = ''
    else:
        # None makes interact() print the default version/copyright banner.
        banner = None
    interact(banner)
|
evalkit_tf446/lib/python3.10/codecs.py
ADDED
|
@@ -0,0 +1,1127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" codecs -- Python Codec Registry, API and helpers.


Written by Marc-Andre Lemburg (mal@lemburg.com).

(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.

"""

import builtins
import sys

### Registry and builtin stateless codec functions

try:
    # The C accelerator module provides the registry and the basic
    # stateless codec functions re-exported below.
    from _codecs import *
except ImportError as why:
    raise SystemError('Failed to load the builtin codecs: %s' % why)

# Public names exported by this module (includes names re-exported
# from _codecs).
__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
           "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
           "BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE",
           "BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE",
           "CodecInfo", "Codec", "IncrementalEncoder", "IncrementalDecoder",
           "StreamReader", "StreamWriter",
           "StreamReaderWriter", "StreamRecoder",
           "getencoder", "getdecoder", "getincrementalencoder",
           "getincrementaldecoder", "getreader", "getwriter",
           "encode", "decode", "iterencode", "iterdecode",
           "strict_errors", "ignore_errors", "replace_errors",
           "xmlcharrefreplace_errors",
           "backslashreplace_errors", "namereplace_errors",
           "register_error", "lookup_error"]

### Constants

#
# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF)
# and its possible byte string values
# for UTF8/UTF16/UTF32 output and little/big endian machines
#

# UTF-8
BOM_UTF8 = b'\xef\xbb\xbf'

# UTF-16, little endian
BOM_LE = BOM_UTF16_LE = b'\xff\xfe'

# UTF-16, big endian
BOM_BE = BOM_UTF16_BE = b'\xfe\xff'

# UTF-32, little endian
BOM_UTF32_LE = b'\xff\xfe\x00\x00'

# UTF-32, big endian
BOM_UTF32_BE = b'\x00\x00\xfe\xff'

# Bind the endianness-neutral names to the variant matching this
# machine's native byte order.
if sys.byteorder == 'little':

    # UTF-16, native endianness
    BOM = BOM_UTF16 = BOM_UTF16_LE

    # UTF-32, native endianness
    BOM_UTF32 = BOM_UTF32_LE

else:

    # UTF-16, native endianness
    BOM = BOM_UTF16 = BOM_UTF16_BE

    # UTF-32, native endianness
    BOM_UTF32 = BOM_UTF32_BE

# Old broken names (don't use in new code)
BOM32_LE = BOM_UTF16_LE
BOM32_BE = BOM_UTF16_BE
BOM64_LE = BOM_UTF32_LE
BOM64_BE = BOM_UTF32_BE
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
### Codec base classes (defining the API)
|
| 82 |
+
|
| 83 |
+
class CodecInfo(tuple):
    """Codec details as returned by a codec registry lookup.

    Behaves as a 4-tuple (encode, decode, streamreader, streamwriter)
    for backward compatibility, with the remaining details exposed as
    attributes.
    """

    # Private API used since Python 3.4 to denylist the known
    # non-Unicode codecs in the standard library; a more general
    # mechanism was hoped for in 3.5. See http://bugs.python.org/issue19619
    _is_text_encoding = True  # codecs are assumed to be text encodings

    def __new__(cls, encode, decode, streamreader=None, streamwriter=None,
                incrementalencoder=None, incrementaldecoder=None, name=None,
                *, _is_text_encoding=None):
        # The tuple layout is part of the public (legacy) interface.
        info = tuple.__new__(cls, (encode, decode, streamreader, streamwriter))
        info.name = name
        info.encode = encode
        info.decode = decode
        info.incrementalencoder = incrementalencoder
        info.incrementaldecoder = incrementaldecoder
        info.streamwriter = streamwriter
        info.streamreader = streamreader
        # Only override the class-level default when explicitly given.
        if _is_text_encoding is not None:
            info._is_text_encoding = _is_text_encoding
        return info

    def __repr__(self):
        return "<%s.%s object for encoding %s at %#x>" % (
            self.__class__.__module__,
            self.__class__.__qualname__,
            self.name,
            id(self),
        )
|
| 113 |
+
|
| 114 |
+
class Codec:

    """Interface definition for stateless encoders/decoders.

    The .encode()/.decode() methods may select an error handling
    scheme through the errors argument. The following string values
    are predefined:

     'strict' - raise a ValueError error (or a subclass)
     'ignore' - ignore the character and continue with the next
     'replace' - replace with a suitable replacement character;
                 Python will use the official U+FFFD REPLACEMENT
                 CHARACTER for the builtin Unicode codecs on
                 decoding and '?' on encoding.
     'surrogateescape' - replace with private code points U+DCnn.
     'xmlcharrefreplace' - Replace with the appropriate XML
                           character reference (only for encoding).
     'backslashreplace' - Replace with backslashed escape sequences.
     'namereplace' - Replace with \\N{...} escape sequences
                     (only for encoding).

    Further values can be made available via register_error.
    """

    def encode(self, input, errors='strict'):
        """Encode *input* and return an (output object, length
        consumed) tuple.

        errors selects the error handling scheme (default: 'strict').

        Implementations must not keep state on the Codec instance;
        use StreamWriter for codecs that need state to encode
        efficiently. Zero length input must be handled and yield an
        empty object of the output type.
        """
        raise NotImplementedError

    def decode(self, input, errors='strict'):
        """Decode *input* and return an (output object, length
        consumed) tuple.

        input must expose the bf_getreadbuf buffer slot (e.g. Python
        strings, buffer objects and memory mapped files).

        errors selects the error handling scheme (default: 'strict').

        Implementations must not keep state on the Codec instance;
        use StreamReader for codecs that need state to decode
        efficiently. Zero length input must be handled and yield an
        empty object of the output type.
        """
        raise NotImplementedError
|
| 179 |
+
|
| 180 |
+
class IncrementalEncoder(object):
    """Encodes an input in multiple steps.

    Input may be fed piecewise into encode(); the instance keeps the
    encoding state between those calls.
    """

    def __init__(self, errors='strict'):
        """Create an IncrementalEncoder instance.

        The errors keyword argument selects the error handling
        scheme; see the module docstring for the possible values.
        """
        self.errors = errors
        self.buffer = ""

    def encode(self, input, final=False):
        """Encode *input*, returning the resulting object."""
        raise NotImplementedError

    def reset(self):
        """Return the encoder to its initial state."""

    def getstate(self):
        """Return the current state of the encoder."""
        return 0

    def setstate(self, state):
        """Restore an encoder state previously returned by getstate()."""
|
| 219 |
+
|
| 220 |
+
class BufferedIncrementalEncoder(IncrementalEncoder):
    """Base class for incremental encoders that must carry some of
    the not-yet-encoded input over between calls to encode().
    """

    def __init__(self, errors='strict'):
        IncrementalEncoder.__init__(self, errors)
        self.buffer = ""  # unencoded input kept between encode() calls

    def _buffer_encode(self, input, errors, final):
        # Subclasses must override this: encode *input* and return an
        # (output, length consumed) tuple.
        raise NotImplementedError

    def encode(self, input, final=False):
        # Prepend whatever was left over from the previous call.
        pending = self.buffer + input
        result, consumed = self._buffer_encode(pending, self.errors, final)
        # Anything not consumed waits for the next call.
        self.buffer = pending[consumed:]
        return result

    def reset(self):
        IncrementalEncoder.reset(self)
        self.buffer = ""

    def getstate(self):
        # An empty buffer is reported as the integer 0 (initial state).
        return self.buffer or 0

    def setstate(self, state):
        self.buffer = state or ""
|
| 253 |
+
|
| 254 |
+
class IncrementalDecoder(object):
    """Decodes an input in multiple steps.

    Input may be fed piecewise into decode(); the instance keeps the
    decoding state between those calls.
    """

    def __init__(self, errors='strict'):
        """Create an IncrementalDecoder instance.

        The errors keyword argument selects the error handling
        scheme; see the module docstring for the possible values.
        """
        self.errors = errors

    def decode(self, input, final=False):
        """Decode *input*, returning the resulting object."""
        raise NotImplementedError

    def reset(self):
        """Return the decoder to its initial state."""

    def getstate(self):
        """Return the current state of the decoder.

        The state is a (buffered_input, additional_state_info) tuple:
        buffered_input is a bytes object holding bytes passed to
        decode() that are not yet converted, and additional_state_info
        is a non-negative integer describing the decoder state WITHOUT
        the contents of buffered_input having been processed. In the
        initial state and after reset(), getstate() must return
        (b"", 0).
        """
        return (b"", 0)

    def setstate(self, state):
        """Restore a decoder state previously returned by getstate().

        setstate((b"", 0)) must be equivalent to reset().
        """
|
| 302 |
+
|
| 303 |
+
class BufferedIncrementalDecoder(IncrementalDecoder):
    """Base class for incremental decoders that must be able to
    handle incomplete byte sequences spanning decode() calls.
    """

    def __init__(self, errors='strict'):
        IncrementalDecoder.__init__(self, errors)
        self.buffer = b""  # undecoded bytes kept between decode() calls

    def _buffer_decode(self, input, errors, final):
        # Subclasses must override this: decode *input* and return an
        # (output, length consumed) tuple.
        raise NotImplementedError

    def decode(self, input, final=False):
        # Prepend whatever bytes were left over from the previous call.
        pending = self.buffer + input
        result, consumed = self._buffer_decode(pending, self.errors, final)
        # Anything not consumed waits for the next call.
        self.buffer = pending[consumed:]
        return result

    def reset(self):
        IncrementalDecoder.reset(self)
        self.buffer = b""

    def getstate(self):
        # The additional state info is always 0 for buffered decoders.
        return (self.buffer, 0)

    def setstate(self, state):
        # The additional state info is ignored.
        self.buffer = state[0]
|
| 338 |
+
|
| 339 |
+
#
|
| 340 |
+
# The StreamWriter and StreamReader class provide generic working
|
| 341 |
+
# interfaces which can be used to implement new encoding submodules
|
| 342 |
+
# very easily. See encodings/utf_8.py for an example on how this is
|
| 343 |
+
# done.
|
| 344 |
+
#
|
| 345 |
+
|
| 346 |
+
class StreamWriter(Codec):

    def __init__(self, stream, errors='strict'):
        """Create a StreamWriter wrapping *stream*.

        stream must be a file-like object open for writing.

        The errors keyword argument selects the error handling
        scheme. These parameters are predefined:

         'strict' - raise a ValueError (or a subclass)
         'ignore' - ignore the character and continue with the next
         'replace'- replace with a suitable replacement character
         'xmlcharrefreplace' - Replace with the appropriate XML
                               character reference.
         'backslashreplace' - Replace with backslashed escape
                              sequences.
         'namereplace' - Replace with \\N{...} escape sequences.

        Further parameter values can be made available via
        register_error.
        """
        self.stream = stream
        self.errors = errors

    def write(self, object):
        """Encode *object* and write the result to the stream."""
        data, _consumed = self.encode(object, self.errors)
        self.stream.write(data)

    def writelines(self, list):
        """Write the concatenation of *list* via a single .write() call."""
        self.write(''.join(list))

    def reset(self):
        """Clear the codec buffers used for keeping internal state.

        Afterwards the output is in a clean state that allows
        appending fresh data without having to rescan the whole
        stream to recover state.
        """
        pass

    def seek(self, offset, whence=0):
        self.stream.seek(offset, whence)
        # A rewind to the very start invalidates any kept codec state.
        if whence == 0 and offset == 0:
            self.reset()

    def __getattr__(self, name,
                    getattr=getattr):
        """Delegate every other attribute to the wrapped stream."""
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
|
| 416 |
+
|
| 417 |
+
###
|
| 418 |
+
|
| 419 |
+
class StreamReader(Codec):
|
| 420 |
+
|
| 421 |
+
charbuffertype = str
|
| 422 |
+
|
| 423 |
+
def __init__(self, stream, errors='strict'):
|
| 424 |
+
|
| 425 |
+
""" Creates a StreamReader instance.
|
| 426 |
+
|
| 427 |
+
stream must be a file-like object open for reading.
|
| 428 |
+
|
| 429 |
+
The StreamReader may use different error handling
|
| 430 |
+
schemes by providing the errors keyword argument. These
|
| 431 |
+
parameters are predefined:
|
| 432 |
+
|
| 433 |
+
'strict' - raise a ValueError (or a subclass)
|
| 434 |
+
'ignore' - ignore the character and continue with the next
|
| 435 |
+
'replace'- replace with a suitable replacement character
|
| 436 |
+
'backslashreplace' - Replace with backslashed escape sequences;
|
| 437 |
+
|
| 438 |
+
The set of allowed parameter values can be extended via
|
| 439 |
+
register_error.
|
| 440 |
+
"""
|
| 441 |
+
self.stream = stream
|
| 442 |
+
self.errors = errors
|
| 443 |
+
self.bytebuffer = b""
|
| 444 |
+
self._empty_charbuffer = self.charbuffertype()
|
| 445 |
+
self.charbuffer = self._empty_charbuffer
|
| 446 |
+
self.linebuffer = None
|
| 447 |
+
|
| 448 |
+
def decode(self, input, errors='strict'):
|
| 449 |
+
raise NotImplementedError
|
| 450 |
+
|
| 451 |
+
def read(self, size=-1, chars=-1, firstline=False):
|
| 452 |
+
|
| 453 |
+
""" Decodes data from the stream self.stream and returns the
|
| 454 |
+
resulting object.
|
| 455 |
+
|
| 456 |
+
chars indicates the number of decoded code points or bytes to
|
| 457 |
+
return. read() will never return more data than requested,
|
| 458 |
+
but it might return less, if there is not enough available.
|
| 459 |
+
|
| 460 |
+
size indicates the approximate maximum number of decoded
|
| 461 |
+
bytes or code points to read for decoding. The decoder
|
| 462 |
+
can modify this setting as appropriate. The default value
|
| 463 |
+
-1 indicates to read and decode as much as possible. size
|
| 464 |
+
is intended to prevent having to decode huge files in one
|
| 465 |
+
step.
|
| 466 |
+
|
| 467 |
+
If firstline is true, and a UnicodeDecodeError happens
|
| 468 |
+
after the first line terminator in the input only the first line
|
| 469 |
+
will be returned, the rest of the input will be kept until the
|
| 470 |
+
next call to read().
|
| 471 |
+
|
| 472 |
+
The method should use a greedy read strategy, meaning that
|
| 473 |
+
it should read as much data as is allowed within the
|
| 474 |
+
definition of the encoding and the given size, e.g. if
|
| 475 |
+
optional encoding endings or state markers are available
|
| 476 |
+
on the stream, these should be read too.
|
| 477 |
+
"""
|
| 478 |
+
# If we have lines cached, first merge them back into characters
|
| 479 |
+
if self.linebuffer:
|
| 480 |
+
self.charbuffer = self._empty_charbuffer.join(self.linebuffer)
|
| 481 |
+
self.linebuffer = None
|
| 482 |
+
|
| 483 |
+
if chars < 0:
|
| 484 |
+
# For compatibility with other read() methods that take a
|
| 485 |
+
# single argument
|
| 486 |
+
chars = size
|
| 487 |
+
|
| 488 |
+
# read until we get the required number of characters (if available)
|
| 489 |
+
while True:
|
| 490 |
+
# can the request be satisfied from the character buffer?
|
| 491 |
+
if chars >= 0:
|
| 492 |
+
if len(self.charbuffer) >= chars:
|
| 493 |
+
break
|
| 494 |
+
# we need more data
|
| 495 |
+
if size < 0:
|
| 496 |
+
newdata = self.stream.read()
|
| 497 |
+
else:
|
| 498 |
+
newdata = self.stream.read(size)
|
| 499 |
+
# decode bytes (those remaining from the last call included)
|
| 500 |
+
data = self.bytebuffer + newdata
|
| 501 |
+
if not data:
|
| 502 |
+
break
|
| 503 |
+
try:
|
| 504 |
+
newchars, decodedbytes = self.decode(data, self.errors)
|
| 505 |
+
except UnicodeDecodeError as exc:
|
| 506 |
+
if firstline:
|
| 507 |
+
newchars, decodedbytes = \
|
| 508 |
+
self.decode(data[:exc.start], self.errors)
|
| 509 |
+
lines = newchars.splitlines(keepends=True)
|
| 510 |
+
if len(lines)<=1:
|
| 511 |
+
raise
|
| 512 |
+
else:
|
| 513 |
+
raise
|
| 514 |
+
# keep undecoded bytes until the next call
|
| 515 |
+
self.bytebuffer = data[decodedbytes:]
|
| 516 |
+
# put new characters in the character buffer
|
| 517 |
+
self.charbuffer += newchars
|
| 518 |
+
# there was no data available
|
| 519 |
+
if not newdata:
|
| 520 |
+
break
|
| 521 |
+
if chars < 0:
|
| 522 |
+
# Return everything we've got
|
| 523 |
+
result = self.charbuffer
|
| 524 |
+
self.charbuffer = self._empty_charbuffer
|
| 525 |
+
else:
|
| 526 |
+
# Return the first chars characters
|
| 527 |
+
result = self.charbuffer[:chars]
|
| 528 |
+
self.charbuffer = self.charbuffer[chars:]
|
| 529 |
+
return result
|
| 530 |
+
|
| 531 |
+
def readline(self, size=None, keepends=True):
|
| 532 |
+
|
| 533 |
+
""" Read one line from the input stream and return the
|
| 534 |
+
decoded data.
|
| 535 |
+
|
| 536 |
+
size, if given, is passed as size argument to the
|
| 537 |
+
read() method.
|
| 538 |
+
|
| 539 |
+
"""
|
| 540 |
+
# If we have lines cached from an earlier read, return
|
| 541 |
+
# them unconditionally
|
| 542 |
+
if self.linebuffer:
|
| 543 |
+
line = self.linebuffer[0]
|
| 544 |
+
del self.linebuffer[0]
|
| 545 |
+
if len(self.linebuffer) == 1:
|
| 546 |
+
# revert to charbuffer mode; we might need more data
|
| 547 |
+
# next time
|
| 548 |
+
self.charbuffer = self.linebuffer[0]
|
| 549 |
+
self.linebuffer = None
|
| 550 |
+
if not keepends:
|
| 551 |
+
line = line.splitlines(keepends=False)[0]
|
| 552 |
+
return line
|
| 553 |
+
|
| 554 |
+
readsize = size or 72
|
| 555 |
+
line = self._empty_charbuffer
|
| 556 |
+
# If size is given, we call read() only once
|
| 557 |
+
while True:
|
| 558 |
+
data = self.read(readsize, firstline=True)
|
| 559 |
+
if data:
|
| 560 |
+
# If we're at a "\r" read one extra character (which might
|
| 561 |
+
# be a "\n") to get a proper line ending. If the stream is
|
| 562 |
+
# temporarily exhausted we return the wrong line ending.
|
| 563 |
+
if (isinstance(data, str) and data.endswith("\r")) or \
|
| 564 |
+
(isinstance(data, bytes) and data.endswith(b"\r")):
|
| 565 |
+
data += self.read(size=1, chars=1)
|
| 566 |
+
|
| 567 |
+
line += data
|
| 568 |
+
lines = line.splitlines(keepends=True)
|
| 569 |
+
if lines:
|
| 570 |
+
if len(lines) > 1:
|
| 571 |
+
# More than one line result; the first line is a full line
|
| 572 |
+
# to return
|
| 573 |
+
line = lines[0]
|
| 574 |
+
del lines[0]
|
| 575 |
+
if len(lines) > 1:
|
| 576 |
+
# cache the remaining lines
|
| 577 |
+
lines[-1] += self.charbuffer
|
| 578 |
+
self.linebuffer = lines
|
| 579 |
+
self.charbuffer = None
|
| 580 |
+
else:
|
| 581 |
+
# only one remaining line, put it back into charbuffer
|
| 582 |
+
self.charbuffer = lines[0] + self.charbuffer
|
| 583 |
+
if not keepends:
|
| 584 |
+
line = line.splitlines(keepends=False)[0]
|
| 585 |
+
break
|
| 586 |
+
line0withend = lines[0]
|
| 587 |
+
line0withoutend = lines[0].splitlines(keepends=False)[0]
|
| 588 |
+
if line0withend != line0withoutend: # We really have a line end
|
| 589 |
+
# Put the rest back together and keep it until the next call
|
| 590 |
+
self.charbuffer = self._empty_charbuffer.join(lines[1:]) + \
|
| 591 |
+
self.charbuffer
|
| 592 |
+
if keepends:
|
| 593 |
+
line = line0withend
|
| 594 |
+
else:
|
| 595 |
+
line = line0withoutend
|
| 596 |
+
break
|
| 597 |
+
# we didn't get anything or this was our only try
|
| 598 |
+
if not data or size is not None:
|
| 599 |
+
if line and not keepends:
|
| 600 |
+
line = line.splitlines(keepends=False)[0]
|
| 601 |
+
break
|
| 602 |
+
if readsize < 8000:
|
| 603 |
+
readsize *= 2
|
| 604 |
+
return line
|
| 605 |
+
|
| 606 |
+
def readlines(self, sizehint=None, keepends=True):
|
| 607 |
+
|
| 608 |
+
""" Read all lines available on the input stream
|
| 609 |
+
and return them as a list.
|
| 610 |
+
|
| 611 |
+
Line breaks are implemented using the codec's decoder
|
| 612 |
+
method and are included in the list entries.
|
| 613 |
+
|
| 614 |
+
sizehint, if given, is ignored since there is no efficient
|
| 615 |
+
way to finding the true end-of-line.
|
| 616 |
+
|
| 617 |
+
"""
|
| 618 |
+
data = self.read()
|
| 619 |
+
return data.splitlines(keepends)
|
| 620 |
+
|
| 621 |
+
def reset(self):
|
| 622 |
+
|
| 623 |
+
""" Resets the codec buffers used for keeping internal state.
|
| 624 |
+
|
| 625 |
+
Note that no stream repositioning should take place.
|
| 626 |
+
This method is primarily intended to be able to recover
|
| 627 |
+
from decoding errors.
|
| 628 |
+
|
| 629 |
+
"""
|
| 630 |
+
self.bytebuffer = b""
|
| 631 |
+
self.charbuffer = self._empty_charbuffer
|
| 632 |
+
self.linebuffer = None
|
| 633 |
+
|
| 634 |
+
    def seek(self, offset, whence=0):
        """ Set the input stream's current position.

            Resets the codec buffers used for keeping state.
        """
        # Repositioning invalidates any partially decoded data, so the
        # internal buffers must be cleared after the raw seek.
        self.stream.seek(offset, whence)
        self.reset()
|
| 641 |
+
|
| 642 |
+
def __next__(self):
|
| 643 |
+
|
| 644 |
+
""" Return the next decoded line from the input stream."""
|
| 645 |
+
line = self.readline()
|
| 646 |
+
if line:
|
| 647 |
+
return line
|
| 648 |
+
raise StopIteration
|
| 649 |
+
|
| 650 |
+
    def __iter__(self):
        # The reader is its own iterator; __next__ yields decoded lines.
        return self
|
| 652 |
+
|
| 653 |
+
    def __getattr__(self, name,
                    getattr=getattr):

        """ Inherit all other methods from the underlying stream.
        """
        # NOTE(review): the builtin getattr is captured as a default
        # argument — presumably to keep it reachable cheaply/late in
        # interpreter lifetime; confirm before changing.
        return getattr(self.stream, name)
|
| 659 |
+
|
| 660 |
+
    def __enter__(self):
        # Context-manager support: entering simply exposes the wrapper.
        return self
|
| 662 |
+
|
| 663 |
+
    def __exit__(self, type, value, tb):
        # Close the underlying stream on exit; exceptions propagate.
        self.stream.close()
|
| 665 |
+
|
| 666 |
+
###
|
| 667 |
+
|
| 668 |
+
class StreamReaderWriter:

    """ Wraps a stream so that it can be used in both read and write
        modes.

        The design is such that one can use the factory functions
        returned by the codec.lookup() function to construct the
        instance: all reads are delegated to a StreamReader and all
        writes to a StreamWriter built around the same stream.

    """
    # Optional attributes set by the file wrappers below
    encoding = 'unknown'

    def __init__(self, stream, Reader, Writer, errors='strict'):

        """ Creates a StreamReaderWriter instance.

            stream must be a Stream-like object.

            Reader and Writer must be factory functions or classes
            providing the StreamReader resp. StreamWriter interface.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.

        """
        self.stream = stream
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):
        # All reading goes through the wrapped StreamReader.
        return self.reader.read(size)

    def readline(self, size=None):
        return self.reader.readline(size)

    def readlines(self, sizehint=None):
        return self.reader.readlines(sizehint)

    def __next__(self):
        """ Return the next decoded line from the input stream."""
        return next(self.reader)

    def __iter__(self):
        return self

    def write(self, data):
        # All writing goes through the wrapped StreamWriter.
        return self.writer.write(data)

    def writelines(self, list):
        return self.writer.writelines(list)

    def reset(self):
        self.reader.reset()
        self.writer.reset()

    def seek(self, offset, whence=0):
        # Reposition the raw stream and drop the reader's decode state;
        # the writer only needs resetting on a rewind to the very start.
        self.stream.seek(offset, whence)
        self.reader.reset()
        if offset == 0 and whence == 0:
            self.writer.reset()

    def __getattr__(self, name,
                    getattr=getattr):

        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    # these are needed to make "with StreamReaderWriter(...)" work properly

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
|
| 752 |
+
|
| 753 |
+
###
|
| 754 |
+
|
| 755 |
+
class StreamRecoder:

    """ StreamRecoder instances translate data from one encoding to another.

        They use the complete set of APIs returned by the
        codecs.lookup() function to implement their task.

        Data written to the StreamRecoder is first decoded into an
        intermediate format (depending on the "decode" codec) and then
        written to the underlying stream using an instance of the provided
        Writer class.

        In the other direction, data is read from the underlying stream using
        a Reader instance and then encoded and returned to the caller.

    """
    # Optional attributes set by the file wrappers below
    data_encoding = 'unknown'
    file_encoding = 'unknown'

    def __init__(self, stream, encode, decode, Reader, Writer,
                 errors='strict'):

        """ Creates a StreamRecoder instance which implements a two-way
            conversion: encode and decode work on the frontend (the
            data visible to .read() and .write()) while Reader and Writer
            work on the backend (the data in stream).

            You can use these objects to do transparent
            transcodings from e.g. latin-1 to utf-8 and back.

            stream must be a file-like object.

            encode and decode must adhere to the Codec interface; Reader and
            Writer must be factory functions or classes providing the
            StreamReader and StreamWriter interfaces resp.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.

        """
        self.stream = stream
        self.encode = encode
        self.decode = decode
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):
        # Read via the backend Reader, then re-encode into the frontend
        # format; the length returned by encode() is discarded.
        data = self.reader.read(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def readline(self, size=None):
        # The Reader distinguishes "no size" from an explicit size, so
        # only forward size when one was given.
        if size is None:
            data = self.reader.readline()
        else:
            data = self.reader.readline(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def readlines(self, sizehint=None):
        # sizehint is ignored: the whole stream is read and split.
        data = self.reader.read()
        data, bytesencoded = self.encode(data, self.errors)
        return data.splitlines(keepends=True)

    def __next__(self):

        """ Return the next decoded line from the input stream."""
        data = next(self.reader)
        data, bytesencoded = self.encode(data, self.errors)
        return data

    def __iter__(self):
        return self

    def write(self, data):
        # Frontend data is decoded first; the Writer then encodes it to
        # the backend format while writing.
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)

    def writelines(self, list):
        data = b''.join(list)
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)

    def reset(self):
        self.reader.reset()
        self.writer.reset()

    def seek(self, offset, whence=0):
        # Seeks must be propagated to both the readers and writers
        # as they might need to reset their internal buffers.
        self.reader.seek(offset, whence)
        self.writer.seek(offset, whence)

    def __getattr__(self, name,
                    getattr=getattr):

        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
|
| 868 |
+
|
| 869 |
+
### Shortcuts
|
| 870 |
+
|
| 871 |
+
def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):
    """ Open an encoded file using the given mode and return a wrapped
        version providing transparent encoding/decoding.

        Note: The wrapped version will only accept the object format
        defined by the codecs, i.e. Unicode objects for most builtin
        codecs. Output is also codec dependent and will usually be
        Unicode as well.

        If encoding is not None, then the underlying encoded files are
        always opened in binary mode. The default file mode is 'r',
        meaning to open the file in read mode.

        encoding specifies the encoding which is to be used for the
        file.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        buffering has the same meaning as for the builtin open() API.
        It defaults to -1 which means that the default buffer size will
        be used.

        The returned wrapped file object provides an extra attribute
        .encoding which allows querying the used encoding. This
        attribute is only available if an encoding was specified as
        parameter.

    """
    if encoding is not None and 'b' not in mode:
        # The codec layer works on raw bytes, so force binary mode.
        mode = mode + 'b'
    file = builtins.open(filename, mode, buffering)
    if encoding is None:
        return file

    try:
        info = lookup(encoding)
        srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
        # Add attributes to simplify introspection
        srw.encoding = encoding
        return srw
    except:
        # Don't leak the freshly opened file if codec lookup/wrap fails.
        file.close()
        raise
|
| 919 |
+
|
| 920 |
+
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
    """ Return a wrapped version of file which provides transparent
        encoding translation.

        Data written to the wrapped file is decoded according
        to the given data_encoding and then encoded to the underlying
        file using file_encoding. The intermediate data type
        will usually be Unicode but depends on the specified codecs.

        Bytes read from the file are decoded using file_encoding and then
        passed back to the caller encoded using data_encoding.

        If file_encoding is not given, it defaults to data_encoding.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        The returned wrapped file object provides two extra attributes
        .data_encoding and .file_encoding which reflect the given
        parameters of the same name. The attributes can be used for
        introspection by Python programs.

    """
    if file_encoding is None:
        file_encoding = data_encoding
    info_data = lookup(data_encoding)
    info_file = lookup(file_encoding)
    recoder = StreamRecoder(file, info_data.encode, info_data.decode,
                            info_file.streamreader, info_file.streamwriter,
                            errors)
    # Add attributes to simplify introspection
    recoder.data_encoding = data_encoding
    recoder.file_encoding = file_encoding
    return recoder
|
| 955 |
+
|
| 956 |
+
### Helpers for codec lookup
|
| 957 |
+
|
| 958 |
+
def getencoder(encoding):
    """ Look up the codec for the given encoding and return
        its encoder function.

        Raises a LookupError in case the encoding cannot be found.

    """
    codec_info = lookup(encoding)
    return codec_info.encode
|
| 967 |
+
|
| 968 |
+
def getdecoder(encoding):
    """ Look up the codec for the given encoding and return
        its decoder function.

        Raises a LookupError in case the encoding cannot be found.

    """
    codec_info = lookup(encoding)
    return codec_info.decode
|
| 977 |
+
|
| 978 |
+
def getincrementalencoder(encoding):

    """ Look up the codec for the given encoding and return
        its IncrementalEncoder class or factory function.

        Raises a LookupError in case the encoding cannot be found
        or the codecs doesn't provide an incremental encoder.

    """
    encoder = lookup(encoding).incrementalencoder
    if encoder is None:
        # The codec exists but has no incremental encoder; report it the
        # same way as an unknown encoding.
        raise LookupError(encoding)
    return encoder
|
| 991 |
+
|
| 992 |
+
def getincrementaldecoder(encoding):

    """ Look up the codec for the given encoding and return
        its IncrementalDecoder class or factory function.

        Raises a LookupError in case the encoding cannot be found
        or the codecs doesn't provide an incremental decoder.

    """
    decoder = lookup(encoding).incrementaldecoder
    if decoder is None:
        # The codec exists but has no incremental decoder; report it the
        # same way as an unknown encoding.
        raise LookupError(encoding)
    return decoder
|
| 1005 |
+
|
| 1006 |
+
def getreader(encoding):

    """ Look up the codec for the given encoding and return
        its StreamReader class or factory function.

        Raises a LookupError in case the encoding cannot be found.

    """
    return lookup(encoding).streamreader
|
| 1015 |
+
|
| 1016 |
+
def getwriter(encoding):

    """ Look up the codec for the given encoding and return
        its StreamWriter class or factory function.

        Raises a LookupError in case the encoding cannot be found.

    """
    return lookup(encoding).streamwriter
|
| 1025 |
+
|
| 1026 |
+
def iterencode(iterator, encoding, errors='strict', **kwargs):
    """
    Encoding iterator.

    Lazily encodes the strings produced by *iterator* with an
    IncrementalEncoder for *encoding*, yielding only non-empty chunks.

    errors and kwargs are passed through to the IncrementalEncoder
    constructor.
    """
    encoder = getincrementalencoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        encoded = encoder.encode(chunk)
        if encoded:
            yield encoded
    # Flush whatever state the encoder still holds.
    final = encoder.encode("", True)
    if final:
        yield final
|
| 1043 |
+
|
| 1044 |
+
def iterdecode(iterator, encoding, errors='strict', **kwargs):
    """
    Decoding iterator.

    Lazily decodes the byte chunks produced by *iterator* with an
    IncrementalDecoder for *encoding*, yielding only non-empty results.

    errors and kwargs are passed through to the IncrementalDecoder
    constructor.
    """
    decoder = getincrementaldecoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush whatever partial multi-byte sequence the decoder still holds.
    final = decoder.decode(b"", True)
    if final:
        yield final
|
| 1061 |
+
|
| 1062 |
+
### Helpers for charmap-based codecs
|
| 1063 |
+
|
| 1064 |
+
def make_identity_dict(rng):
    """ make_identity_dict(rng) -> dict

        Return a dictionary mapping each element of the rng sequence
        to itself.

    """
    # Single pass, so one-shot iterators work too.
    return dict((element, element) for element in rng)
|
| 1073 |
+
|
| 1074 |
+
def make_encoding_map(decoding_map):
    """ Creates an encoding map from a decoding map.

        If a target mapping in the decoding map occurs multiple
        times, then that target is mapped to None (undefined mapping),
        causing an exception when encountered by the charmap codec
        during translation.

        One example where this happens is cp875.py which decodes
        multiple character to \\u001a.

    """
    encoding_map = {}
    for code, target in decoding_map.items():
        # Any duplicate target is poisoned with None so encoding it
        # raises instead of silently picking one of the codes.
        encoding_map[target] = None if target in encoding_map else code
    return encoding_map
|
| 1094 |
+
|
| 1095 |
+
### error handlers
|
| 1096 |
+
|
| 1097 |
+
# Pre-resolved callables for the standard error handlers, looked up
# once at import time (codecs.strict_errors, codecs.ignore_errors, ...).
try:
    strict_errors = lookup_error("strict")
    ignore_errors = lookup_error("ignore")
    replace_errors = lookup_error("replace")
    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
    backslashreplace_errors = lookup_error("backslashreplace")
    namereplace_errors = lookup_error("namereplace")
except LookupError:
    # In --disable-unicode builds, these error handlers are missing
    strict_errors = None
    ignore_errors = None
    replace_errors = None
    xmlcharrefreplace_errors = None
    backslashreplace_errors = None
    namereplace_errors = None
|
| 1112 |
+
|
| 1113 |
+
# Tell modulefinder that using codecs probably needs the encodings
|
| 1114 |
+
# package
|
| 1115 |
+
_false = 0
if _false:
    # Never executed at runtime; this static import only exists so that
    # dependency scanners (modulefinder, freezers) bundle the encodings
    # package alongside codecs.
    import encodings
|
| 1118 |
+
|
| 1119 |
+
### Tests
|
| 1120 |
+
|
| 1121 |
+
if __name__ == '__main__':

    # Smoke test: wire the standard streams through EncodedFile so the
    # recoding machinery is exercised when running the module directly.

    # Make stdout translate Latin-1 output into UTF-8 output
    sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8')

    # Have stdin translate Latin-1 input into UTF-8 input
    sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
|
evalkit_tf446/lib/python3.10/codeop.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Utilities to compile possibly incomplete Python source code.
|
| 2 |
+
|
| 3 |
+
This module provides two interfaces, broadly similar to the builtin
|
| 4 |
+
function compile(), which take program text, a filename and a 'mode'
|
| 5 |
+
and:
|
| 6 |
+
|
| 7 |
+
- Return code object if the command is complete and valid
|
| 8 |
+
- Return None if the command is incomplete
|
| 9 |
+
- Raise SyntaxError, ValueError or OverflowError if the command is a
|
| 10 |
+
syntax error (OverflowError and ValueError can be produced by
|
| 11 |
+
malformed literals).
|
| 12 |
+
|
| 13 |
+
The two interfaces are:
|
| 14 |
+
|
| 15 |
+
compile_command(source, filename, symbol):
|
| 16 |
+
|
| 17 |
+
Compiles a single command in the manner described above.
|
| 18 |
+
|
| 19 |
+
CommandCompiler():
|
| 20 |
+
|
| 21 |
+
Instances of this class have __call__ methods identical in
|
| 22 |
+
signature to compile_command; the difference is that if the
|
| 23 |
+
instance compiles program text containing a __future__ statement,
|
| 24 |
+
the instance 'remembers' and compiles all subsequent program texts
|
| 25 |
+
with the statement in force.
|
| 26 |
+
|
| 27 |
+
The module also provides another class:
|
| 28 |
+
|
| 29 |
+
Compile():
|
| 30 |
+
|
| 31 |
+
Instances of this class act like the built-in function compile,
|
| 32 |
+
but with 'memory' in the sense described above.
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
import __future__
|
| 36 |
+
import warnings
|
| 37 |
+
|
| 38 |
+
# Resolve every known __future__ feature once so Compile can test
# compiled code objects against their compiler flags.
_features = [getattr(__future__, fname)
             for fname in __future__.all_feature_names]

__all__ = ["compile_command", "Compile", "CommandCompiler"]

# The following flags match the values from Include/cpython/compile.h
# Caveat emptor: These flags are undocumented on purpose and depending
# on their effect outside the standard library is **unsupported**.
PyCF_DONT_IMPLY_DEDENT = 0x200
PyCF_ALLOW_INCOMPLETE_INPUT = 0x4000
|
| 48 |
+
|
| 49 |
+
def _maybe_compile(compiler, source, filename, symbol):
    """Compile *source* with *compiler*, returning None when the input
    looks incomplete rather than erroneous.
    """
    # Check for source consisting of only blank lines and comments.
    for line in source.split("\n"):
        line = line.strip()
        if line and line[0] != '#':
            break              # Leave it alone.
    else:
        if symbol != "eval":
            source = "pass"    # Replace it with a 'pass' statement

    # Disable compiler warnings when checking for incomplete input.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", (SyntaxWarning, DeprecationWarning))
        try:
            compiler(source, filename, symbol)
        except SyntaxError:  # Let other compile() errors propagate.
            # Retry with a trailing newline: if that fixes it, the input
            # was merely incomplete, not a real syntax error.
            try:
                compiler(source + "\n", filename, symbol)
                return None
            except SyntaxError as e:
                if "incomplete input" in str(e):
                    return None
                # fallthrough

    # Compile outside the warnings suppression so callers see warnings.
    return compiler(source, filename, symbol)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _is_syntax_error(err1, err2):
    """Return True when *err1* and *err2* have identical reprs, except
    that two "was never closed" errors are never considered equal.
    """
    first, second = repr(err1), repr(err2)
    if "was never closed" in first and "was never closed" in second:
        return False
    return first == second
|
| 84 |
+
|
| 85 |
+
def _compile(source, filename, symbol):
    # Compile with the interactive-mode flags: do not imply a trailing
    # DEDENT and allow (i.e. specially report) incomplete input.
    return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT | PyCF_ALLOW_INCOMPLETE_INPUT)
|
| 87 |
+
|
| 88 |
+
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read; default
                "<input>"
    symbol -- optional grammar start symbol; "single" (default), "exec"
              or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    # Stateless variant: every call compiles with the default flags
    # (contrast with CommandCompiler, which remembers __future__ flags).
    return _maybe_compile(_compile, source, filename, symbol)
|
| 108 |
+
|
| 109 |
+
class Compile:
|
| 110 |
+
"""Instances of this class behave much like the built-in compile
|
| 111 |
+
function, but if one is used to compile text containing a future
|
| 112 |
+
statement, it "remembers" and compiles all subsequent program texts
|
| 113 |
+
with the statement in force."""
|
| 114 |
+
def __init__(self):
|
| 115 |
+
self.flags = PyCF_DONT_IMPLY_DEDENT | PyCF_ALLOW_INCOMPLETE_INPUT
|
| 116 |
+
|
| 117 |
+
def __call__(self, source, filename, symbol):
|
| 118 |
+
codeob = compile(source, filename, symbol, self.flags, True)
|
| 119 |
+
for feature in _features:
|
| 120 |
+
if codeob.co_flags & feature.compiler_flag:
|
| 121 |
+
self.flags |= feature.compiler_flag
|
| 122 |
+
return codeob
|
| 123 |
+
|
| 124 |
+
class CommandCompiler:
    """Instances of this class have __call__ methods identical in
    signature to compile_command; the difference is that if the
    instance compiles program text containing a __future__ statement,
    the instance 'remembers' and compiles all subsequent program texts
    with the statement in force."""

    def __init__(self):
        # The stateful compiler carries __future__ flags across calls.
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
                  "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command
          is a syntax error (OverflowError and ValueError can be
          produced by malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
|
evalkit_tf446/lib/python3.10/compileall.py
ADDED
|
@@ -0,0 +1,463 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module/script to byte-compile all .py files to .pyc files.
|
| 2 |
+
|
| 3 |
+
When called as a script with arguments, this compiles the directories
|
| 4 |
+
given as arguments recursively; the -l option prevents it from
|
| 5 |
+
recursing into directories.
|
| 6 |
+
|
| 7 |
+
Without arguments, if compiles all modules on sys.path, without
|
| 8 |
+
recursing into subdirectories. (Even though it should do so for
|
| 9 |
+
packages -- for now, you'll have to deal with packages separately.)
|
| 10 |
+
|
| 11 |
+
See module py_compile for details of the actual byte-compilation.
|
| 12 |
+
"""
|
| 13 |
+
import os
|
| 14 |
+
import sys
|
| 15 |
+
import importlib.util
|
| 16 |
+
import py_compile
|
| 17 |
+
import struct
|
| 18 |
+
import filecmp
|
| 19 |
+
|
| 20 |
+
from functools import partial
|
| 21 |
+
from pathlib import Path
|
| 22 |
+
|
| 23 |
+
__all__ = ["compile_dir","compile_file","compile_path"]
|
| 24 |
+
|
| 25 |
+
def _walk_dir(dir, maxlevels, quiet=0):
    """Yield the paths of all files under *dir*, recursing into
    subdirectories up to *maxlevels* deep.

    __pycache__ directories and symlinked subdirectories are skipped.
    quiet: 0 prints each directory listed, 1 prints errors only,
    2 prints nothing.
    """
    if quiet < 2 and isinstance(dir, os.PathLike):
        dir = os.fspath(dir)
    if not quiet:
        print('Listing {!r}...'.format(dir))
    try:
        entries = sorted(os.listdir(dir))
    except OSError:
        if quiet < 2:
            print("Can't list {!r}".format(dir))
        entries = []
    for entry in entries:
        if entry == '__pycache__':
            continue
        path = os.path.join(dir, entry)
        if not os.path.isdir(path):
            yield path
        elif (maxlevels > 0 and entry != os.curdir and entry != os.pardir
              and os.path.isdir(path) and not os.path.islink(path)):
            yield from _walk_dir(path, maxlevels=maxlevels - 1,
                                 quiet=quiet)
|
| 47 |
+
|
| 48 |
+
def compile_dir(dir, maxlevels=None, ddir=None, force=False,
                rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
                invalidation_mode=None, *, stripdir=None,
                prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
    """Byte-compile all modules in the given directory tree.

    Arguments (only dir is required):

    dir: the directory to byte-compile
    maxlevels: maximum recursion level (default `sys.getrecursionlimit()`)
    ddir: the directory that will be prepended to the path to the
       file as it is compiled into each byte-code file.
    force: if True, force compilation, even if timestamps are up-to-date
    quiet: full output with False or 0, errors only with 1,
       no output with 2
    legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
    optimize: int or list of optimization levels or -1 for level of
       the interpreter. Multiple levels leads to multiple compiled
       files each with one optimization level.
    workers: maximum number of parallel workers
    invalidation_mode: how the up-to-dateness of the pyc will be checked
    stripdir: part of path to left-strip from source file path
    prependdir: path to prepend to beginning of original file path, applied
       after stripdir
    limit_sl_dest: ignore symlinks if they are pointing outside of
       the defined path
    hardlink_dupes: hardlink duplicated pyc files
    """
    ProcessPoolExecutor = None
    if ddir is not None and (stripdir is not None or prependdir is not None):
        raise ValueError(("Destination dir (ddir) cannot be used "
                          "in combination with stripdir or prependdir"))
    if ddir is not None:
        # ddir is the legacy spelling of stripdir=dir, prependdir=ddir.
        stripdir = dir
        prependdir = ddir
        ddir = None
    if workers < 0:
        raise ValueError('workers must be greater or equal to 0')
    if workers != 1:
        # Check if this is a system where ProcessPoolExecutor can function.
        from concurrent.futures.process import _check_system_limits
        try:
            _check_system_limits()
        except NotImplementedError:
            # Fall back to sequential compilation below.
            workers = 1
        else:
            from concurrent.futures import ProcessPoolExecutor
    if maxlevels is None:
        maxlevels = sys.getrecursionlimit()
    files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels)
    success = True
    if workers != 1 and ProcessPoolExecutor is not None:
        # If workers == 0, let ProcessPoolExecutor choose
        workers = workers or None
        with ProcessPoolExecutor(max_workers=workers) as executor:
            results = executor.map(partial(compile_file,
                                           ddir=ddir, force=force,
                                           rx=rx, quiet=quiet,
                                           legacy=legacy,
                                           optimize=optimize,
                                           invalidation_mode=invalidation_mode,
                                           stripdir=stripdir,
                                           prependdir=prependdir,
                                           limit_sl_dest=limit_sl_dest,
                                           hardlink_dupes=hardlink_dupes),
                                   files)
            # A single False in results marks the whole run as failed.
            success = min(results, default=True)
    else:
        for file in files:
            if not compile_file(file, ddir, force, rx, quiet,
                                legacy, optimize, invalidation_mode,
                                stripdir=stripdir, prependdir=prependdir,
                                limit_sl_dest=limit_sl_dest,
                                hardlink_dupes=hardlink_dupes):
                success = False
    return success
|
| 124 |
+
|
| 125 |
+
def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
|
| 126 |
+
legacy=False, optimize=-1,
|
| 127 |
+
invalidation_mode=None, *, stripdir=None, prependdir=None,
|
| 128 |
+
limit_sl_dest=None, hardlink_dupes=False):
|
| 129 |
+
"""Byte-compile one file.
|
| 130 |
+
|
| 131 |
+
Arguments (only fullname is required):
|
| 132 |
+
|
| 133 |
+
fullname: the file to byte-compile
|
| 134 |
+
ddir: if given, the directory name compiled in to the
|
| 135 |
+
byte-code file.
|
| 136 |
+
force: if True, force compilation, even if timestamps are up-to-date
|
| 137 |
+
quiet: full output with False or 0, errors only with 1,
|
| 138 |
+
no output with 2
|
| 139 |
+
legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
|
| 140 |
+
optimize: int or list of optimization levels or -1 for level of
|
| 141 |
+
the interpreter. Multiple levels leads to multiple compiled
|
| 142 |
+
files each with one optimization level.
|
| 143 |
+
invalidation_mode: how the up-to-dateness of the pyc will be checked
|
| 144 |
+
stripdir: part of path to left-strip from source file path
|
| 145 |
+
prependdir: path to prepend to beginning of original file path, applied
|
| 146 |
+
after stripdir
|
| 147 |
+
limit_sl_dest: ignore symlinks if they are pointing outside of
|
| 148 |
+
the defined path.
|
| 149 |
+
hardlink_dupes: hardlink duplicated pyc files
|
| 150 |
+
"""
|
| 151 |
+
|
| 152 |
+
if ddir is not None and (stripdir is not None or prependdir is not None):
|
| 153 |
+
raise ValueError(("Destination dir (ddir) cannot be used "
|
| 154 |
+
"in combination with stripdir or prependdir"))
|
| 155 |
+
|
| 156 |
+
success = True
|
| 157 |
+
fullname = os.fspath(fullname)
|
| 158 |
+
stripdir = os.fspath(stripdir) if stripdir is not None else None
|
| 159 |
+
name = os.path.basename(fullname)
|
| 160 |
+
|
| 161 |
+
dfile = None
|
| 162 |
+
|
| 163 |
+
if ddir is not None:
|
| 164 |
+
dfile = os.path.join(ddir, name)
|
| 165 |
+
|
| 166 |
+
if stripdir is not None:
|
| 167 |
+
fullname_parts = fullname.split(os.path.sep)
|
| 168 |
+
stripdir_parts = stripdir.split(os.path.sep)
|
| 169 |
+
ddir_parts = list(fullname_parts)
|
| 170 |
+
|
| 171 |
+
for spart, opart in zip(stripdir_parts, fullname_parts):
|
| 172 |
+
if spart == opart:
|
| 173 |
+
ddir_parts.remove(spart)
|
| 174 |
+
|
| 175 |
+
dfile = os.path.join(*ddir_parts)
|
| 176 |
+
|
| 177 |
+
if prependdir is not None:
|
| 178 |
+
if dfile is None:
|
| 179 |
+
dfile = os.path.join(prependdir, fullname)
|
| 180 |
+
else:
|
| 181 |
+
dfile = os.path.join(prependdir, dfile)
|
| 182 |
+
|
| 183 |
+
if isinstance(optimize, int):
|
| 184 |
+
optimize = [optimize]
|
| 185 |
+
|
| 186 |
+
# Use set() to remove duplicates.
|
| 187 |
+
# Use sorted() to create pyc files in a deterministic order.
|
| 188 |
+
optimize = sorted(set(optimize))
|
| 189 |
+
|
| 190 |
+
if hardlink_dupes and len(optimize) < 2:
|
| 191 |
+
raise ValueError("Hardlinking of duplicated bytecode makes sense "
|
| 192 |
+
"only for more than one optimization level")
|
| 193 |
+
|
| 194 |
+
if rx is not None:
|
| 195 |
+
mo = rx.search(fullname)
|
| 196 |
+
if mo:
|
| 197 |
+
return success
|
| 198 |
+
|
| 199 |
+
if limit_sl_dest is not None and os.path.islink(fullname):
|
| 200 |
+
if Path(limit_sl_dest).resolve() not in Path(fullname).resolve().parents:
|
| 201 |
+
return success
|
| 202 |
+
|
| 203 |
+
opt_cfiles = {}
|
| 204 |
+
|
| 205 |
+
if os.path.isfile(fullname):
|
| 206 |
+
for opt_level in optimize:
|
| 207 |
+
if legacy:
|
| 208 |
+
opt_cfiles[opt_level] = fullname + 'c'
|
| 209 |
+
else:
|
| 210 |
+
if opt_level >= 0:
|
| 211 |
+
opt = opt_level if opt_level >= 1 else ''
|
| 212 |
+
cfile = (importlib.util.cache_from_source(
|
| 213 |
+
fullname, optimization=opt))
|
| 214 |
+
opt_cfiles[opt_level] = cfile
|
| 215 |
+
else:
|
| 216 |
+
cfile = importlib.util.cache_from_source(fullname)
|
| 217 |
+
opt_cfiles[opt_level] = cfile
|
| 218 |
+
|
| 219 |
+
head, tail = name[:-3], name[-3:]
|
| 220 |
+
if tail == '.py':
|
| 221 |
+
if not force:
|
| 222 |
+
try:
|
| 223 |
+
mtime = int(os.stat(fullname).st_mtime)
|
| 224 |
+
expect = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
|
| 225 |
+
0, mtime & 0xFFFF_FFFF)
|
| 226 |
+
for cfile in opt_cfiles.values():
|
| 227 |
+
with open(cfile, 'rb') as chandle:
|
| 228 |
+
actual = chandle.read(12)
|
| 229 |
+
if expect != actual:
|
| 230 |
+
break
|
| 231 |
+
else:
|
| 232 |
+
return success
|
| 233 |
+
except OSError:
|
| 234 |
+
pass
|
| 235 |
+
if not quiet:
|
| 236 |
+
print('Compiling {!r}...'.format(fullname))
|
| 237 |
+
try:
|
| 238 |
+
for index, opt_level in enumerate(optimize):
|
| 239 |
+
cfile = opt_cfiles[opt_level]
|
| 240 |
+
ok = py_compile.compile(fullname, cfile, dfile, True,
|
| 241 |
+
optimize=opt_level,
|
| 242 |
+
invalidation_mode=invalidation_mode)
|
| 243 |
+
if index > 0 and hardlink_dupes:
|
| 244 |
+
previous_cfile = opt_cfiles[optimize[index - 1]]
|
| 245 |
+
if filecmp.cmp(cfile, previous_cfile, shallow=False):
|
| 246 |
+
os.unlink(cfile)
|
| 247 |
+
os.link(previous_cfile, cfile)
|
| 248 |
+
except py_compile.PyCompileError as err:
|
| 249 |
+
success = False
|
| 250 |
+
if quiet >= 2:
|
| 251 |
+
return success
|
| 252 |
+
elif quiet:
|
| 253 |
+
print('*** Error compiling {!r}...'.format(fullname))
|
| 254 |
+
else:
|
| 255 |
+
print('*** ', end='')
|
| 256 |
+
# escape non-printable characters in msg
|
| 257 |
+
encoding = sys.stdout.encoding or sys.getdefaultencoding()
|
| 258 |
+
msg = err.msg.encode(encoding, errors='backslashreplace').decode(encoding)
|
| 259 |
+
print(msg)
|
| 260 |
+
except (SyntaxError, UnicodeError, OSError) as e:
|
| 261 |
+
success = False
|
| 262 |
+
if quiet >= 2:
|
| 263 |
+
return success
|
| 264 |
+
elif quiet:
|
| 265 |
+
print('*** Error compiling {!r}...'.format(fullname))
|
| 266 |
+
else:
|
| 267 |
+
print('*** ', end='')
|
| 268 |
+
print(e.__class__.__name__ + ':', e)
|
| 269 |
+
else:
|
| 270 |
+
if ok == 0:
|
| 271 |
+
success = False
|
| 272 |
+
return success
|
| 273 |
+
|
| 274 |
+
def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=0,
|
| 275 |
+
legacy=False, optimize=-1,
|
| 276 |
+
invalidation_mode=None):
|
| 277 |
+
"""Byte-compile all module on sys.path.
|
| 278 |
+
|
| 279 |
+
Arguments (all optional):
|
| 280 |
+
|
| 281 |
+
skip_curdir: if true, skip current directory (default True)
|
| 282 |
+
maxlevels: max recursion level (default 0)
|
| 283 |
+
force: as for compile_dir() (default False)
|
| 284 |
+
quiet: as for compile_dir() (default 0)
|
| 285 |
+
legacy: as for compile_dir() (default False)
|
| 286 |
+
optimize: as for compile_dir() (default -1)
|
| 287 |
+
invalidation_mode: as for compiler_dir()
|
| 288 |
+
"""
|
| 289 |
+
success = True
|
| 290 |
+
for dir in sys.path:
|
| 291 |
+
if (not dir or dir == os.curdir) and skip_curdir:
|
| 292 |
+
if quiet < 2:
|
| 293 |
+
print('Skipping current directory')
|
| 294 |
+
else:
|
| 295 |
+
success = success and compile_dir(
|
| 296 |
+
dir,
|
| 297 |
+
maxlevels,
|
| 298 |
+
None,
|
| 299 |
+
force,
|
| 300 |
+
quiet=quiet,
|
| 301 |
+
legacy=legacy,
|
| 302 |
+
optimize=optimize,
|
| 303 |
+
invalidation_mode=invalidation_mode,
|
| 304 |
+
)
|
| 305 |
+
return success
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
def main():
|
| 309 |
+
"""Script main program."""
|
| 310 |
+
import argparse
|
| 311 |
+
|
| 312 |
+
parser = argparse.ArgumentParser(
|
| 313 |
+
description='Utilities to support installing Python libraries.')
|
| 314 |
+
parser.add_argument('-l', action='store_const', const=0,
|
| 315 |
+
default=None, dest='maxlevels',
|
| 316 |
+
help="don't recurse into subdirectories")
|
| 317 |
+
parser.add_argument('-r', type=int, dest='recursion',
|
| 318 |
+
help=('control the maximum recursion level. '
|
| 319 |
+
'if `-l` and `-r` options are specified, '
|
| 320 |
+
'then `-r` takes precedence.'))
|
| 321 |
+
parser.add_argument('-f', action='store_true', dest='force',
|
| 322 |
+
help='force rebuild even if timestamps are up to date')
|
| 323 |
+
parser.add_argument('-q', action='count', dest='quiet', default=0,
|
| 324 |
+
help='output only error messages; -qq will suppress '
|
| 325 |
+
'the error messages as well.')
|
| 326 |
+
parser.add_argument('-b', action='store_true', dest='legacy',
|
| 327 |
+
help='use legacy (pre-PEP3147) compiled file locations')
|
| 328 |
+
parser.add_argument('-d', metavar='DESTDIR', dest='ddir', default=None,
|
| 329 |
+
help=('directory to prepend to file paths for use in '
|
| 330 |
+
'compile-time tracebacks and in runtime '
|
| 331 |
+
'tracebacks in cases where the source file is '
|
| 332 |
+
'unavailable'))
|
| 333 |
+
parser.add_argument('-s', metavar='STRIPDIR', dest='stripdir',
|
| 334 |
+
default=None,
|
| 335 |
+
help=('part of path to left-strip from path '
|
| 336 |
+
'to source file - for example buildroot. '
|
| 337 |
+
'`-d` and `-s` options cannot be '
|
| 338 |
+
'specified together.'))
|
| 339 |
+
parser.add_argument('-p', metavar='PREPENDDIR', dest='prependdir',
|
| 340 |
+
default=None,
|
| 341 |
+
help=('path to add as prefix to path '
|
| 342 |
+
'to source file - for example / to make '
|
| 343 |
+
'it absolute when some part is removed '
|
| 344 |
+
'by `-s` option. '
|
| 345 |
+
'`-d` and `-p` options cannot be '
|
| 346 |
+
'specified together.'))
|
| 347 |
+
parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None,
|
| 348 |
+
help=('skip files matching the regular expression; '
|
| 349 |
+
'the regexp is searched for in the full path '
|
| 350 |
+
'of each file considered for compilation'))
|
| 351 |
+
parser.add_argument('-i', metavar='FILE', dest='flist',
|
| 352 |
+
help=('add all the files and directories listed in '
|
| 353 |
+
'FILE to the list considered for compilation; '
|
| 354 |
+
'if "-", names are read from stdin'))
|
| 355 |
+
parser.add_argument('compile_dest', metavar='FILE|DIR', nargs='*',
|
| 356 |
+
help=('zero or more file and directory names '
|
| 357 |
+
'to compile; if no arguments given, defaults '
|
| 358 |
+
'to the equivalent of -l sys.path'))
|
| 359 |
+
parser.add_argument('-j', '--workers', default=1,
|
| 360 |
+
type=int, help='Run compileall concurrently')
|
| 361 |
+
invalidation_modes = [mode.name.lower().replace('_', '-')
|
| 362 |
+
for mode in py_compile.PycInvalidationMode]
|
| 363 |
+
parser.add_argument('--invalidation-mode',
|
| 364 |
+
choices=sorted(invalidation_modes),
|
| 365 |
+
help=('set .pyc invalidation mode; defaults to '
|
| 366 |
+
'"checked-hash" if the SOURCE_DATE_EPOCH '
|
| 367 |
+
'environment variable is set, and '
|
| 368 |
+
'"timestamp" otherwise.'))
|
| 369 |
+
parser.add_argument('-o', action='append', type=int, dest='opt_levels',
|
| 370 |
+
help=('Optimization levels to run compilation with. '
|
| 371 |
+
'Default is -1 which uses the optimization level '
|
| 372 |
+
'of the Python interpreter itself (see -O).'))
|
| 373 |
+
parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
|
| 374 |
+
help='Ignore symlinks pointing outsite of the DIR')
|
| 375 |
+
parser.add_argument('--hardlink-dupes', action='store_true',
|
| 376 |
+
dest='hardlink_dupes',
|
| 377 |
+
help='Hardlink duplicated pyc files')
|
| 378 |
+
|
| 379 |
+
args = parser.parse_args()
|
| 380 |
+
compile_dests = args.compile_dest
|
| 381 |
+
|
| 382 |
+
if args.rx:
|
| 383 |
+
import re
|
| 384 |
+
args.rx = re.compile(args.rx)
|
| 385 |
+
|
| 386 |
+
if args.limit_sl_dest == "":
|
| 387 |
+
args.limit_sl_dest = None
|
| 388 |
+
|
| 389 |
+
if args.recursion is not None:
|
| 390 |
+
maxlevels = args.recursion
|
| 391 |
+
else:
|
| 392 |
+
maxlevels = args.maxlevels
|
| 393 |
+
|
| 394 |
+
if args.opt_levels is None:
|
| 395 |
+
args.opt_levels = [-1]
|
| 396 |
+
|
| 397 |
+
if len(args.opt_levels) == 1 and args.hardlink_dupes:
|
| 398 |
+
parser.error(("Hardlinking of duplicated bytecode makes sense "
|
| 399 |
+
"only for more than one optimization level."))
|
| 400 |
+
|
| 401 |
+
if args.ddir is not None and (
|
| 402 |
+
args.stripdir is not None or args.prependdir is not None
|
| 403 |
+
):
|
| 404 |
+
parser.error("-d cannot be used in combination with -s or -p")
|
| 405 |
+
|
| 406 |
+
# if flist is provided then load it
|
| 407 |
+
if args.flist:
|
| 408 |
+
try:
|
| 409 |
+
with (sys.stdin if args.flist=='-' else
|
| 410 |
+
open(args.flist, encoding="utf-8")) as f:
|
| 411 |
+
for line in f:
|
| 412 |
+
compile_dests.append(line.strip())
|
| 413 |
+
except OSError:
|
| 414 |
+
if args.quiet < 2:
|
| 415 |
+
print("Error reading file list {}".format(args.flist))
|
| 416 |
+
return False
|
| 417 |
+
|
| 418 |
+
if args.invalidation_mode:
|
| 419 |
+
ivl_mode = args.invalidation_mode.replace('-', '_').upper()
|
| 420 |
+
invalidation_mode = py_compile.PycInvalidationMode[ivl_mode]
|
| 421 |
+
else:
|
| 422 |
+
invalidation_mode = None
|
| 423 |
+
|
| 424 |
+
success = True
|
| 425 |
+
try:
|
| 426 |
+
if compile_dests:
|
| 427 |
+
for dest in compile_dests:
|
| 428 |
+
if os.path.isfile(dest):
|
| 429 |
+
if not compile_file(dest, args.ddir, args.force, args.rx,
|
| 430 |
+
args.quiet, args.legacy,
|
| 431 |
+
invalidation_mode=invalidation_mode,
|
| 432 |
+
stripdir=args.stripdir,
|
| 433 |
+
prependdir=args.prependdir,
|
| 434 |
+
optimize=args.opt_levels,
|
| 435 |
+
limit_sl_dest=args.limit_sl_dest,
|
| 436 |
+
hardlink_dupes=args.hardlink_dupes):
|
| 437 |
+
success = False
|
| 438 |
+
else:
|
| 439 |
+
if not compile_dir(dest, maxlevels, args.ddir,
|
| 440 |
+
args.force, args.rx, args.quiet,
|
| 441 |
+
args.legacy, workers=args.workers,
|
| 442 |
+
invalidation_mode=invalidation_mode,
|
| 443 |
+
stripdir=args.stripdir,
|
| 444 |
+
prependdir=args.prependdir,
|
| 445 |
+
optimize=args.opt_levels,
|
| 446 |
+
limit_sl_dest=args.limit_sl_dest,
|
| 447 |
+
hardlink_dupes=args.hardlink_dupes):
|
| 448 |
+
success = False
|
| 449 |
+
return success
|
| 450 |
+
else:
|
| 451 |
+
return compile_path(legacy=args.legacy, force=args.force,
|
| 452 |
+
quiet=args.quiet,
|
| 453 |
+
invalidation_mode=invalidation_mode)
|
| 454 |
+
except KeyboardInterrupt:
|
| 455 |
+
if args.quiet < 2:
|
| 456 |
+
print("\n[interrupted]")
|
| 457 |
+
return False
|
| 458 |
+
return True
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
if __name__ == '__main__':
|
| 462 |
+
exit_status = int(not main())
|
| 463 |
+
sys.exit(exit_status)
|
evalkit_tf446/lib/python3.10/contextvars.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from _contextvars import Context, ContextVar, Token, copy_context
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
__all__ = ('Context', 'ContextVar', 'Token', 'copy_context')
|
evalkit_tf446/lib/python3.10/copyreg.py
ADDED
|
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper to provide extensibility for pickle.
|
| 2 |
+
|
| 3 |
+
This is only useful to add pickle support for extension types defined in
|
| 4 |
+
C, not for instances of user-defined classes.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__ = ["pickle", "constructor",
|
| 8 |
+
"add_extension", "remove_extension", "clear_extension_cache"]
|
| 9 |
+
|
| 10 |
+
dispatch_table = {}
|
| 11 |
+
|
| 12 |
+
def pickle(ob_type, pickle_function, constructor_ob=None):
|
| 13 |
+
if not callable(pickle_function):
|
| 14 |
+
raise TypeError("reduction functions must be callable")
|
| 15 |
+
dispatch_table[ob_type] = pickle_function
|
| 16 |
+
|
| 17 |
+
# The constructor_ob function is a vestige of safe for unpickling.
|
| 18 |
+
# There is no reason for the caller to pass it anymore.
|
| 19 |
+
if constructor_ob is not None:
|
| 20 |
+
constructor(constructor_ob)
|
| 21 |
+
|
| 22 |
+
def constructor(object):
|
| 23 |
+
if not callable(object):
|
| 24 |
+
raise TypeError("constructors must be callable")
|
| 25 |
+
|
| 26 |
+
# Example: provide pickling support for complex numbers.
|
| 27 |
+
|
| 28 |
+
try:
|
| 29 |
+
complex
|
| 30 |
+
except NameError:
|
| 31 |
+
pass
|
| 32 |
+
else:
|
| 33 |
+
|
| 34 |
+
def pickle_complex(c):
|
| 35 |
+
return complex, (c.real, c.imag)
|
| 36 |
+
|
| 37 |
+
pickle(complex, pickle_complex, complex)
|
| 38 |
+
|
| 39 |
+
def pickle_union(obj):
|
| 40 |
+
import functools, operator
|
| 41 |
+
return functools.reduce, (operator.or_, obj.__args__)
|
| 42 |
+
|
| 43 |
+
pickle(type(int | str), pickle_union)
|
| 44 |
+
|
| 45 |
+
# Support for pickling new-style objects
|
| 46 |
+
|
| 47 |
+
def _reconstructor(cls, base, state):
|
| 48 |
+
if base is object:
|
| 49 |
+
obj = object.__new__(cls)
|
| 50 |
+
else:
|
| 51 |
+
obj = base.__new__(cls, state)
|
| 52 |
+
if base.__init__ != object.__init__:
|
| 53 |
+
base.__init__(obj, state)
|
| 54 |
+
return obj
|
| 55 |
+
|
| 56 |
+
_HEAPTYPE = 1<<9
|
| 57 |
+
_new_type = type(int.__new__)
|
| 58 |
+
|
| 59 |
+
# Python code for object.__reduce_ex__ for protocols 0 and 1
|
| 60 |
+
|
| 61 |
+
def _reduce_ex(self, proto):
|
| 62 |
+
assert proto < 2
|
| 63 |
+
cls = self.__class__
|
| 64 |
+
for base in cls.__mro__:
|
| 65 |
+
if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
|
| 66 |
+
break
|
| 67 |
+
new = base.__new__
|
| 68 |
+
if isinstance(new, _new_type) and new.__self__ is base:
|
| 69 |
+
break
|
| 70 |
+
else:
|
| 71 |
+
base = object # not really reachable
|
| 72 |
+
if base is object:
|
| 73 |
+
state = None
|
| 74 |
+
else:
|
| 75 |
+
if base is cls:
|
| 76 |
+
raise TypeError(f"cannot pickle {cls.__name__!r} object")
|
| 77 |
+
state = base(self)
|
| 78 |
+
args = (cls, base, state)
|
| 79 |
+
try:
|
| 80 |
+
getstate = self.__getstate__
|
| 81 |
+
except AttributeError:
|
| 82 |
+
if getattr(self, "__slots__", None):
|
| 83 |
+
raise TypeError(f"cannot pickle {cls.__name__!r} object: "
|
| 84 |
+
f"a class that defines __slots__ without "
|
| 85 |
+
f"defining __getstate__ cannot be pickled "
|
| 86 |
+
f"with protocol {proto}") from None
|
| 87 |
+
try:
|
| 88 |
+
dict = self.__dict__
|
| 89 |
+
except AttributeError:
|
| 90 |
+
dict = None
|
| 91 |
+
else:
|
| 92 |
+
dict = getstate()
|
| 93 |
+
if dict:
|
| 94 |
+
return _reconstructor, args, dict
|
| 95 |
+
else:
|
| 96 |
+
return _reconstructor, args
|
| 97 |
+
|
| 98 |
+
# Helper for __reduce_ex__ protocol 2
|
| 99 |
+
|
| 100 |
+
def __newobj__(cls, *args):
|
| 101 |
+
return cls.__new__(cls, *args)
|
| 102 |
+
|
| 103 |
+
def __newobj_ex__(cls, args, kwargs):
|
| 104 |
+
"""Used by pickle protocol 4, instead of __newobj__ to allow classes with
|
| 105 |
+
keyword-only arguments to be pickled correctly.
|
| 106 |
+
"""
|
| 107 |
+
return cls.__new__(cls, *args, **kwargs)
|
| 108 |
+
|
| 109 |
+
def _slotnames(cls):
|
| 110 |
+
"""Return a list of slot names for a given class.
|
| 111 |
+
|
| 112 |
+
This needs to find slots defined by the class and its bases, so we
|
| 113 |
+
can't simply return the __slots__ attribute. We must walk down
|
| 114 |
+
the Method Resolution Order and concatenate the __slots__ of each
|
| 115 |
+
class found there. (This assumes classes don't modify their
|
| 116 |
+
__slots__ attribute to misrepresent their slots after the class is
|
| 117 |
+
defined.)
|
| 118 |
+
"""
|
| 119 |
+
|
| 120 |
+
# Get the value from a cache in the class if possible
|
| 121 |
+
names = cls.__dict__.get("__slotnames__")
|
| 122 |
+
if names is not None:
|
| 123 |
+
return names
|
| 124 |
+
|
| 125 |
+
# Not cached -- calculate the value
|
| 126 |
+
names = []
|
| 127 |
+
if not hasattr(cls, "__slots__"):
|
| 128 |
+
# This class has no slots
|
| 129 |
+
pass
|
| 130 |
+
else:
|
| 131 |
+
# Slots found -- gather slot names from all base classes
|
| 132 |
+
for c in cls.__mro__:
|
| 133 |
+
if "__slots__" in c.__dict__:
|
| 134 |
+
slots = c.__dict__['__slots__']
|
| 135 |
+
# if class has a single slot, it can be given as a string
|
| 136 |
+
if isinstance(slots, str):
|
| 137 |
+
slots = (slots,)
|
| 138 |
+
for name in slots:
|
| 139 |
+
# special descriptors
|
| 140 |
+
if name in ("__dict__", "__weakref__"):
|
| 141 |
+
continue
|
| 142 |
+
# mangled names
|
| 143 |
+
elif name.startswith('__') and not name.endswith('__'):
|
| 144 |
+
stripped = c.__name__.lstrip('_')
|
| 145 |
+
if stripped:
|
| 146 |
+
names.append('_%s%s' % (stripped, name))
|
| 147 |
+
else:
|
| 148 |
+
names.append(name)
|
| 149 |
+
else:
|
| 150 |
+
names.append(name)
|
| 151 |
+
|
| 152 |
+
# Cache the outcome in the class if at all possible
|
| 153 |
+
try:
|
| 154 |
+
cls.__slotnames__ = names
|
| 155 |
+
except:
|
| 156 |
+
pass # But don't die if we can't
|
| 157 |
+
|
| 158 |
+
return names
|
| 159 |
+
|
| 160 |
+
# A registry of extension codes. This is an ad-hoc compression
|
| 161 |
+
# mechanism. Whenever a global reference to <module>, <name> is about
|
| 162 |
+
# to be pickled, the (<module>, <name>) tuple is looked up here to see
|
| 163 |
+
# if it is a registered extension code for it. Extension codes are
|
| 164 |
+
# universal, so that the meaning of a pickle does not depend on
|
| 165 |
+
# context. (There are also some codes reserved for local use that
|
| 166 |
+
# don't have this restriction.) Codes are positive ints; 0 is
|
| 167 |
+
# reserved.
|
| 168 |
+
|
| 169 |
+
_extension_registry = {} # key -> code
|
| 170 |
+
_inverted_registry = {} # code -> key
|
| 171 |
+
_extension_cache = {} # code -> object
|
| 172 |
+
# Don't ever rebind those names: pickling grabs a reference to them when
|
| 173 |
+
# it's initialized, and won't see a rebinding.
|
| 174 |
+
|
| 175 |
+
def add_extension(module, name, code):
|
| 176 |
+
"""Register an extension code."""
|
| 177 |
+
code = int(code)
|
| 178 |
+
if not 1 <= code <= 0x7fffffff:
|
| 179 |
+
raise ValueError("code out of range")
|
| 180 |
+
key = (module, name)
|
| 181 |
+
if (_extension_registry.get(key) == code and
|
| 182 |
+
_inverted_registry.get(code) == key):
|
| 183 |
+
return # Redundant registrations are benign
|
| 184 |
+
if key in _extension_registry:
|
| 185 |
+
raise ValueError("key %s is already registered with code %s" %
|
| 186 |
+
(key, _extension_registry[key]))
|
| 187 |
+
if code in _inverted_registry:
|
| 188 |
+
raise ValueError("code %s is already in use for key %s" %
|
| 189 |
+
(code, _inverted_registry[code]))
|
| 190 |
+
_extension_registry[key] = code
|
| 191 |
+
_inverted_registry[code] = key
|
| 192 |
+
|
| 193 |
+
def remove_extension(module, name, code):
|
| 194 |
+
"""Unregister an extension code. For testing only."""
|
| 195 |
+
key = (module, name)
|
| 196 |
+
if (_extension_registry.get(key) != code or
|
| 197 |
+
_inverted_registry.get(code) != key):
|
| 198 |
+
raise ValueError("key %s is not registered with code %s" %
|
| 199 |
+
(key, code))
|
| 200 |
+
del _extension_registry[key]
|
| 201 |
+
del _inverted_registry[code]
|
| 202 |
+
if code in _extension_cache:
|
| 203 |
+
del _extension_cache[code]
|
| 204 |
+
|
| 205 |
+
def clear_extension_cache():
|
| 206 |
+
_extension_cache.clear()
|
| 207 |
+
|
| 208 |
+
# Standard extension code assignments
|
| 209 |
+
|
| 210 |
+
# Reserved ranges
|
| 211 |
+
|
| 212 |
+
# First Last Count Purpose
|
| 213 |
+
# 1 127 127 Reserved for Python standard library
|
| 214 |
+
# 128 191 64 Reserved for Zope
|
| 215 |
+
# 192 239 48 Reserved for 3rd parties
|
| 216 |
+
# 240 255 16 Reserved for private use (will never be assigned)
|
| 217 |
+
# 256 Inf Inf Reserved for future assignment
|
| 218 |
+
|
| 219 |
+
# Extension codes are assigned by the Python Software Foundation.
|
evalkit_tf446/lib/python3.10/decimal.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
try:
|
| 3 |
+
from _decimal import *
|
| 4 |
+
from _decimal import __doc__
|
| 5 |
+
from _decimal import __version__
|
| 6 |
+
from _decimal import __libmpdec_version__
|
| 7 |
+
except ImportError:
|
| 8 |
+
from _pydecimal import *
|
| 9 |
+
from _pydecimal import __doc__
|
| 10 |
+
from _pydecimal import __version__
|
| 11 |
+
from _pydecimal import __libmpdec_version__
|
evalkit_tf446/lib/python3.10/doctest.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf446/lib/python3.10/enum.py
ADDED
|
@@ -0,0 +1,1053 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from types import MappingProxyType, DynamicClassAttribute
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
# Names exported by `from enum import *` ('Flag', 'IntFlag', and 'unique'
# are defined further down in the module).
__all__ = [
        'EnumMeta',
        'Enum', 'IntEnum', 'Flag', 'IntFlag',
        'auto', 'unique',
        ]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _is_descriptor(obj):
|
| 13 |
+
"""
|
| 14 |
+
Returns True if obj is a descriptor, False otherwise.
|
| 15 |
+
"""
|
| 16 |
+
return (
|
| 17 |
+
hasattr(obj, '__get__') or
|
| 18 |
+
hasattr(obj, '__set__') or
|
| 19 |
+
hasattr(obj, '__delete__')
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
def _is_dunder(name):
|
| 23 |
+
"""
|
| 24 |
+
Returns True if a __dunder__ name, False otherwise.
|
| 25 |
+
"""
|
| 26 |
+
return (
|
| 27 |
+
len(name) > 4 and
|
| 28 |
+
name[:2] == name[-2:] == '__' and
|
| 29 |
+
name[2] != '_' and
|
| 30 |
+
name[-3] != '_'
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
def _is_sunder(name):
|
| 34 |
+
"""
|
| 35 |
+
Returns True if a _sunder_ name, False otherwise.
|
| 36 |
+
"""
|
| 37 |
+
return (
|
| 38 |
+
len(name) > 2 and
|
| 39 |
+
name[0] == name[-1] == '_' and
|
| 40 |
+
name[1:2] != '_' and
|
| 41 |
+
name[-2:-1] != '_'
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
def _is_private(cls_name, name):
|
| 45 |
+
# do not use `re` as `re` imports `enum`
|
| 46 |
+
pattern = '_%s__' % (cls_name, )
|
| 47 |
+
pat_len = len(pattern)
|
| 48 |
+
if (
|
| 49 |
+
len(name) > pat_len
|
| 50 |
+
and name.startswith(pattern)
|
| 51 |
+
and name[pat_len:pat_len+1] != ['_']
|
| 52 |
+
and (name[-1] != '_' or name[-2] != '_')
|
| 53 |
+
):
|
| 54 |
+
return True
|
| 55 |
+
else:
|
| 56 |
+
return False
|
| 57 |
+
|
| 58 |
+
def _make_class_unpicklable(cls):
|
| 59 |
+
"""
|
| 60 |
+
Make the given class un-picklable.
|
| 61 |
+
"""
|
| 62 |
+
def _break_on_call_reduce(self, proto):
|
| 63 |
+
raise TypeError('%r cannot be pickled' % self)
|
| 64 |
+
cls.__reduce_ex__ = _break_on_call_reduce
|
| 65 |
+
cls.__module__ = '<unknown>'
|
| 66 |
+
|
| 67 |
+
# Unique sentinel marking an ``auto()`` instance whose value has not yet
# been assigned by the enum machinery.
_auto_null = object()
class auto:
    """
    Instances are replaced with an appropriate value in Enum class suites.
    """
    # starts out as the sentinel; _EnumDict.__setitem__ replaces it with the
    # result of _generate_next_value_ when the member is defined
    value = _auto_null
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class _EnumDict(dict):
    """
    Track enum member order and ensure member names are not reused.

    EnumMeta will use the names found in self._member_names as the
    enumeration member names.
    """
    def __init__(self):
        super().__init__()
        # prospective member names, in definition order
        self._member_names = []
        # values assigned so far (feeds _generate_next_value_ for auto())
        self._last_values = []
        # names listed in _ignore_ -- assignments to these are not members
        self._ignore = []
        # set once an auto() value has been generated; after that it is too
        # late to (re)define _generate_next_value_
        self._auto_called = False

    def __setitem__(self, key, value):
        """
        Changes anything not dundered or not a descriptor.

        If an enum member name is used twice, an error is raised; duplicate
        values are not checked for.

        Single underscore (sunder) names are reserved.
        """
        if _is_private(self._cls_name, key):
            # name-mangled private attribute; deprecated inside Enum bodies
            import warnings
            warnings.warn(
                    "private variables, such as %r, will be normal attributes in 3.11"
                        % (key, ),
                    DeprecationWarning,
                    stacklevel=2,
                    )
        if _is_sunder(key):
            # only the sunder names the enum machinery itself understands
            # are permitted
            if key not in (
                    '_order_', '_create_pseudo_member_',
                    '_generate_next_value_', '_missing_', '_ignore_',
                    ):
                raise ValueError('_names_ are reserved for future Enum use')
            if key == '_generate_next_value_':
                # check if members already defined as auto()
                if self._auto_called:
                    raise TypeError("_generate_next_value_ must be defined before members")
                setattr(self, '_generate_next_value', value)
            elif key == '_ignore_':
                # accept either a whitespace/comma separated string or an
                # iterable of names
                if isinstance(value, str):
                    value = value.replace(',',' ').split()
                else:
                    value = list(value)
                self._ignore = value
                already = set(value) & set(self._member_names)
                if already:
                    raise ValueError(
                            '_ignore_ cannot specify already set names: %r'
                            % (already, )
                            )
        elif _is_dunder(key):
            if key == '__order__':
                # legacy spelling of _order_
                key = '_order_'
        elif key in self._member_names:
            # descriptor overwriting an enum?
            raise TypeError('Attempted to reuse key: %r' % key)
        elif key in self._ignore:
            pass
        elif not _is_descriptor(value):
            if key in self:
                # enum overwriting a descriptor?
                raise TypeError('%r already defined as: %r' % (key, self[key]))
            if isinstance(value, auto):
                # replace the auto() placeholder with a generated value
                if value.value == _auto_null:
                    value.value = self._generate_next_value(
                            key,
                            1,
                            len(self._member_names),
                            self._last_values[:],
                            )
                    self._auto_called = True
                value = value.value
            self._member_names.append(key)
            self._last_values.append(value)
        super().__setitem__(key, value)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
# until EnumMeta finishes running the first time the Enum class doesn't exist.
# This is also why there are checks in EnumMeta like `if Enum is not None`
Enum = None
|
| 160 |
+
|
| 161 |
+
class EnumMeta(type):
    """
    Metaclass for Enum
    """
    @classmethod
    def __prepare__(metacls, cls, bases, **kwds):
        # check that previous enum members do not exist
        metacls._check_for_existing_members(cls, bases)
        # create the namespace dict
        enum_dict = _EnumDict()
        enum_dict._cls_name = cls
        # inherit previous flags and _generate_next_value_ function
        member_type, first_enum = metacls._get_mixins_(cls, bases)
        if first_enum is not None:
            enum_dict['_generate_next_value_'] = getattr(
                    first_enum, '_generate_next_value_', None,
                    )
        return enum_dict

    def __new__(metacls, cls, bases, classdict, **kwds):
        """
        Build the new enum class: instantiate the members, populate the
        name/value lookup maps, and wire up pickling, __new__, and _order_
        support.
        """
        # an Enum class is final once enumeration items have been defined; it
        # cannot be mixed with other types (int, float, etc.) if it has an
        # inherited __new__ unless a new __new__ is defined (or the resulting
        # class will fail).
        #
        # remove any keys listed in _ignore_
        classdict.setdefault('_ignore_', []).append('_ignore_')
        ignore = classdict['_ignore_']
        for key in ignore:
            classdict.pop(key, None)
        member_type, first_enum = metacls._get_mixins_(cls, bases)
        __new__, save_new, use_args = metacls._find_new_(
                classdict, member_type, first_enum,
                )

        # save enum items into separate mapping so they don't get baked into
        # the new class
        enum_members = {k: classdict[k] for k in classdict._member_names}
        for name in classdict._member_names:
            del classdict[name]

        # adjust the sunders
        _order_ = classdict.pop('_order_', None)

        # check for illegal enum names (any others?)
        invalid_names = set(enum_members) & {'mro', ''}
        if invalid_names:
            raise ValueError('Invalid enum member name: {0}'.format(
                ','.join(invalid_names)))

        # create a default docstring if one has not been provided
        if '__doc__' not in classdict:
            classdict['__doc__'] = 'An enumeration.'

        enum_class = super().__new__(metacls, cls, bases, classdict, **kwds)
        enum_class._member_names_ = []               # names in definition order
        enum_class._member_map_ = {}                 # name->value map
        enum_class._member_type_ = member_type

        # save DynamicClassAttribute attributes from super classes so we know
        # if we can take the shortcut of storing members in the class dict
        dynamic_attributes = {
                k for c in enum_class.mro()
                for k, v in c.__dict__.items()
                if isinstance(v, DynamicClassAttribute)
                }

        # Reverse value->name map for hashable values.
        enum_class._value2member_map_ = {}

        # If a custom type is mixed into the Enum, and it does not know how
        # to pickle itself, pickle.dumps will succeed but pickle.loads will
        # fail.  Rather than have the error show up later and possibly far
        # from the source, sabotage the pickle protocol for this class so
        # that pickle.dumps also fails.
        #
        # However, if the new class implements its own __reduce_ex__, do not
        # sabotage -- it's on them to make sure it works correctly.  We use
        # __reduce_ex__ instead of any of the others as it is preferred by
        # pickle over __reduce__, and it handles all pickle protocols.
        if '__reduce_ex__' not in classdict:
            if member_type is not object:
                methods = ('__getnewargs_ex__', '__getnewargs__',
                        '__reduce_ex__', '__reduce__')
                if not any(m in member_type.__dict__ for m in methods):
                    if '__new__' in classdict:
                        # too late, sabotage
                        _make_class_unpicklable(enum_class)
                    else:
                        # final attempt to verify that pickling would work:
                        # travel mro until __new__ is found, checking for
                        # __reduce__ and friends along the way -- if any of them
                        # are found before/when __new__ is found, pickling should
                        # work
                        sabotage = None
                        for chain in bases:
                            for base in chain.__mro__:
                                if base is object:
                                    continue
                                elif any(m in base.__dict__ for m in methods):
                                    # found one, we're good
                                    sabotage = False
                                    break
                                elif '__new__' in base.__dict__:
                                    # not good
                                    sabotage = True
                                    break
                            if sabotage is not None:
                                break
                        if sabotage:
                            _make_class_unpicklable(enum_class)
        # instantiate them, checking for duplicates as we go
        # we instantiate first instead of checking for duplicates first in case
        # a custom __new__ is doing something funky with the values -- such as
        # auto-numbering ;)
        for member_name in classdict._member_names:
            value = enum_members[member_name]
            if not isinstance(value, tuple):
                args = (value, )
            else:
                args = value
            if member_type is tuple:   # special case for tuple enums
                args = (args, )     # wrap it one more time
            if not use_args:
                enum_member = __new__(enum_class)
                if not hasattr(enum_member, '_value_'):
                    enum_member._value_ = value
            else:
                enum_member = __new__(enum_class, *args)
                if not hasattr(enum_member, '_value_'):
                    if member_type is object:
                        enum_member._value_ = value
                    else:
                        enum_member._value_ = member_type(*args)
            value = enum_member._value_
            enum_member._name_ = member_name
            enum_member.__objclass__ = enum_class
            enum_member.__init__(*args)
            # If another member with the same value was already defined, the
            # new member becomes an alias to the existing one.
            for name, canonical_member in enum_class._member_map_.items():
                if canonical_member._value_ == enum_member._value_:
                    enum_member = canonical_member
                    break
            else:
                # Aliases don't appear in member names (only in __members__).
                enum_class._member_names_.append(member_name)
            # performance boost for any member that would not shadow
            # a DynamicClassAttribute
            if member_name not in dynamic_attributes:
                setattr(enum_class, member_name, enum_member)
            # now add to _member_map_
            enum_class._member_map_[member_name] = enum_member
            try:
                # This may fail if value is not hashable. We can't add the value
                # to the map, and by-value lookups for this value will be
                # linear.
                enum_class._value2member_map_[value] = enum_member
            except TypeError:
                pass

        # double check that repr and friends are not the mixin's or various
        # things break (such as pickle)
        # however, if the method is defined in the Enum itself, don't replace
        # it
        for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
            if name in classdict:
                continue
            class_method = getattr(enum_class, name)
            obj_method = getattr(member_type, name, None)
            enum_method = getattr(first_enum, name, None)
            if obj_method is not None and obj_method is class_method:
                setattr(enum_class, name, enum_method)

        # replace any other __new__ with our own (as long as Enum is not None,
        # anyway) -- again, this is to support pickle
        if Enum is not None:
            # if the user defined their own __new__, save it before it gets
            # clobbered in case they subclass later
            if save_new:
                enum_class.__new_member__ = __new__
            enum_class.__new__ = Enum.__new__

        # py3 support for definition order (helps keep py2/py3 code in sync)
        if _order_ is not None:
            if isinstance(_order_, str):
                _order_ = _order_.replace(',', ' ').split()
            if _order_ != enum_class._member_names_:
                raise TypeError('member order does not match _order_')

        return enum_class

    def __bool__(self):
        """
        classes/types should always be True.
        """
        return True

    def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1):
        """
        Either returns an existing member, or creates a new enum class.

        This method is used both when an enum class is given a value to match
        to an enumeration member (i.e. Color(3)) and for the functional API
        (i.e. Color = Enum('Color', names='RED GREEN BLUE')).

        When used for the functional API:

        `value` will be the name of the new class.

        `names` should be either a string of white-space/comma delimited names
        (values will start at `start`), or an iterator/mapping of name, value pairs.

        `module` should be set to the module this class is being created in;
        if it is not set, an attempt to find that module will be made, but if
        it fails the class will not be picklable.

        `qualname` should be set to the actual location this class can be found
        at in its module; by default it is set to the global scope.  If this is
        not correct, unpickling will fail in some circumstances.

        `type`, if set, will be mixed in as the first base class.
        """
        if names is None:  # simple value lookup
            return cls.__new__(cls, value)
        # otherwise, functional API: we're creating a new Enum type
        return cls._create_(
                value,
                names,
                module=module,
                qualname=qualname,
                type=type,
                start=start,
                )

    def __contains__(cls, obj):
        # only Enum members may be tested for membership; other types raise
        # (with a deprecation warning about the planned 3.12 behavior change)
        if not isinstance(obj, Enum):
            import warnings
            warnings.warn(
                    "in 3.12 __contains__ will no longer raise TypeError, but will return True if\n"
                    "obj is a member or a member's value",
                    DeprecationWarning,
                    stacklevel=2,
                    )
            raise TypeError(
                "unsupported operand type(s) for 'in': '%s' and '%s'" % (
                    type(obj).__qualname__, cls.__class__.__qualname__))
        return isinstance(obj, cls) and obj._name_ in cls._member_map_

    def __delattr__(cls, attr):
        # nicer error message when someone tries to delete an attribute
        # (see issue19025).
        if attr in cls._member_map_:
            raise AttributeError("%s: cannot delete Enum member." % cls.__name__)
        super().__delattr__(attr)

    def __dir__(self):
        return (
                ['__class__', '__doc__', '__members__', '__module__']
                + self._member_names_
                )

    def __getattr__(cls, name):
        """
        Return the enum member matching `name`

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        if _is_dunder(name):
            raise AttributeError(name)
        try:
            return cls._member_map_[name]
        except KeyError:
            raise AttributeError(name) from None

    def __getitem__(cls, name):
        # by-name member lookup, e.g. Color['RED']
        return cls._member_map_[name]

    def __iter__(cls):
        """
        Returns members in definition order.
        """
        return (cls._member_map_[name] for name in cls._member_names_)

    def __len__(cls):
        # aliases are excluded (they are not in _member_names_)
        return len(cls._member_names_)

    @property
    def __members__(cls):
        """
        Returns a mapping of member name->value.

        This mapping lists all enum members, including aliases. Note that this
        is a read-only view of the internal mapping.
        """
        return MappingProxyType(cls._member_map_)

    def __repr__(cls):
        return "<enum %r>" % cls.__name__

    def __reversed__(cls):
        """
        Returns members in reverse definition order.
        """
        return (cls._member_map_[name] for name in reversed(cls._member_names_))

    def __setattr__(cls, name, value):
        """
        Block attempts to reassign Enum members.

        A simple assignment to the class namespace only changes one of the
        several possible ways to get an Enum member from the Enum class,
        resulting in an inconsistent Enumeration.
        """
        member_map = cls.__dict__.get('_member_map_', {})
        if name in member_map:
            raise AttributeError('Cannot reassign members.')
        super().__setattr__(name, value)

    def _create_(cls, class_name, names, *, module=None, qualname=None, type=None, start=1):
        """
        Convenience method to create a new Enum class.

        `names` can be:

        * A string containing member names, separated either with spaces or
          commas.  Values are incremented by 1 from `start`.
        * An iterable of member names.  Values are incremented by 1 from `start`.
        * An iterable of (member name, value) pairs.
        * A mapping of member name -> value pairs.
        """
        metacls = cls.__class__
        bases = (cls, ) if type is None else (type, cls)
        _, first_enum = cls._get_mixins_(cls, bases)
        classdict = metacls.__prepare__(class_name, bases)

        # special processing needed for names?
        if isinstance(names, str):
            names = names.replace(',', ' ').split()
        if isinstance(names, (tuple, list)) and names and isinstance(names[0], str):
            # bare name list -> generate values via _generate_next_value_
            original_names, names = names, []
            last_values = []
            for count, name in enumerate(original_names):
                value = first_enum._generate_next_value_(name, start, count, last_values[:])
                last_values.append(value)
                names.append((name, value))

        # Here, names is either an iterable of (name, value) or a mapping.
        for item in names:
            if isinstance(item, str):
                member_name, member_value = item, names[item]
            else:
                member_name, member_value = item
            classdict[member_name] = member_value
        enum_class = metacls.__new__(metacls, class_name, bases, classdict)

        # TODO: replace the frame hack if a blessed way to know the calling
        # module is ever developed
        if module is None:
            try:
                module = sys._getframe(2).f_globals['__name__']
            except (AttributeError, ValueError, KeyError):
                pass
        if module is None:
            _make_class_unpicklable(enum_class)
        else:
            enum_class.__module__ = module
        if qualname is not None:
            enum_class.__qualname__ = qualname

        return enum_class

    def _convert_(cls, name, module, filter, source=None):
        """
        Create a new Enum subclass that replaces a collection of global constants
        """
        # convert all constants from source (or module) that pass filter() to
        # a new Enum called name, and export the enum and its members back to
        # module;
        # also, replace the __reduce_ex__ method so unpickling works in
        # previous Python versions
        module_globals = vars(sys.modules[module])
        if source:
            source = vars(source)
        else:
            source = module_globals
        # _value2member_map_ is populated in the same order every time
        # for a consistent reverse mapping of number to name when there
        # are multiple names for the same number.
        members = [
                (name, value)
                for name, value in source.items()
                if filter(name)]
        try:
            # sort by value
            members.sort(key=lambda t: (t[1], t[0]))
        except TypeError:
            # unless some values aren't comparable, in which case sort by name
            members.sort(key=lambda t: t[0])
        cls = cls(name, members, module=module)
        cls.__reduce_ex__ = _reduce_ex_by_name
        module_globals.update(cls.__members__)
        module_globals[name] = cls
        return cls

    @staticmethod
    def _check_for_existing_members(class_name, bases):
        # subclassing an enum that already has members is forbidden
        for chain in bases:
            for base in chain.__mro__:
                if issubclass(base, Enum) and base._member_names_:
                    raise TypeError(
                            "%s: cannot extend enumeration %r"
                            % (class_name, base.__name__)
                            )

    @staticmethod
    def _get_mixins_(class_name, bases):
        """
        Returns the type for creating enum members, and the first inherited
        enum class.

        bases: the tuple of bases that was given to __new__
        """
        if not bases:
            return object, Enum

        def _find_data_type(bases):
            # locate the concrete mixed-in data type (int, str, ...), if any
            data_types = set()
            for chain in bases:
                candidate = None
                for base in chain.__mro__:
                    if base is object:
                        continue
                    elif issubclass(base, Enum):
                        if base._member_type_ is not object:
                            data_types.add(base._member_type_)
                            break
                    elif '__new__' in base.__dict__:
                        if issubclass(base, Enum):
                            continue
                        data_types.add(candidate or base)
                        break
                    else:
                        candidate = candidate or base
            if len(data_types) > 1:
                raise TypeError('%r: too many data types: %r' % (class_name, data_types))
            elif data_types:
                return data_types.pop()
            else:
                return None

        # ensure final parent class is an Enum derivative, find any concrete
        # data type, and check that Enum has no members
        first_enum = bases[-1]
        if not issubclass(first_enum, Enum):
            raise TypeError("new enumerations should be created as "
                    "`EnumName([mixin_type, ...] [data_type,] enum_type)`")
        member_type = _find_data_type(bases) or object
        if first_enum._member_names_:
            raise TypeError("Cannot extend enumerations")
        return member_type, first_enum

    @staticmethod
    def _find_new_(classdict, member_type, first_enum):
        """
        Returns the __new__ to be used for creating the enum members.

        classdict: the class dictionary given to __new__
        member_type: the data type whose __new__ will be used by default
        first_enum: enumeration to check for an overriding __new__
        """
        # now find the correct __new__, checking to see of one was defined
        # by the user; also check earlier enum classes in case a __new__ was
        # saved as __new_member__
        __new__ = classdict.get('__new__', None)

        # should __new__ be saved as __new_member__ later?
        save_new = __new__ is not None

        if __new__ is None:
            # check all possibles for __new_member__ before falling back to
            # __new__
            for method in ('__new_member__', '__new__'):
                for possible in (member_type, first_enum):
                    target = getattr(possible, method, None)
                    if target not in {
                            None,
                            None.__new__,
                            object.__new__,
                            Enum.__new__,
                            }:
                        __new__ = target
                        break
                if __new__ is not None:
                    break
            else:
                __new__ = object.__new__

        # if a non-object.__new__ is used then whatever value/tuple was
        # assigned to the enum member name will be passed to __new__ and to the
        # new enum member's __init__
        if __new__ is object.__new__:
            use_args = False
        else:
            use_args = True
        return __new__, save_new, use_args
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
class Enum(metaclass=EnumMeta):
|
| 673 |
+
"""
|
| 674 |
+
Generic enumeration.
|
| 675 |
+
|
| 676 |
+
Derive from this class to define new enumerations.
|
| 677 |
+
"""
|
| 678 |
+
def __new__(cls, value):
|
| 679 |
+
# all enum instances are actually created during class construction
|
| 680 |
+
# without calling this method; this method is called by the metaclass'
|
| 681 |
+
# __call__ (i.e. Color(3) ), and by pickle
|
| 682 |
+
if type(value) is cls:
|
| 683 |
+
# For lookups like Color(Color.RED)
|
| 684 |
+
return value
|
| 685 |
+
# by-value search for a matching enum member
|
| 686 |
+
# see if it's in the reverse mapping (for hashable values)
|
| 687 |
+
try:
|
| 688 |
+
return cls._value2member_map_[value]
|
| 689 |
+
except KeyError:
|
| 690 |
+
# Not found, no need to do long O(n) search
|
| 691 |
+
pass
|
| 692 |
+
except TypeError:
|
| 693 |
+
# not there, now do long search -- O(n) behavior
|
| 694 |
+
for member in cls._member_map_.values():
|
| 695 |
+
if member._value_ == value:
|
| 696 |
+
return member
|
| 697 |
+
# still not found -- try _missing_ hook
|
| 698 |
+
try:
|
| 699 |
+
exc = None
|
| 700 |
+
result = cls._missing_(value)
|
| 701 |
+
except Exception as e:
|
| 702 |
+
exc = e
|
| 703 |
+
result = None
|
| 704 |
+
try:
|
| 705 |
+
if isinstance(result, cls):
|
| 706 |
+
return result
|
| 707 |
+
else:
|
| 708 |
+
ve_exc = ValueError("%r is not a valid %s" % (value, cls.__qualname__))
|
| 709 |
+
if result is None and exc is None:
|
| 710 |
+
raise ve_exc
|
| 711 |
+
elif exc is None:
|
| 712 |
+
exc = TypeError(
|
| 713 |
+
'error in %s._missing_: returned %r instead of None or a valid member'
|
| 714 |
+
% (cls.__name__, result)
|
| 715 |
+
)
|
| 716 |
+
if not isinstance(exc, ValueError):
|
| 717 |
+
exc.__context__ = ve_exc
|
| 718 |
+
raise exc
|
| 719 |
+
finally:
|
| 720 |
+
# ensure all variables that could hold an exception are destroyed
|
| 721 |
+
exc = None
|
| 722 |
+
ve_exc = None
|
| 723 |
+
|
| 724 |
+
def _generate_next_value_(name, start, count, last_values):
|
| 725 |
+
"""
|
| 726 |
+
Generate the next value when not given.
|
| 727 |
+
|
| 728 |
+
name: the name of the member
|
| 729 |
+
start: the initial start value or None
|
| 730 |
+
count: the number of existing members
|
| 731 |
+
last_value: the last value assigned or None
|
| 732 |
+
"""
|
| 733 |
+
for last_value in reversed(last_values):
|
| 734 |
+
try:
|
| 735 |
+
return last_value + 1
|
| 736 |
+
except TypeError:
|
| 737 |
+
pass
|
| 738 |
+
else:
|
| 739 |
+
return start
|
| 740 |
+
|
| 741 |
+
@classmethod
|
| 742 |
+
def _missing_(cls, value):
|
| 743 |
+
return None
|
| 744 |
+
|
| 745 |
+
def __repr__(self):
|
| 746 |
+
return "<%s.%s: %r>" % (
|
| 747 |
+
self.__class__.__name__, self._name_, self._value_)
|
| 748 |
+
|
| 749 |
+
def __str__(self):
|
| 750 |
+
return "%s.%s" % (self.__class__.__name__, self._name_)
|
| 751 |
+
|
| 752 |
+
def __dir__(self):
|
| 753 |
+
"""
|
| 754 |
+
Returns all members and all public methods
|
| 755 |
+
"""
|
| 756 |
+
added_behavior = [
|
| 757 |
+
m
|
| 758 |
+
for cls in self.__class__.mro()
|
| 759 |
+
for m in cls.__dict__
|
| 760 |
+
if m[0] != '_' and m not in self._member_map_
|
| 761 |
+
] + [m for m in self.__dict__ if m[0] != '_']
|
| 762 |
+
return (['__class__', '__doc__', '__module__'] + added_behavior)
|
| 763 |
+
|
| 764 |
+
def __format__(self, format_spec):
|
| 765 |
+
"""
|
| 766 |
+
Returns format using actual value type unless __str__ has been overridden.
|
| 767 |
+
"""
|
| 768 |
+
# mixed-in Enums should use the mixed-in type's __format__, otherwise
|
| 769 |
+
# we can get strange results with the Enum name showing up instead of
|
| 770 |
+
# the value
|
| 771 |
+
|
| 772 |
+
# pure Enum branch, or branch with __str__ explicitly overridden
|
| 773 |
+
str_overridden = type(self).__str__ not in (Enum.__str__, Flag.__str__)
|
| 774 |
+
if self._member_type_ is object or str_overridden:
|
| 775 |
+
cls = str
|
| 776 |
+
val = str(self)
|
| 777 |
+
# mix-in branch
|
| 778 |
+
else:
|
| 779 |
+
cls = self._member_type_
|
| 780 |
+
val = self._value_
|
| 781 |
+
return cls.__format__(val, format_spec)
|
| 782 |
+
|
| 783 |
+
def __hash__(self):
|
| 784 |
+
return hash(self._name_)
|
| 785 |
+
|
| 786 |
+
def __reduce_ex__(self, proto):
|
| 787 |
+
return self.__class__, (self._value_, )
|
| 788 |
+
|
| 789 |
+
# DynamicClassAttribute is used to provide access to the `name` and
# `value` properties of enum members while keeping some measure of
# protection from modification, while still allowing for an enumeration
# to have members named `name` and `value`. This works because enumeration
# members are not set directly on the enum class -- __getattr__ is
# used to look them up.

@DynamicClassAttribute
def name(self):
    """The name of the Enum member."""
    return self._name_
|
| 800 |
+
|
| 801 |
+
@DynamicClassAttribute
def value(self):
    """The value of the Enum member."""
    # DynamicClassAttribute (rather than property) lets an enumeration
    # also define a member literally named `value`.
    return self._value_
|
| 805 |
+
|
| 806 |
+
|
| 807 |
+
class IntEnum(int, Enum):
    """Enum where members are also (and must be) ints"""
    # Mixing in int makes members usable anywhere a plain int is expected.
|
| 809 |
+
|
| 810 |
+
|
| 811 |
+
def _reduce_ex_by_name(self, proto):
    # Pickle helper: reduce a member to its bare name (assigned as
    # __reduce_ex__ on enums whose members should pickle by name).
    return self.name
|
| 813 |
+
|
| 814 |
+
class Flag(Enum):
    """
    Support for flags
    """

    def _generate_next_value_(name, start, count, last_values):
        """
        Generate the next value when not given.

        name: the name of the member
        start: the initial start value or None
        count: the number of existing members
        last_values: the values assigned so far
        """
        if not count:
            # first member: use the requested start, defaulting to 1
            return start if start is not None else 1
        for last_value in reversed(last_values):
            try:
                high_bit = _high_bit(last_value)
                break
            except Exception:
                raise TypeError('Invalid Flag value: %r' % last_value) from None
        # next unused power of two above the highest bit seen so far
        return 2 ** (high_bit+1)

    @classmethod
    def _missing_(cls, value):
        """
        Returns member (possibly creating it) if one can be found for value.
        """
        original_value = value
        if value < 0:
            # Negative values are handled as the inverse of their
            # complement: build the member for ~value, then invert below.
            value = ~value
        possible_member = cls._create_pseudo_member_(value)
        if original_value < 0:
            possible_member = ~possible_member
        return possible_member

    @classmethod
    def _create_pseudo_member_(cls, value):
        """
        Create a composite member iff value contains only members.
        """
        pseudo_member = cls._value2member_map_.get(value, None)
        if pseudo_member is None:
            # verify all bits are accounted for
            _, extra_flags = _decompose(cls, value)
            if extra_flags:
                raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
            # construct a singleton enum pseudo-member (it has no name)
            pseudo_member = object.__new__(cls)
            pseudo_member._name_ = None
            pseudo_member._value_ = value
            # use setdefault in case another thread already created a composite
            # with this value
            pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
        return pseudo_member

    def __contains__(self, other):
        """
        Returns True if self has at least the same flags set as other.
        """
        if not isinstance(other, self.__class__):
            raise TypeError(
                "unsupported operand type(s) for 'in': '%s' and '%s'" % (
                    type(other).__qualname__, self.__class__.__qualname__))
        # every bit of other must also be set in self
        return other._value_ & self._value_ == other._value_

    def __repr__(self):
        cls = self.__class__
        if self._name_ is not None:
            return '<%s.%s: %r>' % (cls.__name__, self._name_, self._value_)
        # composite pseudo-member: show it as a '|' of its parts
        members, uncovered = _decompose(cls, self._value_)
        return '<%s.%s: %r>' % (
            cls.__name__,
            '|'.join([str(m._name_ or m._value_) for m in members]),
            self._value_,
        )

    def __str__(self):
        cls = self.__class__
        if self._name_ is not None:
            return '%s.%s' % (cls.__name__, self._name_)
        members, uncovered = _decompose(cls, self._value_)
        if len(members) == 1 and members[0]._name_ is None:
            # a single unnamed pseudo-member: fall back to its value
            return '%s.%r' % (cls.__name__, members[0]._value_)
        else:
            return '%s.%s' % (
                cls.__name__,
                '|'.join([str(m._name_ or m._value_) for m in members]),
            )

    def __bool__(self):
        # a flag with no bits set is falsy
        return bool(self._value_)

    def __or__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__class__(self._value_ | other._value_)

    def __and__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__class__(self._value_ & other._value_)

    def __xor__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__class__(self._value_ ^ other._value_)

    def __invert__(self):
        # The complement is the union of every member that is not part of
        # self and shares no bits with self.
        members, uncovered = _decompose(self.__class__, self._value_)
        inverted = self.__class__(0)
        for m in self.__class__:
            if m not in members and not (m._value_ & self._value_):
                inverted = inverted | m
        return self.__class__(inverted)
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
class IntFlag(int, Flag):
    """
    Support for integer-based Flags
    """

    @classmethod
    def _missing_(cls, value):
        """
        Returns member (possibly creating it) if one can be found for value.
        """
        if not isinstance(value, int):
            raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
        new_member = cls._create_pseudo_member_(value)
        return new_member

    @classmethod
    def _create_pseudo_member_(cls, value):
        """
        Create a composite member iff value contains only members.
        """
        pseudo_member = cls._value2member_map_.get(value, None)
        if pseudo_member is None:
            need_to_create = [value]
            # get unaccounted for bits
            _, extra_flags = _decompose(cls, value)
            # peel off one high bit per iteration until all extra bits have
            # been queued for creation
            while extra_flags:
                bit = _high_bit(extra_flags)
                flag_value = 2 ** bit
                if (flag_value not in cls._value2member_map_ and
                        flag_value not in need_to_create
                    ):
                    need_to_create.append(flag_value)
                if extra_flags == -flag_value:
                    # only the sign bit remains; clearing it via XOR would
                    # not terminate, so stop explicitly
                    extra_flags = 0
                else:
                    extra_flags ^= flag_value
            for value in reversed(need_to_create):
                # construct singleton pseudo-members
                pseudo_member = int.__new__(cls, value)
                pseudo_member._name_ = None
                pseudo_member._value_ = value
                # use setdefault in case another thread already created a composite
                # with this value
                pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
        return pseudo_member

    def __or__(self, other):
        # unlike Flag, plain ints are accepted and coerced to this class
        if not isinstance(other, (self.__class__, int)):
            return NotImplemented
        result = self.__class__(self._value_ | self.__class__(other)._value_)
        return result

    def __and__(self, other):
        if not isinstance(other, (self.__class__, int)):
            return NotImplemented
        return self.__class__(self._value_ & self.__class__(other)._value_)

    def __xor__(self, other):
        if not isinstance(other, (self.__class__, int)):
            return NotImplemented
        return self.__class__(self._value_ ^ self.__class__(other)._value_)

    # the operators are symmetric, so reflected forms reuse them
    __ror__ = __or__
    __rand__ = __and__
    __rxor__ = __xor__

    def __invert__(self):
        result = self.__class__(~self._value_)
        return result
|
| 1003 |
+
|
| 1004 |
+
|
| 1005 |
+
def _high_bit(value):
    """
    returns index of the highest set bit; 0 yields -1.

    Note: int.bit_length() counts the bits of the magnitude, so negative
    inputs return the high-bit index of abs(value), not -1.
    """
    return value.bit_length() - 1
|
| 1010 |
+
|
| 1011 |
+
def unique(enumeration):
    """
    Class decorator for enumerations ensuring unique member values.

    Raises ValueError listing every alias if any member value is reused;
    otherwise returns the enumeration unchanged.
    """
    # An alias is an entry whose key differs from its canonical member name.
    aliases = [(alias, member.name)
               for alias, member in enumeration.__members__.items()
               if alias != member.name]
    if aliases:
        alias_details = ', '.join(
            "%s -> %s" % pair for pair in aliases)
        raise ValueError('duplicate values found in %r: %s' %
                         (enumeration, alias_details))
    return enumeration
|
| 1025 |
+
|
| 1026 |
+
def _decompose(flag, value):
    """
    Extract all members from the value.

    Returns (members, not_covered): the member list sorted by descending
    value, and the bits of value not claimed by any member.
    """
    # _decompose is only called if the value is not named
    not_covered = value
    negative = value < 0
    members = []
    # first pass: collect every named member fully contained in value
    for member in flag:
        member_value = member.value
        if member_value and member_value & value == member_value:
            members.append(member)
            not_covered &= ~member_value
    if not negative:
        # second pass: pick up single-bit pseudo-members for remaining bits
        tmp = not_covered
        while tmp:
            flag_value = 2 ** _high_bit(tmp)
            if flag_value in flag._value2member_map_:
                members.append(flag._value2member_map_[flag_value])
                not_covered &= ~flag_value
            tmp &= ~flag_value
    if not members and value in flag._value2member_map_:
        members.append(flag._value2member_map_[value])
    members.sort(key=lambda m: m._value_, reverse=True)
    if len(members) > 1 and members[0].value == value:
        # we have the breakdown, don't need the value member itself
        members.pop(0)
    return members, not_covered
|
evalkit_tf446/lib/python3.10/fileinput.py
ADDED
|
@@ -0,0 +1,462 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper class to quickly write a loop over all standard input files.
|
| 2 |
+
|
| 3 |
+
Typical use is:
|
| 4 |
+
|
| 5 |
+
import fileinput
|
| 6 |
+
for line in fileinput.input(encoding="utf-8"):
|
| 7 |
+
process(line)
|
| 8 |
+
|
| 9 |
+
This iterates over the lines of all files listed in sys.argv[1:],
|
| 10 |
+
defaulting to sys.stdin if the list is empty. If a filename is '-' it
|
| 11 |
+
is also replaced by sys.stdin and the optional arguments mode and
|
| 12 |
+
openhook are ignored. To specify an alternative list of filenames,
|
| 13 |
+
pass it as the argument to input(). A single file name is also allowed.
|
| 14 |
+
|
| 15 |
+
Functions filename(), lineno() return the filename and cumulative line
|
| 16 |
+
number of the line that has just been read; filelineno() returns its
|
| 17 |
+
line number in the current file; isfirstline() returns true iff the
|
| 18 |
+
line just read is the first line of its file; isstdin() returns true
|
| 19 |
+
iff the line was read from sys.stdin. Function nextfile() closes the
|
| 20 |
+
current file so that the next iteration will read the first line from
|
| 21 |
+
the next file (if any); lines not read from the file will not count
|
| 22 |
+
towards the cumulative line count; the filename is not changed until
|
| 23 |
+
after the first line of the next file has been read. Function close()
|
| 24 |
+
closes the sequence.
|
| 25 |
+
|
| 26 |
+
Before any lines have been read, filename() returns None and both line
|
| 27 |
+
numbers are zero; nextfile() has no effect. After all lines have been
|
| 28 |
+
read, filename() and the line number functions return the values
|
| 29 |
+
pertaining to the last line read; nextfile() has no effect.
|
| 30 |
+
|
| 31 |
+
All files are opened in text mode by default, you can override this by
|
| 32 |
+
setting the mode parameter to input() or FileInput.__init__().
|
| 33 |
+
If an I/O error occurs during opening or reading a file, the OSError
|
| 34 |
+
exception is raised.
|
| 35 |
+
|
| 36 |
+
If sys.stdin is used more than once, the second and further use will
|
| 37 |
+
return no lines, except perhaps for interactive use, or if it has been
|
| 38 |
+
explicitly reset (e.g. using sys.stdin.seek(0)).
|
| 39 |
+
|
| 40 |
+
Empty files are opened and immediately closed; the only time their
|
| 41 |
+
presence in the list of filenames is noticeable at all is when the
|
| 42 |
+
last file opened is empty.
|
| 43 |
+
|
| 44 |
+
It is possible that the last line of a file doesn't end in a newline
|
| 45 |
+
character; otherwise lines are returned including the trailing
|
| 46 |
+
newline.
|
| 47 |
+
|
| 48 |
+
Class FileInput is the implementation; its methods filename(),
|
| 49 |
+
lineno(), filelineno(), isfirstline(), isstdin(), nextfile() and close()
|
| 50 |
+
correspond to the functions in the module. In addition it has a
|
| 51 |
+
readline() method which returns the next input line, and a
|
| 52 |
+
__getitem__() method which implements the sequence behavior. The
|
| 53 |
+
sequence must be accessed in strictly sequential order; sequence
|
| 54 |
+
access and readline() cannot be mixed.
|
| 55 |
+
|
| 56 |
+
Optional in-place filtering: if the keyword argument inplace=1 is
|
| 57 |
+
passed to input() or to the FileInput constructor, the file is moved
|
| 58 |
+
to a backup file and standard output is directed to the input file.
|
| 59 |
+
This makes it possible to write a filter that rewrites its input file
|
| 60 |
+
in place. If the keyword argument backup=".<some extension>" is also
|
| 61 |
+
given, it specifies the extension for the backup file, and the backup
|
| 62 |
+
file remains around; by default, the extension is ".bak" and it is
|
| 63 |
+
deleted when the output file is closed. In-place filtering is
|
| 64 |
+
disabled when standard input is read. XXX The current implementation
|
| 65 |
+
does not work for MS-DOS 8+3 filesystems.
|
| 66 |
+
"""
|
| 67 |
+
|
| 68 |
+
import io
|
| 69 |
+
import sys, os
|
| 70 |
+
from types import GenericAlias
|
| 71 |
+
|
| 72 |
+
__all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
|
| 73 |
+
"fileno", "isfirstline", "isstdin", "FileInput", "hook_compressed",
|
| 74 |
+
"hook_encoded"]
|
| 75 |
+
|
| 76 |
+
_state = None
|
| 77 |
+
|
| 78 |
+
def input(files=None, inplace=False, backup="", *, mode="r", openhook=None,
          encoding=None, errors=None):
    """Return an instance of the FileInput class, which can be iterated.

    The parameters are passed to the constructor of the FileInput class.
    The returned instance, in addition to being an iterator,
    keeps global state for the functions of this module.
    """
    global _state
    # Only one module-level FileInput may be active at a time; a live
    # _state with an open file means a previous input() was not closed.
    if _state and _state._file:
        raise RuntimeError("input() already active")
    _state = FileInput(files, inplace, backup, mode=mode, openhook=openhook,
                       encoding=encoding, errors=errors)
    return _state
|
| 92 |
+
|
| 93 |
+
def close():
    """Close the sequence."""
    global _state
    # detach the module state before closing so a failing close() cannot
    # leave a half-dead state behind
    state, _state = _state, None
    if state:
        state.close()
|
| 100 |
+
|
| 101 |
+
def nextfile():
    """
    Close the current file so that the next iteration will read the first
    line from the next file (if any); lines not read from the file will
    not count towards the cumulative line count. The filename is not
    changed until after the first line of the next file has been read.
    Before the first line has been read, this function has no effect;
    it cannot be used to skip the first file. After the last line of the
    last file has been read, this function has no effect.
    """
    if _state:
        return _state.nextfile()
    raise RuntimeError("no active input()")
|
| 114 |
+
|
| 115 |
+
def filename():
    """
    Return the name of the file currently being read.
    Before the first line has been read, returns None.
    """
    if _state:
        return _state.filename()
    raise RuntimeError("no active input()")
|
| 123 |
+
|
| 124 |
+
def lineno():
    """
    Return the cumulative line number of the line that has just been read.
    Before the first line has been read, returns 0. After the last line
    of the last file has been read, returns the line number of that line.
    """
    if _state:
        return _state.lineno()
    raise RuntimeError("no active input()")
|
| 133 |
+
|
| 134 |
+
def filelineno():
    """
    Return the line number in the current file. Before the first line
    has been read, returns 0. After the last line of the last file has
    been read, returns the line number of that line within the file.
    """
    if _state:
        return _state.filelineno()
    raise RuntimeError("no active input()")
|
| 143 |
+
|
| 144 |
+
def fileno():
    """
    Return the file number of the current file. When no file is currently
    opened, returns -1.
    """
    if _state:
        return _state.fileno()
    raise RuntimeError("no active input()")
|
| 152 |
+
|
| 153 |
+
def isfirstline():
    """
    Returns true if the line just read is the first line of its file,
    otherwise returns false.
    """
    if _state:
        return _state.isfirstline()
    raise RuntimeError("no active input()")
|
| 161 |
+
|
| 162 |
+
def isstdin():
    """
    Returns true if the last line was read from sys.stdin,
    otherwise returns false.
    """
    if _state:
        return _state.isstdin()
    raise RuntimeError("no active input()")
|
| 170 |
+
|
| 171 |
+
class FileInput:
    """FileInput([files[, inplace[, backup]]], *, mode="r", openhook=None)

    Class FileInput is the implementation of the module; its methods
    filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(),
    nextfile() and close() correspond to the functions of the same name
    in the module.
    In addition it has a readline() method which returns the next
    input line, and a __getitem__() method which implements the
    sequence behavior. The sequence must be accessed in strictly
    sequential order; random access and readline() cannot be mixed.
    """

    def __init__(self, files=None, inplace=False, backup="", *,
                 mode="r", openhook=None, encoding=None, errors=None):
        # Normalize `files` to a tuple of names; None/empty means stdin.
        if isinstance(files, str):
            files = (files,)
        elif isinstance(files, os.PathLike):
            files = (os.fspath(files), )
        else:
            if files is None:
                files = sys.argv[1:]
            if not files:
                files = ('-',)
            else:
                files = tuple(files)
        self._files = files
        self._inplace = inplace
        self._backup = backup
        self._savestdout = None        # original sys.stdout during inplace editing
        self._output = None            # replacement output file during inplace editing
        self._filename = None
        self._startlineno = 0          # cumulative line count before current file
        self._filelineno = 0
        self._file = None
        self._isstdin = False
        self._backupfilename = None
        self._encoding = encoding
        self._errors = errors

        # We can not use io.text_encoding() here because old openhook doesn't
        # take encoding parameter.
        if (sys.flags.warn_default_encoding and
                "b" not in mode and encoding is None and openhook is None):
            import warnings
            warnings.warn("'encoding' argument not specified.",
                          EncodingWarning, 2)

        # restrict mode argument to reading modes
        if mode not in ('r', 'rU', 'U', 'rb'):
            raise ValueError("FileInput opening mode must be one of "
                             "'r', 'rU', 'U' and 'rb'")
        if 'U' in mode:
            import warnings
            warnings.warn("'U' mode is deprecated",
                          DeprecationWarning, 2)
        self._mode = mode
        self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
        if openhook:
            if inplace:
                raise ValueError("FileInput cannot use an opening hook in inplace mode")
            if not callable(openhook):
                raise ValueError("FileInput openhook must be callable")
        self._openhook = openhook

    def __del__(self):
        self.close()

    def close(self):
        """Close the sequence; no further files will be opened."""
        try:
            self.nextfile()
        finally:
            self._files = ()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __iter__(self):
        return self

    def __next__(self):
        while True:
            line = self._readline()
            if line:
                self._filelineno += 1
                return line
            if not self._file:
                raise StopIteration
            self.nextfile()
            # repeat with next file

    def __getitem__(self, i):
        import warnings
        warnings.warn(
            "Support for indexing FileInput objects is deprecated. "
            "Use iterator protocol instead.",
            DeprecationWarning,
            stacklevel=2
        )
        if i != self.lineno():
            raise RuntimeError("accessing lines out of order")
        try:
            return self.__next__()
        except StopIteration:
            raise IndexError("end of input reached")

    def nextfile(self):
        """Close the current file and undo any inplace redirection."""
        # Restore stdout first so later errors cannot leave it redirected.
        savestdout = self._savestdout
        self._savestdout = None
        if savestdout:
            sys.stdout = savestdout

        output = self._output
        self._output = None
        try:
            if output:
                output.close()
        finally:
            file = self._file
            self._file = None
            try:
                del self._readline  # restore FileInput._readline
            except AttributeError:
                pass
            try:
                if file and not self._isstdin:
                    file.close()
            finally:
                backupfilename = self._backupfilename
                self._backupfilename = None
                # remove the backup unless the caller asked to keep it
                if backupfilename and not self._backup:
                    try: os.unlink(backupfilename)
                    except OSError: pass

                self._isstdin = False

    def readline(self):
        while True:
            line = self._readline()
            if line:
                self._filelineno += 1
                return line
            if not self._file:
                return line
            self.nextfile()
            # repeat with next file

    def _readline(self):
        # Fallback readline: opens the next file, then replaces itself on
        # the instance with the opened file's readline (see the last lines).
        if not self._files:
            if 'b' in self._mode:
                return b''
            else:
                return ''
        self._filename = self._files[0]
        self._files = self._files[1:]
        self._startlineno = self.lineno()
        self._filelineno = 0
        self._file = None
        self._isstdin = False
        self._backupfilename = 0

        # EncodingWarning is emitted in __init__() already
        if "b" not in self._mode:
            encoding = self._encoding or "locale"
        else:
            encoding = None

        if self._filename == '-':
            self._filename = '<stdin>'
            if 'b' in self._mode:
                self._file = getattr(sys.stdin, 'buffer', sys.stdin)
            else:
                self._file = sys.stdin
            self._isstdin = True
        else:
            if self._inplace:
                # Move the original aside, read from the backup, and write
                # the edited output to the original name.
                self._backupfilename = (
                    os.fspath(self._filename) + (self._backup or ".bak"))
                try:
                    os.unlink(self._backupfilename)
                except OSError:
                    pass
                # The next few lines may raise OSError
                os.rename(self._filename, self._backupfilename)
                self._file = open(self._backupfilename, self._mode,
                                  encoding=encoding, errors=self._errors)
                try:
                    perm = os.fstat(self._file.fileno()).st_mode
                except OSError:
                    self._output = open(self._filename, self._write_mode,
                                        encoding=encoding, errors=self._errors)
                else:
                    # preserve the original file's permission bits
                    mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
                    if hasattr(os, 'O_BINARY'):
                        mode |= os.O_BINARY

                    fd = os.open(self._filename, mode, perm)
                    self._output = os.fdopen(fd, self._write_mode,
                                             encoding=encoding, errors=self._errors)
                    try:
                        os.chmod(self._filename, perm)
                    except OSError:
                        pass
                self._savestdout = sys.stdout
                sys.stdout = self._output
            else:
                # This may raise OSError
                if self._openhook:
                    # Custom hooks made previous to Python 3.10 didn't have
                    # encoding argument
                    if self._encoding is None:
                        self._file = self._openhook(self._filename, self._mode)
                    else:
                        self._file = self._openhook(
                            self._filename, self._mode, encoding=self._encoding, errors=self._errors)
                else:
                    self._file = open(self._filename, self._mode, encoding=encoding, errors=self._errors)
        self._readline = self._file.readline  # hide FileInput._readline
        return self._readline()

    def filename(self):
        return self._filename

    def lineno(self):
        return self._startlineno + self._filelineno

    def filelineno(self):
        return self._filelineno

    def fileno(self):
        if self._file:
            try:
                return self._file.fileno()
            except ValueError:
                # file object has no underlying descriptor (e.g. closed)
                return -1
        else:
            return -1

    def isfirstline(self):
        return self._filelineno == 1

    def isstdin(self):
        return self._isstdin

    __class_getitem__ = classmethod(GenericAlias)
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def hook_compressed(filename, mode, *, encoding=None, errors=None):
    """Openhook that transparently decompresses .gz and .bz2 files.

    Other files are opened normally.  For text modes, compressed streams
    are wrapped in a TextIOWrapper using *encoding*/*errors*.
    """
    if "b" not in mode and encoding is None:
        # EncodingWarning is emitted in FileInput() already.
        encoding = "locale"
    extension = os.path.splitext(filename)[1]
    if extension == '.gz':
        import gzip
        opener = gzip.open
    elif extension == '.bz2':
        import bz2
        opener = bz2.BZ2File
    else:
        return open(filename, mode, encoding=encoding, errors=errors)

    stream = opener(filename, mode)
    # gzip and bz2 streams are binary by default; wrap for text modes.
    if "b" not in mode:
        stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors)
    return stream
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def hook_encoded(encoding, errors=None):
    """Return an openhook that opens files with the given encoding/errors."""
    def _open_with_encoding(filename, mode):
        return open(filename, mode, encoding=encoding, errors=errors)
    return _open_with_encoding
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def _test():
    """Command-line driver: echo numbered input lines; -i inplace, -b BACKUP."""
    import getopt
    inplace = False
    backup = False
    opts, args = getopt.getopt(sys.argv[1:], "ib:")
    for opt, arg in opts:
        if opt == '-i':
            inplace = True
        if opt == '-b':
            backup = arg
    for line in input(args, inplace=inplace, backup=backup):
        # strip a trailing newline (and a carriage return, for CRLF input)
        if line.endswith('\n'):
            line = line[:-1]
        if line.endswith('\r'):
            line = line[:-1]
        print("%d: %s[%d]%s %s" % (lineno(), filename(), filelineno(),
                                   isfirstline() and "*" or "", line))
    print("%d: %s[%d]" % (lineno(), filename(), filelineno()))
|
| 460 |
+
|
| 461 |
+
if __name__ == '__main__':
|
| 462 |
+
_test()
|
evalkit_tf446/lib/python3.10/fnmatch.py
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Filename matching with shell patterns.
|
| 2 |
+
|
| 3 |
+
fnmatch(FILENAME, PATTERN) matches according to the local convention.
|
| 4 |
+
fnmatchcase(FILENAME, PATTERN) always takes case in account.
|
| 5 |
+
|
| 6 |
+
The functions operate by translating the pattern into a regular
|
| 7 |
+
expression. They cache the compiled regular expressions for speed.
|
| 8 |
+
|
| 9 |
+
The function translate(PATTERN) returns a regular expression
|
| 10 |
+
corresponding to PATTERN. (It does not compile it.)
|
| 11 |
+
"""
|
| 12 |
+
import os
|
| 13 |
+
import posixpath
|
| 14 |
+
import re
|
| 15 |
+
import functools
|
| 16 |
+
|
| 17 |
+
__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
|
| 18 |
+
|
| 19 |
+
# Build a thread-safe incrementing counter to help create unique regexp group
|
| 20 |
+
# names across calls.
|
| 21 |
+
from itertools import count
|
| 22 |
+
_nextgroupnum = count().__next__
|
| 23 |
+
del count
|
| 24 |
+
|
| 25 |
+
def fnmatch(name, pat):
|
| 26 |
+
"""Test whether FILENAME matches PATTERN.
|
| 27 |
+
|
| 28 |
+
Patterns are Unix shell style:
|
| 29 |
+
|
| 30 |
+
* matches everything
|
| 31 |
+
? matches any single character
|
| 32 |
+
[seq] matches any character in seq
|
| 33 |
+
[!seq] matches any char not in seq
|
| 34 |
+
|
| 35 |
+
An initial period in FILENAME is not special.
|
| 36 |
+
Both FILENAME and PATTERN are first case-normalized
|
| 37 |
+
if the operating system requires it.
|
| 38 |
+
If you don't want this, use fnmatchcase(FILENAME, PATTERN).
|
| 39 |
+
"""
|
| 40 |
+
name = os.path.normcase(name)
|
| 41 |
+
pat = os.path.normcase(pat)
|
| 42 |
+
return fnmatchcase(name, pat)
|
| 43 |
+
|
| 44 |
+
@functools.lru_cache(maxsize=256, typed=True)
|
| 45 |
+
def _compile_pattern(pat):
|
| 46 |
+
if isinstance(pat, bytes):
|
| 47 |
+
pat_str = str(pat, 'ISO-8859-1')
|
| 48 |
+
res_str = translate(pat_str)
|
| 49 |
+
res = bytes(res_str, 'ISO-8859-1')
|
| 50 |
+
else:
|
| 51 |
+
res = translate(pat)
|
| 52 |
+
return re.compile(res).match
|
| 53 |
+
|
| 54 |
+
def filter(names, pat):
|
| 55 |
+
"""Construct a list from those elements of the iterable NAMES that match PAT."""
|
| 56 |
+
result = []
|
| 57 |
+
pat = os.path.normcase(pat)
|
| 58 |
+
match = _compile_pattern(pat)
|
| 59 |
+
if os.path is posixpath:
|
| 60 |
+
# normcase on posix is NOP. Optimize it away from the loop.
|
| 61 |
+
for name in names:
|
| 62 |
+
if match(name):
|
| 63 |
+
result.append(name)
|
| 64 |
+
else:
|
| 65 |
+
for name in names:
|
| 66 |
+
if match(os.path.normcase(name)):
|
| 67 |
+
result.append(name)
|
| 68 |
+
return result
|
| 69 |
+
|
| 70 |
+
def fnmatchcase(name, pat):
|
| 71 |
+
"""Test whether FILENAME matches PATTERN, including case.
|
| 72 |
+
|
| 73 |
+
This is a version of fnmatch() which doesn't case-normalize
|
| 74 |
+
its arguments.
|
| 75 |
+
"""
|
| 76 |
+
match = _compile_pattern(pat)
|
| 77 |
+
return match(name) is not None
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def translate(pat):
|
| 81 |
+
"""Translate a shell PATTERN to a regular expression.
|
| 82 |
+
|
| 83 |
+
There is no way to quote meta-characters.
|
| 84 |
+
"""
|
| 85 |
+
|
| 86 |
+
STAR = object()
|
| 87 |
+
res = []
|
| 88 |
+
add = res.append
|
| 89 |
+
i, n = 0, len(pat)
|
| 90 |
+
while i < n:
|
| 91 |
+
c = pat[i]
|
| 92 |
+
i = i+1
|
| 93 |
+
if c == '*':
|
| 94 |
+
# compress consecutive `*` into one
|
| 95 |
+
if (not res) or res[-1] is not STAR:
|
| 96 |
+
add(STAR)
|
| 97 |
+
elif c == '?':
|
| 98 |
+
add('.')
|
| 99 |
+
elif c == '[':
|
| 100 |
+
j = i
|
| 101 |
+
if j < n and pat[j] == '!':
|
| 102 |
+
j = j+1
|
| 103 |
+
if j < n and pat[j] == ']':
|
| 104 |
+
j = j+1
|
| 105 |
+
while j < n and pat[j] != ']':
|
| 106 |
+
j = j+1
|
| 107 |
+
if j >= n:
|
| 108 |
+
add('\\[')
|
| 109 |
+
else:
|
| 110 |
+
stuff = pat[i:j]
|
| 111 |
+
if '-' not in stuff:
|
| 112 |
+
stuff = stuff.replace('\\', r'\\')
|
| 113 |
+
else:
|
| 114 |
+
chunks = []
|
| 115 |
+
k = i+2 if pat[i] == '!' else i+1
|
| 116 |
+
while True:
|
| 117 |
+
k = pat.find('-', k, j)
|
| 118 |
+
if k < 0:
|
| 119 |
+
break
|
| 120 |
+
chunks.append(pat[i:k])
|
| 121 |
+
i = k+1
|
| 122 |
+
k = k+3
|
| 123 |
+
chunk = pat[i:j]
|
| 124 |
+
if chunk:
|
| 125 |
+
chunks.append(chunk)
|
| 126 |
+
else:
|
| 127 |
+
chunks[-1] += '-'
|
| 128 |
+
# Remove empty ranges -- invalid in RE.
|
| 129 |
+
for k in range(len(chunks)-1, 0, -1):
|
| 130 |
+
if chunks[k-1][-1] > chunks[k][0]:
|
| 131 |
+
chunks[k-1] = chunks[k-1][:-1] + chunks[k][1:]
|
| 132 |
+
del chunks[k]
|
| 133 |
+
# Escape backslashes and hyphens for set difference (--).
|
| 134 |
+
# Hyphens that create ranges shouldn't be escaped.
|
| 135 |
+
stuff = '-'.join(s.replace('\\', r'\\').replace('-', r'\-')
|
| 136 |
+
for s in chunks)
|
| 137 |
+
# Escape set operations (&&, ~~ and ||).
|
| 138 |
+
stuff = re.sub(r'([&~|])', r'\\\1', stuff)
|
| 139 |
+
i = j+1
|
| 140 |
+
if not stuff:
|
| 141 |
+
# Empty range: never match.
|
| 142 |
+
add('(?!)')
|
| 143 |
+
elif stuff == '!':
|
| 144 |
+
# Negated empty range: match any character.
|
| 145 |
+
add('.')
|
| 146 |
+
else:
|
| 147 |
+
if stuff[0] == '!':
|
| 148 |
+
stuff = '^' + stuff[1:]
|
| 149 |
+
elif stuff[0] in ('^', '['):
|
| 150 |
+
stuff = '\\' + stuff
|
| 151 |
+
add(f'[{stuff}]')
|
| 152 |
+
else:
|
| 153 |
+
add(re.escape(c))
|
| 154 |
+
assert i == n
|
| 155 |
+
|
| 156 |
+
# Deal with STARs.
|
| 157 |
+
inp = res
|
| 158 |
+
res = []
|
| 159 |
+
add = res.append
|
| 160 |
+
i, n = 0, len(inp)
|
| 161 |
+
# Fixed pieces at the start?
|
| 162 |
+
while i < n and inp[i] is not STAR:
|
| 163 |
+
add(inp[i])
|
| 164 |
+
i += 1
|
| 165 |
+
# Now deal with STAR fixed STAR fixed ...
|
| 166 |
+
# For an interior `STAR fixed` pairing, we want to do a minimal
|
| 167 |
+
# .*? match followed by `fixed`, with no possibility of backtracking.
|
| 168 |
+
# We can't spell that directly, but can trick it into working by matching
|
| 169 |
+
# .*?fixed
|
| 170 |
+
# in a lookahead assertion, save the matched part in a group, then
|
| 171 |
+
# consume that group via a backreference. If the overall match fails,
|
| 172 |
+
# the lookahead assertion won't try alternatives. So the translation is:
|
| 173 |
+
# (?=(?P<name>.*?fixed))(?P=name)
|
| 174 |
+
# Group names are created as needed: g0, g1, g2, ...
|
| 175 |
+
# The numbers are obtained from _nextgroupnum() to ensure they're unique
|
| 176 |
+
# across calls and across threads. This is because people rely on the
|
| 177 |
+
# undocumented ability to join multiple translate() results together via
|
| 178 |
+
# "|" to build large regexps matching "one of many" shell patterns.
|
| 179 |
+
while i < n:
|
| 180 |
+
assert inp[i] is STAR
|
| 181 |
+
i += 1
|
| 182 |
+
if i == n:
|
| 183 |
+
add(".*")
|
| 184 |
+
break
|
| 185 |
+
assert inp[i] is not STAR
|
| 186 |
+
fixed = []
|
| 187 |
+
while i < n and inp[i] is not STAR:
|
| 188 |
+
fixed.append(inp[i])
|
| 189 |
+
i += 1
|
| 190 |
+
fixed = "".join(fixed)
|
| 191 |
+
if i == n:
|
| 192 |
+
add(".*")
|
| 193 |
+
add(fixed)
|
| 194 |
+
else:
|
| 195 |
+
groupnum = _nextgroupnum()
|
| 196 |
+
add(f"(?=(?P<g{groupnum}>.*?{fixed}))(?P=g{groupnum})")
|
| 197 |
+
assert i == n
|
| 198 |
+
res = "".join(res)
|
| 199 |
+
return fr'(?s:{res})\Z'
|
evalkit_tf446/lib/python3.10/fractions.py
ADDED
|
@@ -0,0 +1,748 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Originally contributed by Sjoerd Mullender.
|
| 2 |
+
# Significantly modified by Jeffrey Yasskin <jyasskin at gmail.com>.
|
| 3 |
+
|
| 4 |
+
"""Fraction, infinite-precision, real numbers."""
|
| 5 |
+
|
| 6 |
+
from decimal import Decimal
|
| 7 |
+
import math
|
| 8 |
+
import numbers
|
| 9 |
+
import operator
|
| 10 |
+
import re
|
| 11 |
+
import sys
|
| 12 |
+
|
| 13 |
+
__all__ = ['Fraction']
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Constants related to the hash implementation; hash(x) is based
|
| 17 |
+
# on the reduction of x modulo the prime _PyHASH_MODULUS.
|
| 18 |
+
_PyHASH_MODULUS = sys.hash_info.modulus
|
| 19 |
+
# Value to be used for rationals that reduce to infinity modulo
|
| 20 |
+
# _PyHASH_MODULUS.
|
| 21 |
+
_PyHASH_INF = sys.hash_info.inf
|
| 22 |
+
|
| 23 |
+
_RATIONAL_FORMAT = re.compile(r"""
|
| 24 |
+
\A\s* # optional whitespace at the start, then
|
| 25 |
+
(?P<sign>[-+]?) # an optional sign, then
|
| 26 |
+
(?=\d|\.\d) # lookahead for digit or .digit
|
| 27 |
+
(?P<num>\d*) # numerator (possibly empty)
|
| 28 |
+
(?: # followed by
|
| 29 |
+
(?:/(?P<denom>\d+))? # an optional denominator
|
| 30 |
+
| # or
|
| 31 |
+
(?:\.(?P<decimal>\d*))? # an optional fractional part
|
| 32 |
+
(?:E(?P<exp>[-+]?\d+))? # and optional exponent
|
| 33 |
+
)
|
| 34 |
+
\s*\Z # and optional whitespace to finish
|
| 35 |
+
""", re.VERBOSE | re.IGNORECASE)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class Fraction(numbers.Rational):
|
| 39 |
+
"""This class implements rational numbers.
|
| 40 |
+
|
| 41 |
+
In the two-argument form of the constructor, Fraction(8, 6) will
|
| 42 |
+
produce a rational number equivalent to 4/3. Both arguments must
|
| 43 |
+
be Rational. The numerator defaults to 0 and the denominator
|
| 44 |
+
defaults to 1 so that Fraction(3) == 3 and Fraction() == 0.
|
| 45 |
+
|
| 46 |
+
Fractions can also be constructed from:
|
| 47 |
+
|
| 48 |
+
- numeric strings similar to those accepted by the
|
| 49 |
+
float constructor (for example, '-2.3' or '1e10')
|
| 50 |
+
|
| 51 |
+
- strings of the form '123/456'
|
| 52 |
+
|
| 53 |
+
- float and Decimal instances
|
| 54 |
+
|
| 55 |
+
- other Rational instances (including integers)
|
| 56 |
+
|
| 57 |
+
"""
|
| 58 |
+
|
| 59 |
+
__slots__ = ('_numerator', '_denominator')
|
| 60 |
+
|
| 61 |
+
# We're immutable, so use __new__ not __init__
|
| 62 |
+
def __new__(cls, numerator=0, denominator=None, *, _normalize=True):
|
| 63 |
+
"""Constructs a Rational.
|
| 64 |
+
|
| 65 |
+
Takes a string like '3/2' or '1.5', another Rational instance, a
|
| 66 |
+
numerator/denominator pair, or a float.
|
| 67 |
+
|
| 68 |
+
Examples
|
| 69 |
+
--------
|
| 70 |
+
|
| 71 |
+
>>> Fraction(10, -8)
|
| 72 |
+
Fraction(-5, 4)
|
| 73 |
+
>>> Fraction(Fraction(1, 7), 5)
|
| 74 |
+
Fraction(1, 35)
|
| 75 |
+
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
|
| 76 |
+
Fraction(3, 14)
|
| 77 |
+
>>> Fraction('314')
|
| 78 |
+
Fraction(314, 1)
|
| 79 |
+
>>> Fraction('-35/4')
|
| 80 |
+
Fraction(-35, 4)
|
| 81 |
+
>>> Fraction('3.1415') # conversion from numeric string
|
| 82 |
+
Fraction(6283, 2000)
|
| 83 |
+
>>> Fraction('-47e-2') # string may include a decimal exponent
|
| 84 |
+
Fraction(-47, 100)
|
| 85 |
+
>>> Fraction(1.47) # direct construction from float (exact conversion)
|
| 86 |
+
Fraction(6620291452234629, 4503599627370496)
|
| 87 |
+
>>> Fraction(2.25)
|
| 88 |
+
Fraction(9, 4)
|
| 89 |
+
>>> Fraction(Decimal('1.47'))
|
| 90 |
+
Fraction(147, 100)
|
| 91 |
+
|
| 92 |
+
"""
|
| 93 |
+
self = super(Fraction, cls).__new__(cls)
|
| 94 |
+
|
| 95 |
+
if denominator is None:
|
| 96 |
+
if type(numerator) is int:
|
| 97 |
+
self._numerator = numerator
|
| 98 |
+
self._denominator = 1
|
| 99 |
+
return self
|
| 100 |
+
|
| 101 |
+
elif isinstance(numerator, numbers.Rational):
|
| 102 |
+
self._numerator = numerator.numerator
|
| 103 |
+
self._denominator = numerator.denominator
|
| 104 |
+
return self
|
| 105 |
+
|
| 106 |
+
elif isinstance(numerator, (float, Decimal)):
|
| 107 |
+
# Exact conversion
|
| 108 |
+
self._numerator, self._denominator = numerator.as_integer_ratio()
|
| 109 |
+
return self
|
| 110 |
+
|
| 111 |
+
elif isinstance(numerator, str):
|
| 112 |
+
# Handle construction from strings.
|
| 113 |
+
m = _RATIONAL_FORMAT.match(numerator)
|
| 114 |
+
if m is None:
|
| 115 |
+
raise ValueError('Invalid literal for Fraction: %r' %
|
| 116 |
+
numerator)
|
| 117 |
+
numerator = int(m.group('num') or '0')
|
| 118 |
+
denom = m.group('denom')
|
| 119 |
+
if denom:
|
| 120 |
+
denominator = int(denom)
|
| 121 |
+
else:
|
| 122 |
+
denominator = 1
|
| 123 |
+
decimal = m.group('decimal')
|
| 124 |
+
if decimal:
|
| 125 |
+
scale = 10**len(decimal)
|
| 126 |
+
numerator = numerator * scale + int(decimal)
|
| 127 |
+
denominator *= scale
|
| 128 |
+
exp = m.group('exp')
|
| 129 |
+
if exp:
|
| 130 |
+
exp = int(exp)
|
| 131 |
+
if exp >= 0:
|
| 132 |
+
numerator *= 10**exp
|
| 133 |
+
else:
|
| 134 |
+
denominator *= 10**-exp
|
| 135 |
+
if m.group('sign') == '-':
|
| 136 |
+
numerator = -numerator
|
| 137 |
+
|
| 138 |
+
else:
|
| 139 |
+
raise TypeError("argument should be a string "
|
| 140 |
+
"or a Rational instance")
|
| 141 |
+
|
| 142 |
+
elif type(numerator) is int is type(denominator):
|
| 143 |
+
pass # *very* normal case
|
| 144 |
+
|
| 145 |
+
elif (isinstance(numerator, numbers.Rational) and
|
| 146 |
+
isinstance(denominator, numbers.Rational)):
|
| 147 |
+
numerator, denominator = (
|
| 148 |
+
numerator.numerator * denominator.denominator,
|
| 149 |
+
denominator.numerator * numerator.denominator
|
| 150 |
+
)
|
| 151 |
+
else:
|
| 152 |
+
raise TypeError("both arguments should be "
|
| 153 |
+
"Rational instances")
|
| 154 |
+
|
| 155 |
+
if denominator == 0:
|
| 156 |
+
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
|
| 157 |
+
if _normalize:
|
| 158 |
+
g = math.gcd(numerator, denominator)
|
| 159 |
+
if denominator < 0:
|
| 160 |
+
g = -g
|
| 161 |
+
numerator //= g
|
| 162 |
+
denominator //= g
|
| 163 |
+
self._numerator = numerator
|
| 164 |
+
self._denominator = denominator
|
| 165 |
+
return self
|
| 166 |
+
|
| 167 |
+
@classmethod
|
| 168 |
+
def from_float(cls, f):
|
| 169 |
+
"""Converts a finite float to a rational number, exactly.
|
| 170 |
+
|
| 171 |
+
Beware that Fraction.from_float(0.3) != Fraction(3, 10).
|
| 172 |
+
|
| 173 |
+
"""
|
| 174 |
+
if isinstance(f, numbers.Integral):
|
| 175 |
+
return cls(f)
|
| 176 |
+
elif not isinstance(f, float):
|
| 177 |
+
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
|
| 178 |
+
(cls.__name__, f, type(f).__name__))
|
| 179 |
+
return cls(*f.as_integer_ratio())
|
| 180 |
+
|
| 181 |
+
@classmethod
|
| 182 |
+
def from_decimal(cls, dec):
|
| 183 |
+
"""Converts a finite Decimal instance to a rational number, exactly."""
|
| 184 |
+
from decimal import Decimal
|
| 185 |
+
if isinstance(dec, numbers.Integral):
|
| 186 |
+
dec = Decimal(int(dec))
|
| 187 |
+
elif not isinstance(dec, Decimal):
|
| 188 |
+
raise TypeError(
|
| 189 |
+
"%s.from_decimal() only takes Decimals, not %r (%s)" %
|
| 190 |
+
(cls.__name__, dec, type(dec).__name__))
|
| 191 |
+
return cls(*dec.as_integer_ratio())
|
| 192 |
+
|
| 193 |
+
def as_integer_ratio(self):
|
| 194 |
+
"""Return the integer ratio as a tuple.
|
| 195 |
+
|
| 196 |
+
Return a tuple of two integers, whose ratio is equal to the
|
| 197 |
+
Fraction and with a positive denominator.
|
| 198 |
+
"""
|
| 199 |
+
return (self._numerator, self._denominator)
|
| 200 |
+
|
| 201 |
+
def limit_denominator(self, max_denominator=1000000):
|
| 202 |
+
"""Closest Fraction to self with denominator at most max_denominator.
|
| 203 |
+
|
| 204 |
+
>>> Fraction('3.141592653589793').limit_denominator(10)
|
| 205 |
+
Fraction(22, 7)
|
| 206 |
+
>>> Fraction('3.141592653589793').limit_denominator(100)
|
| 207 |
+
Fraction(311, 99)
|
| 208 |
+
>>> Fraction(4321, 8765).limit_denominator(10000)
|
| 209 |
+
Fraction(4321, 8765)
|
| 210 |
+
|
| 211 |
+
"""
|
| 212 |
+
# Algorithm notes: For any real number x, define a *best upper
|
| 213 |
+
# approximation* to x to be a rational number p/q such that:
|
| 214 |
+
#
|
| 215 |
+
# (1) p/q >= x, and
|
| 216 |
+
# (2) if p/q > r/s >= x then s > q, for any rational r/s.
|
| 217 |
+
#
|
| 218 |
+
# Define *best lower approximation* similarly. Then it can be
|
| 219 |
+
# proved that a rational number is a best upper or lower
|
| 220 |
+
# approximation to x if, and only if, it is a convergent or
|
| 221 |
+
# semiconvergent of the (unique shortest) continued fraction
|
| 222 |
+
# associated to x.
|
| 223 |
+
#
|
| 224 |
+
# To find a best rational approximation with denominator <= M,
|
| 225 |
+
# we find the best upper and lower approximations with
|
| 226 |
+
# denominator <= M and take whichever of these is closer to x.
|
| 227 |
+
# In the event of a tie, the bound with smaller denominator is
|
| 228 |
+
# chosen. If both denominators are equal (which can happen
|
| 229 |
+
# only when max_denominator == 1 and self is midway between
|
| 230 |
+
# two integers) the lower bound---i.e., the floor of self, is
|
| 231 |
+
# taken.
|
| 232 |
+
|
| 233 |
+
if max_denominator < 1:
|
| 234 |
+
raise ValueError("max_denominator should be at least 1")
|
| 235 |
+
if self._denominator <= max_denominator:
|
| 236 |
+
return Fraction(self)
|
| 237 |
+
|
| 238 |
+
p0, q0, p1, q1 = 0, 1, 1, 0
|
| 239 |
+
n, d = self._numerator, self._denominator
|
| 240 |
+
while True:
|
| 241 |
+
a = n//d
|
| 242 |
+
q2 = q0+a*q1
|
| 243 |
+
if q2 > max_denominator:
|
| 244 |
+
break
|
| 245 |
+
p0, q0, p1, q1 = p1, q1, p0+a*p1, q2
|
| 246 |
+
n, d = d, n-a*d
|
| 247 |
+
|
| 248 |
+
k = (max_denominator-q0)//q1
|
| 249 |
+
bound1 = Fraction(p0+k*p1, q0+k*q1)
|
| 250 |
+
bound2 = Fraction(p1, q1)
|
| 251 |
+
if abs(bound2 - self) <= abs(bound1-self):
|
| 252 |
+
return bound2
|
| 253 |
+
else:
|
| 254 |
+
return bound1
|
| 255 |
+
|
| 256 |
+
@property
|
| 257 |
+
def numerator(a):
|
| 258 |
+
return a._numerator
|
| 259 |
+
|
| 260 |
+
@property
|
| 261 |
+
def denominator(a):
|
| 262 |
+
return a._denominator
|
| 263 |
+
|
| 264 |
+
def __repr__(self):
|
| 265 |
+
"""repr(self)"""
|
| 266 |
+
return '%s(%s, %s)' % (self.__class__.__name__,
|
| 267 |
+
self._numerator, self._denominator)
|
| 268 |
+
|
| 269 |
+
def __str__(self):
|
| 270 |
+
"""str(self)"""
|
| 271 |
+
if self._denominator == 1:
|
| 272 |
+
return str(self._numerator)
|
| 273 |
+
else:
|
| 274 |
+
return '%s/%s' % (self._numerator, self._denominator)
|
| 275 |
+
|
| 276 |
+
def _operator_fallbacks(monomorphic_operator, fallback_operator):
|
| 277 |
+
"""Generates forward and reverse operators given a purely-rational
|
| 278 |
+
operator and a function from the operator module.
|
| 279 |
+
|
| 280 |
+
Use this like:
|
| 281 |
+
__op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op)
|
| 282 |
+
|
| 283 |
+
In general, we want to implement the arithmetic operations so
|
| 284 |
+
that mixed-mode operations either call an implementation whose
|
| 285 |
+
author knew about the types of both arguments, or convert both
|
| 286 |
+
to the nearest built in type and do the operation there. In
|
| 287 |
+
Fraction, that means that we define __add__ and __radd__ as:
|
| 288 |
+
|
| 289 |
+
def __add__(self, other):
|
| 290 |
+
# Both types have numerators/denominator attributes,
|
| 291 |
+
# so do the operation directly
|
| 292 |
+
if isinstance(other, (int, Fraction)):
|
| 293 |
+
return Fraction(self.numerator * other.denominator +
|
| 294 |
+
other.numerator * self.denominator,
|
| 295 |
+
self.denominator * other.denominator)
|
| 296 |
+
# float and complex don't have those operations, but we
|
| 297 |
+
# know about those types, so special case them.
|
| 298 |
+
elif isinstance(other, float):
|
| 299 |
+
return float(self) + other
|
| 300 |
+
elif isinstance(other, complex):
|
| 301 |
+
return complex(self) + other
|
| 302 |
+
# Let the other type take over.
|
| 303 |
+
return NotImplemented
|
| 304 |
+
|
| 305 |
+
def __radd__(self, other):
|
| 306 |
+
# radd handles more types than add because there's
|
| 307 |
+
# nothing left to fall back to.
|
| 308 |
+
if isinstance(other, numbers.Rational):
|
| 309 |
+
return Fraction(self.numerator * other.denominator +
|
| 310 |
+
other.numerator * self.denominator,
|
| 311 |
+
self.denominator * other.denominator)
|
| 312 |
+
elif isinstance(other, Real):
|
| 313 |
+
return float(other) + float(self)
|
| 314 |
+
elif isinstance(other, Complex):
|
| 315 |
+
return complex(other) + complex(self)
|
| 316 |
+
return NotImplemented
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
There are 5 different cases for a mixed-type addition on
|
| 320 |
+
Fraction. I'll refer to all of the above code that doesn't
|
| 321 |
+
refer to Fraction, float, or complex as "boilerplate". 'r'
|
| 322 |
+
will be an instance of Fraction, which is a subtype of
|
| 323 |
+
Rational (r : Fraction <: Rational), and b : B <:
|
| 324 |
+
Complex. The first three involve 'r + b':
|
| 325 |
+
|
| 326 |
+
1. If B <: Fraction, int, float, or complex, we handle
|
| 327 |
+
that specially, and all is well.
|
| 328 |
+
2. If Fraction falls back to the boilerplate code, and it
|
| 329 |
+
were to return a value from __add__, we'd miss the
|
| 330 |
+
possibility that B defines a more intelligent __radd__,
|
| 331 |
+
so the boilerplate should return NotImplemented from
|
| 332 |
+
__add__. In particular, we don't handle Rational
|
| 333 |
+
here, even though we could get an exact answer, in case
|
| 334 |
+
the other type wants to do something special.
|
| 335 |
+
3. If B <: Fraction, Python tries B.__radd__ before
|
| 336 |
+
Fraction.__add__. This is ok, because it was
|
| 337 |
+
implemented with knowledge of Fraction, so it can
|
| 338 |
+
handle those instances before delegating to Real or
|
| 339 |
+
Complex.
|
| 340 |
+
|
| 341 |
+
The next two situations describe 'b + r'. We assume that b
|
| 342 |
+
didn't know about Fraction in its implementation, and that it
|
| 343 |
+
uses similar boilerplate code:
|
| 344 |
+
|
| 345 |
+
4. If B <: Rational, then __radd_ converts both to the
|
| 346 |
+
builtin rational type (hey look, that's us) and
|
| 347 |
+
proceeds.
|
| 348 |
+
5. Otherwise, __radd__ tries to find the nearest common
|
| 349 |
+
base ABC, and fall back to its builtin type. Since this
|
| 350 |
+
class doesn't subclass a concrete type, there's no
|
| 351 |
+
implementation to fall back to, so we need to try as
|
| 352 |
+
hard as possible to return an actual value, or the user
|
| 353 |
+
will get a TypeError.
|
| 354 |
+
|
| 355 |
+
"""
|
| 356 |
+
def forward(a, b):
|
| 357 |
+
if isinstance(b, (int, Fraction)):
|
| 358 |
+
return monomorphic_operator(a, b)
|
| 359 |
+
elif isinstance(b, float):
|
| 360 |
+
return fallback_operator(float(a), b)
|
| 361 |
+
elif isinstance(b, complex):
|
| 362 |
+
return fallback_operator(complex(a), b)
|
| 363 |
+
else:
|
| 364 |
+
return NotImplemented
|
| 365 |
+
forward.__name__ = '__' + fallback_operator.__name__ + '__'
|
| 366 |
+
forward.__doc__ = monomorphic_operator.__doc__
|
| 367 |
+
|
| 368 |
+
def reverse(b, a):
|
| 369 |
+
if isinstance(a, numbers.Rational):
|
| 370 |
+
# Includes ints.
|
| 371 |
+
return monomorphic_operator(a, b)
|
| 372 |
+
elif isinstance(a, numbers.Real):
|
| 373 |
+
return fallback_operator(float(a), float(b))
|
| 374 |
+
elif isinstance(a, numbers.Complex):
|
| 375 |
+
return fallback_operator(complex(a), complex(b))
|
| 376 |
+
else:
|
| 377 |
+
return NotImplemented
|
| 378 |
+
reverse.__name__ = '__r' + fallback_operator.__name__ + '__'
|
| 379 |
+
reverse.__doc__ = monomorphic_operator.__doc__
|
| 380 |
+
|
| 381 |
+
return forward, reverse
|
| 382 |
+
|
| 383 |
+
# Rational arithmetic algorithms: Knuth, TAOCP, Volume 2, 4.5.1.
|
| 384 |
+
#
|
| 385 |
+
# Assume input fractions a and b are normalized.
|
| 386 |
+
#
|
| 387 |
+
# 1) Consider addition/subtraction.
|
| 388 |
+
#
|
| 389 |
+
# Let g = gcd(da, db). Then
|
| 390 |
+
#
|
| 391 |
+
# na nb na*db ± nb*da
|
| 392 |
+
# a ± b == -- ± -- == ------------- ==
|
| 393 |
+
# da db da*db
|
| 394 |
+
#
|
| 395 |
+
# na*(db//g) ± nb*(da//g) t
|
| 396 |
+
# == ----------------------- == -
|
| 397 |
+
# (da*db)//g d
|
| 398 |
+
#
|
| 399 |
+
# Now, if g > 1, we're working with smaller integers.
|
| 400 |
+
#
|
| 401 |
+
# Note, that t, (da//g) and (db//g) are pairwise coprime.
|
| 402 |
+
#
|
| 403 |
+
# Indeed, (da//g) and (db//g) share no common factors (they were
|
| 404 |
+
# removed) and da is coprime with na (since input fractions are
|
| 405 |
+
# normalized), hence (da//g) and na are coprime. By symmetry,
|
| 406 |
+
# (db//g) and nb are coprime too. Then,
|
| 407 |
+
#
|
| 408 |
+
# gcd(t, da//g) == gcd(na*(db//g), da//g) == 1
|
| 409 |
+
# gcd(t, db//g) == gcd(nb*(da//g), db//g) == 1
|
| 410 |
+
#
|
| 411 |
+
# Above allows us optimize reduction of the result to lowest
|
| 412 |
+
# terms. Indeed,
|
| 413 |
+
#
|
| 414 |
+
# g2 = gcd(t, d) == gcd(t, (da//g)*(db//g)*g) == gcd(t, g)
|
| 415 |
+
#
|
| 416 |
+
# t//g2 t//g2
|
| 417 |
+
# a ± b == ----------------------- == ----------------
|
| 418 |
+
# (da//g)*(db//g)*(g//g2) (da//g)*(db//g2)
|
| 419 |
+
#
|
| 420 |
+
# is a normalized fraction. This is useful because the unnormalized
|
| 421 |
+
# denominator d could be much larger than g.
|
| 422 |
+
#
|
| 423 |
+
# We should special-case g == 1 (and g2 == 1), since 60.8% of
|
| 424 |
+
# randomly-chosen integers are coprime:
|
| 425 |
+
# https://en.wikipedia.org/wiki/Coprime_integers#Probability_of_coprimality
|
| 426 |
+
# Note, that g2 == 1 always for fractions, obtained from floats: here
|
| 427 |
+
# g is a power of 2 and the unnormalized numerator t is an odd integer.
|
| 428 |
+
#
|
| 429 |
+
# 2) Consider multiplication
|
| 430 |
+
#
|
| 431 |
+
# Let g1 = gcd(na, db) and g2 = gcd(nb, da), then
|
| 432 |
+
#
|
| 433 |
+
# na*nb na*nb (na//g1)*(nb//g2)
|
| 434 |
+
# a*b == ----- == ----- == -----------------
|
| 435 |
+
# da*db db*da (db//g1)*(da//g2)
|
| 436 |
+
#
|
| 437 |
+
# Note, that after divisions we're multiplying smaller integers.
|
| 438 |
+
#
|
| 439 |
+
# Also, the resulting fraction is normalized, because each of
|
| 440 |
+
# two factors in the numerator is coprime to each of the two factors
|
| 441 |
+
# in the denominator.
|
| 442 |
+
#
|
| 443 |
+
# Indeed, pick (na//g1). It's coprime with (da//g2), because input
|
| 444 |
+
# fractions are normalized. It's also coprime with (db//g1), because
|
| 445 |
+
# common factors are removed by g1 == gcd(na, db).
|
| 446 |
+
#
|
| 447 |
+
# As for addition/subtraction, we should special-case g1 == 1
|
| 448 |
+
# and g2 == 1 for same reason. That happens also for multiplying
|
| 449 |
+
# rationals, obtained from floats.
|
| 450 |
+
|
| 451 |
+
def _add(a, b):
|
| 452 |
+
"""a + b"""
|
| 453 |
+
na, da = a.numerator, a.denominator
|
| 454 |
+
nb, db = b.numerator, b.denominator
|
| 455 |
+
g = math.gcd(da, db)
|
| 456 |
+
if g == 1:
|
| 457 |
+
return Fraction(na * db + da * nb, da * db, _normalize=False)
|
| 458 |
+
s = da // g
|
| 459 |
+
t = na * (db // g) + nb * s
|
| 460 |
+
g2 = math.gcd(t, g)
|
| 461 |
+
if g2 == 1:
|
| 462 |
+
return Fraction(t, s * db, _normalize=False)
|
| 463 |
+
return Fraction(t // g2, s * (db // g2), _normalize=False)
|
| 464 |
+
|
| 465 |
+
__add__, __radd__ = _operator_fallbacks(_add, operator.add)
|
| 466 |
+
|
| 467 |
+
def _sub(a, b):
|
| 468 |
+
"""a - b"""
|
| 469 |
+
na, da = a.numerator, a.denominator
|
| 470 |
+
nb, db = b.numerator, b.denominator
|
| 471 |
+
g = math.gcd(da, db)
|
| 472 |
+
if g == 1:
|
| 473 |
+
return Fraction(na * db - da * nb, da * db, _normalize=False)
|
| 474 |
+
s = da // g
|
| 475 |
+
t = na * (db // g) - nb * s
|
| 476 |
+
g2 = math.gcd(t, g)
|
| 477 |
+
if g2 == 1:
|
| 478 |
+
return Fraction(t, s * db, _normalize=False)
|
| 479 |
+
return Fraction(t // g2, s * (db // g2), _normalize=False)
|
| 480 |
+
|
| 481 |
+
__sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub)
|
| 482 |
+
|
| 483 |
+
def _mul(a, b):
|
| 484 |
+
"""a * b"""
|
| 485 |
+
na, da = a.numerator, a.denominator
|
| 486 |
+
nb, db = b.numerator, b.denominator
|
| 487 |
+
g1 = math.gcd(na, db)
|
| 488 |
+
if g1 > 1:
|
| 489 |
+
na //= g1
|
| 490 |
+
db //= g1
|
| 491 |
+
g2 = math.gcd(nb, da)
|
| 492 |
+
if g2 > 1:
|
| 493 |
+
nb //= g2
|
| 494 |
+
da //= g2
|
| 495 |
+
return Fraction(na * nb, db * da, _normalize=False)
|
| 496 |
+
|
| 497 |
+
__mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul)
|
| 498 |
+
|
| 499 |
+
def _div(a, b):
|
| 500 |
+
"""a / b"""
|
| 501 |
+
# Same as _mul(), with inversed b.
|
| 502 |
+
na, da = a.numerator, a.denominator
|
| 503 |
+
nb, db = b.numerator, b.denominator
|
| 504 |
+
g1 = math.gcd(na, nb)
|
| 505 |
+
if g1 > 1:
|
| 506 |
+
na //= g1
|
| 507 |
+
nb //= g1
|
| 508 |
+
g2 = math.gcd(db, da)
|
| 509 |
+
if g2 > 1:
|
| 510 |
+
da //= g2
|
| 511 |
+
db //= g2
|
| 512 |
+
n, d = na * db, nb * da
|
| 513 |
+
if d < 0:
|
| 514 |
+
n, d = -n, -d
|
| 515 |
+
return Fraction(n, d, _normalize=False)
|
| 516 |
+
|
| 517 |
+
__truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv)
|
| 518 |
+
|
| 519 |
+
def _floordiv(a, b):
|
| 520 |
+
"""a // b"""
|
| 521 |
+
return (a.numerator * b.denominator) // (a.denominator * b.numerator)
|
| 522 |
+
|
| 523 |
+
__floordiv__, __rfloordiv__ = _operator_fallbacks(_floordiv, operator.floordiv)
|
| 524 |
+
|
| 525 |
+
def _divmod(a, b):
|
| 526 |
+
"""(a // b, a % b)"""
|
| 527 |
+
da, db = a.denominator, b.denominator
|
| 528 |
+
div, n_mod = divmod(a.numerator * db, da * b.numerator)
|
| 529 |
+
return div, Fraction(n_mod, da * db)
|
| 530 |
+
|
| 531 |
+
__divmod__, __rdivmod__ = _operator_fallbacks(_divmod, divmod)
|
| 532 |
+
|
| 533 |
+
def _mod(a, b):
|
| 534 |
+
"""a % b"""
|
| 535 |
+
da, db = a.denominator, b.denominator
|
| 536 |
+
return Fraction((a.numerator * db) % (b.numerator * da), da * db)
|
| 537 |
+
|
| 538 |
+
__mod__, __rmod__ = _operator_fallbacks(_mod, operator.mod)
|
| 539 |
+
|
| 540 |
+
    def __pow__(a, b):
        """a ** b

        If b is not an integer, the result will be a float or complex
        since roots are generally irrational. If b is an integer, the
        result will be rational.

        """
        if isinstance(b, numbers.Rational):
            if b.denominator == 1:
                # Integral exponent: the result is exactly representable
                # as a rational, and stays normalized if `a` was.
                power = b.numerator
                if power >= 0:
                    return Fraction(a._numerator ** power,
                                    a._denominator ** power,
                                    _normalize=False)
                elif a._numerator >= 0:
                    # Negative exponent: invert the fraction, then raise
                    # to the (positive) -power.
                    return Fraction(a._denominator ** -power,
                                    a._numerator ** -power,
                                    _normalize=False)
                else:
                    # Negative numerator and negative exponent: negate both
                    # parts so the resulting denominator stays positive.
                    return Fraction((-a._denominator) ** -power,
                                    (-a._numerator) ** -power,
                                    _normalize=False)
            else:
                # A fractional power will generally produce an
                # irrational number.
                return float(a) ** float(b)
        else:
            # Non-rational exponent: defer to float (or complex) arithmetic.
            return float(a) ** b
|
| 569 |
+
|
| 570 |
+
def __rpow__(b, a):
|
| 571 |
+
"""a ** b"""
|
| 572 |
+
if b._denominator == 1 and b._numerator >= 0:
|
| 573 |
+
# If a is an int, keep it that way if possible.
|
| 574 |
+
return a ** b._numerator
|
| 575 |
+
|
| 576 |
+
if isinstance(a, numbers.Rational):
|
| 577 |
+
return Fraction(a.numerator, a.denominator) ** b
|
| 578 |
+
|
| 579 |
+
if b._denominator == 1:
|
| 580 |
+
return a ** b._numerator
|
| 581 |
+
|
| 582 |
+
return a ** float(b)
|
| 583 |
+
|
| 584 |
+
    def __pos__(a):
        """+a: Coerces a subclass instance to Fraction"""
        # `a` is already normalized, so skip re-normalization.
        return Fraction(a._numerator, a._denominator, _normalize=False)

    def __neg__(a):
        """-a"""
        # The denominator is always positive, so negating the numerator
        # flips the sign of the whole value.
        return Fraction(-a._numerator, a._denominator, _normalize=False)

    def __abs__(a):
        """abs(a)"""
        # Sign lives entirely in the numerator.
        return Fraction(abs(a._numerator), a._denominator, _normalize=False)
|
| 595 |
+
|
| 596 |
+
    def __trunc__(a):
        """trunc(a)"""
        # Floor division rounds toward -inf; double-negate so negative
        # values round toward zero instead.
        if a._numerator < 0:
            return -(-a._numerator // a._denominator)
        else:
            return a._numerator // a._denominator

    def __floor__(a):
        """math.floor(a)"""
        # Python's // already floors for integer operands.
        return a.numerator // a.denominator

    def __ceil__(a):
        """math.ceil(a)"""
        # The negations cleverly convince floordiv to return the ceiling.
        return -(-a.numerator // a.denominator)
|
| 611 |
+
|
| 612 |
+
    def __round__(self, ndigits=None):
        """round(self, ndigits)

        Rounds half toward even.
        """
        if ndigits is None:
            # Round to the nearest integer; exact ties go to the even
            # neighbour (banker's rounding), matching round() on floats.
            floor, remainder = divmod(self.numerator, self.denominator)
            if remainder * 2 < self.denominator:
                return floor
            elif remainder * 2 > self.denominator:
                return floor + 1
            # Deal with the half case:
            elif floor % 2 == 0:
                return floor
            else:
                return floor + 1
        shift = 10**abs(ndigits)
        # See _operator_fallbacks.forward to check that the results of
        # these operations will always be Fraction and therefore have
        # round().
        if ndigits > 0:
            return Fraction(round(self * shift), shift)
        else:
            return Fraction(round(self / shift) * shift)
|
| 636 |
+
|
| 637 |
+
    def __hash__(self):
        """hash(self)

        Follows the documented numeric-hash rules so the hash agrees
        with numerically equal int, float and Decimal values.
        """

        # To make sure that the hash of a Fraction agrees with the hash
        # of a numerically equal integer, float or Decimal instance, we
        # follow the rules for numeric hashes outlined in the
        # documentation. (See library docs, 'Built-in Types').

        try:
            dinv = pow(self._denominator, -1, _PyHASH_MODULUS)
        except ValueError:
            # ValueError means there is no modular inverse.
            hash_ = _PyHASH_INF
        else:
            # The general algorithm now specifies that the absolute value of
            # the hash is
            #    (|N| * dinv) % P
            # where N is self._numerator and P is _PyHASH_MODULUS. That's
            # optimized here in two ways: first, for a non-negative int i,
            # hash(i) == i % P, but the int hash implementation doesn't need
            # to divide, and is faster than doing % P explicitly. So we do
            #    hash(|N| * dinv)
            # instead. Second, N is unbounded, so its product with dinv may
            # be arbitrarily expensive to compute. The final answer is the
            # same if we use the bounded |N| % P instead, which can again
            # be done with an int hash() call. If 0 <= i < P, hash(i) == i,
            # so this nested hash() call wastes a bit of time making a
            # redundant copy when |N| < P, but can save an arbitrarily large
            # amount of computation for large |N|.
            hash_ = hash(hash(abs(self._numerator)) * dinv)
        result = hash_ if self._numerator >= 0 else -hash_
        # -1 is reserved as an error code at the C level, so it is never
        # a valid hash value.
        return -2 if result == -1 else result
|
| 669 |
+
|
| 670 |
+
    def __eq__(a, b):
        """a == b"""
        if type(b) is int:
            # Fast path for the common exact-int comparison; avoids the
            # Rational-protocol attribute lookups below.
            return a._numerator == b and a._denominator == 1
        if isinstance(b, numbers.Rational):
            # Both sides normalized, so componentwise equality suffices.
            return (a._numerator == b.numerator and
                    a._denominator == b.denominator)
        if isinstance(b, numbers.Complex) and b.imag == 0:
            # A complex with zero imaginary part compares via its real part.
            b = b.real
        if isinstance(b, float):
            if math.isnan(b) or math.isinf(b):
                # comparisons with an infinity or nan should behave in
                # the same way for any finite a, so treat a as zero.
                return 0.0 == b
            else:
                # Exact comparison: lift the float to a Fraction first.
                return a == a.from_float(b)
        else:
            # Since a doesn't know how to compare with b, let's give b
            # a chance to compare itself with a.
            return NotImplemented
|
| 690 |
+
|
| 691 |
+
    def _richcmp(self, other, op):
        """Helper for comparison operators, for internal use only.

        Implement comparison between a Rational instance `self`, and
        either another Rational instance or a float `other`. If
        `other` is not a Rational instance or a float, return
        NotImplemented. `op` should be one of the six standard
        comparison operators.

        """
        # convert other to a Rational instance where reasonable.
        if isinstance(other, numbers.Rational):
            # Cross-multiply onto a common denominator; denominators are
            # positive, so the comparison direction is preserved.
            return op(self._numerator * other.denominator,
                      self._denominator * other.numerator)
        if isinstance(other, float):
            if math.isnan(other) or math.isinf(other):
                # Any finite value compares against nan/inf the same way
                # 0.0 does.
                return op(0.0, other)
            else:
                # Exact comparison via a Fraction lifted from the float.
                return op(self, self.from_float(other))
        else:
            return NotImplemented
|
| 712 |
+
|
| 713 |
+
    # All four orderings delegate to _richcmp with the matching operator.
    def __lt__(a, b):
        """a < b"""
        return a._richcmp(b, operator.lt)

    def __gt__(a, b):
        """a > b"""
        return a._richcmp(b, operator.gt)

    def __le__(a, b):
        """a <= b"""
        return a._richcmp(b, operator.le)

    def __ge__(a, b):
        """a >= b"""
        return a._richcmp(b, operator.ge)
|
| 728 |
+
|
| 729 |
+
    def __bool__(a):
        """a != 0"""
        # A Fraction is truthy iff its numerator is nonzero.
        # bpo-39274: Use bool() because (a._numerator != 0) can return an
        # object which is not a bool.
        return bool(a._numerator)
|
| 734 |
+
|
| 735 |
+
# support for pickling, copy, and deepcopy
|
| 736 |
+
|
| 737 |
+
    def __reduce__(self):
        # Pickle support: rebuild from the string form, which the
        # constructor parses back to the exact same value.
        return (self.__class__, (str(self),))
|
| 739 |
+
|
| 740 |
+
def __copy__(self):
|
| 741 |
+
if type(self) == Fraction:
|
| 742 |
+
return self # I'm immutable; therefore I am my own clone
|
| 743 |
+
return self.__class__(self._numerator, self._denominator)
|
| 744 |
+
|
| 745 |
+
def __deepcopy__(self, memo):
|
| 746 |
+
if type(self) == Fraction:
|
| 747 |
+
return self # My components are also immutable
|
| 748 |
+
return self.__class__(self._numerator, self._denominator)
|
evalkit_tf446/lib/python3.10/functools.py
ADDED
|
@@ -0,0 +1,992 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""functools.py - Tools for working with functions and callable objects
|
| 2 |
+
"""
|
| 3 |
+
# Python module wrapper for _functools C module
|
| 4 |
+
# to allow utilities written in Python to be added
|
| 5 |
+
# to the functools module.
|
| 6 |
+
# Written by Nick Coghlan <ncoghlan at gmail.com>,
|
| 7 |
+
# Raymond Hettinger <python at rcn.com>,
|
| 8 |
+
# and Łukasz Langa <lukasz at langa.pl>.
|
| 9 |
+
# Copyright (C) 2006-2013 Python Software Foundation.
|
| 10 |
+
# See C source code for _functools credits/copyright
|
| 11 |
+
|
| 12 |
+
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
|
| 13 |
+
'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
|
| 14 |
+
'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
|
| 15 |
+
'cached_property']
|
| 16 |
+
|
| 17 |
+
from abc import get_cache_token
|
| 18 |
+
from collections import namedtuple
|
| 19 |
+
# import types, weakref # Deferred to single_dispatch()
|
| 20 |
+
from reprlib import recursive_repr
|
| 21 |
+
from _thread import RLock
|
| 22 |
+
from types import GenericAlias
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
################################################################################
|
| 26 |
+
### update_wrapper() and wraps() decorator
|
| 27 |
+
################################################################################
|
| 28 |
+
|
| 29 |
+
# update_wrapper() and wraps() are tools to help write
|
| 30 |
+
# wrapper functions that can handle naive introspection
|
| 31 |
+
|
| 32 |
+
# Attributes copied verbatim from the wrapped callable onto its wrapper.
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
                       '__annotations__')
# Attributes of the wrapper that are update()d from the wrapped callable.
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
                   wrapped,
                   assigned = WRAPPER_ASSIGNMENTS,
                   updated = WRAPPER_UPDATES):
    """Make *wrapper* look like the *wrapped* function for introspection.

    wrapper is the function to be updated
    wrapped is the original function
    assigned is a tuple naming the attributes assigned directly
    from the wrapped function to the wrapper function (defaults to
    functools.WRAPPER_ASSIGNMENTS)
    updated is a tuple naming the attributes of the wrapper that
    are updated with the corresponding attribute from the wrapped
    function (defaults to functools.WRAPPER_UPDATES)

    Returns *wrapper*, so this can be used as a decorator via partial().
    """
    for name in assigned:
        # EAFP: silently skip attributes the wrapped callable lacks
        # (e.g. builtins have no __annotations__).
        try:
            attr_value = getattr(wrapped, name)
        except AttributeError:
            pass
        else:
            setattr(wrapper, name, attr_value)
    for name in updated:
        getattr(wrapper, name).update(getattr(wrapped, name, {}))
    # Issue #17482: assign __wrapped__ last so the __dict__ update above
    # cannot smuggle in a stale copy from the wrapped function.
    wrapper.__wrapped__ = wrapped
    return wrapper
|
| 64 |
+
|
| 65 |
+
def wraps(wrapped,
          assigned = WRAPPER_ASSIGNMENTS,
          updated = WRAPPER_UPDATES):
    """Decorator factory to apply update_wrapper() to a wrapper function

    Returns a decorator that invokes update_wrapper() with the decorated
    function as the wrapper argument and the arguments to wraps() as the
    remaining arguments. Default arguments are as for update_wrapper().
    This is a convenience function to simplify applying partial() to
    update_wrapper().
    """
    # partial() pre-binds everything except the wrapper itself; applying
    # the returned object to a function performs the attribute copy.
    return partial(update_wrapper, wrapped=wrapped,
                   assigned=assigned, updated=updated)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
################################################################################
|
| 81 |
+
### total_ordering class decorator
|
| 82 |
+
################################################################################
|
| 83 |
+
|
| 84 |
+
# The total ordering functions all invoke the root magic method directly
|
| 85 |
+
# rather than using the corresponding operator. This avoids possible
|
| 86 |
+
# infinite recursion that could occur when the operator dispatch logic
|
| 87 |
+
# detects a NotImplemented result and then calls a reflected method.
|
| 88 |
+
|
| 89 |
+
# Each helper below derives one comparison from a single user-supplied
# "root" comparison.  NotImplemented is propagated untouched so reflected
# operations still get their chance.  `NotImplemented` is bound as a
# default argument for fast local lookup.
def _gt_from_lt(self, other, NotImplemented=NotImplemented):
    'Return a > b. Computed by @total_ordering from (not a < b) and (a != b).'
    op_result = type(self).__lt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result and self != other

def _le_from_lt(self, other, NotImplemented=NotImplemented):
    'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).'
    op_result = type(self).__lt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result or self == other

def _ge_from_lt(self, other, NotImplemented=NotImplemented):
    'Return a >= b. Computed by @total_ordering from (not a < b).'
    op_result = type(self).__lt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result

def _ge_from_le(self, other, NotImplemented=NotImplemented):
    'Return a >= b. Computed by @total_ordering from (not a <= b) or (a == b).'
    op_result = type(self).__le__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result or self == other

def _lt_from_le(self, other, NotImplemented=NotImplemented):
    'Return a < b. Computed by @total_ordering from (a <= b) and (a != b).'
    op_result = type(self).__le__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result and self != other

def _gt_from_le(self, other, NotImplemented=NotImplemented):
    'Return a > b. Computed by @total_ordering from (not a <= b).'
    op_result = type(self).__le__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result

def _lt_from_gt(self, other, NotImplemented=NotImplemented):
    'Return a < b. Computed by @total_ordering from (not a > b) and (a != b).'
    op_result = type(self).__gt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result and self != other

def _ge_from_gt(self, other, NotImplemented=NotImplemented):
    'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).'
    op_result = type(self).__gt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result or self == other

def _le_from_gt(self, other, NotImplemented=NotImplemented):
    'Return a <= b. Computed by @total_ordering from (not a > b).'
    op_result = type(self).__gt__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result

def _le_from_ge(self, other, NotImplemented=NotImplemented):
    'Return a <= b. Computed by @total_ordering from (not a >= b) or (a == b).'
    op_result = type(self).__ge__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result or self == other

def _gt_from_ge(self, other, NotImplemented=NotImplemented):
    'Return a > b. Computed by @total_ordering from (a >= b) and (a != b).'
    op_result = type(self).__ge__(self, other)
    if op_result is NotImplemented:
        return op_result
    return op_result and self != other

def _lt_from_ge(self, other, NotImplemented=NotImplemented):
    'Return a < b. Computed by @total_ordering from (not a >= b).'
    op_result = type(self).__ge__(self, other)
    if op_result is NotImplemented:
        return op_result
    return not op_result

# Maps each root operator to the (name, function) pairs that
# @total_ordering may synthesize from it.
_convert = {
    '__lt__': [('__gt__', _gt_from_lt),
               ('__le__', _le_from_lt),
               ('__ge__', _ge_from_lt)],
    '__le__': [('__ge__', _ge_from_le),
               ('__lt__', _lt_from_le),
               ('__gt__', _gt_from_le)],
    '__gt__': [('__lt__', _lt_from_gt),
               ('__ge__', _ge_from_gt),
               ('__le__', _le_from_gt)],
    '__ge__': [('__le__', _le_from_ge),
               ('__gt__', _gt_from_ge),
               ('__lt__', _lt_from_ge)]
}
|
| 187 |
+
|
| 188 |
+
def total_ordering(cls):
    """Class decorator that fills in missing ordering methods"""
    # User-defined comparison ops are exactly those whose implementation
    # differs from object's default (i.e. not inherited from object).
    defined = {
        name
        for name in _convert
        if getattr(cls, name, None) is not getattr(object, name, None)
    }
    if not defined:
        raise ValueError('must define at least one ordering operation: < > <= >=')
    # Lexicographic max picks __lt__ over __le__ over __gt__ over __ge__.
    anchor = max(defined)
    for method_name, synthesized in _convert[anchor]:
        if method_name in defined:
            continue
        synthesized.__name__ = method_name
        setattr(cls, method_name, synthesized)
    return cls
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
################################################################################
|
| 203 |
+
### cmp_to_key() function converter
|
| 204 |
+
################################################################################
|
| 205 |
+
|
| 206 |
+
def cmp_to_key(mycmp):
    """Convert a cmp= function into a key= function"""
    class K(object):
        # Thin wrapper: every comparison delegates to mycmp's
        # negative / zero / positive result.
        __slots__ = ['obj']

        __hash__ = None  # orderable by design, deliberately unhashable

        def __init__(self, obj):
            self.obj = obj

        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0

        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0

        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0

        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0

        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0

    return K
|
| 224 |
+
|
| 225 |
+
try:
|
| 226 |
+
from _functools import cmp_to_key
|
| 227 |
+
except ImportError:
|
| 228 |
+
pass
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
################################################################################
|
| 232 |
+
### reduce() sequence to a single item
|
| 233 |
+
################################################################################
|
| 234 |
+
|
| 235 |
+
# Unique sentinel distinguishing "no initial value given" from initial=None.
_initial_missing = object()

def reduce(function, sequence, initial=_initial_missing):
    """
    reduce(function, iterable[, initial]) -> value

    Apply a function of two arguments cumulatively to the items of a sequence
    or iterable, from left to right, so as to reduce the iterable to a single
    value. For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates
    ((((1+2)+3)+4)+5). If initial is present, it is placed before the items
    of the iterable in the calculation, and serves as a default when the
    iterable is empty.
    """
    iterator = iter(sequence)

    if initial is not _initial_missing:
        accumulator = initial
    else:
        # No seed supplied: the first item becomes the starting value.
        try:
            accumulator = next(iterator)
        except StopIteration:
            raise TypeError(
                "reduce() of empty iterable with no initial value") from None

    for item in iterator:
        accumulator = function(accumulator, item)

    return accumulator
|
| 264 |
+
|
| 265 |
+
try:
|
| 266 |
+
from _functools import reduce
|
| 267 |
+
except ImportError:
|
| 268 |
+
pass
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
################################################################################
|
| 272 |
+
### partial() argument application
|
| 273 |
+
################################################################################
|
| 274 |
+
|
| 275 |
+
# Purely functional, no descriptor behaviour
|
| 276 |
+
class partial:
    """New function with partial application of the given arguments
    and keywords.
    """

    # __dict__ allows arbitrary extra attributes despite __slots__;
    # __weakref__ makes instances weak-referenceable.
    __slots__ = "func", "args", "keywords", "__dict__", "__weakref__"

    def __new__(cls, func, /, *args, **keywords):
        if not callable(func):
            raise TypeError("the first argument must be callable")

        # Flatten nested partials: partial(partial(f, a), b) stores f
        # directly together with the combined positional and keyword args.
        if hasattr(func, "func"):
            args = func.args + args
            keywords = {**func.keywords, **keywords}
            func = func.func

        self = super(partial, cls).__new__(cls)

        self.func = func
        self.args = args
        self.keywords = keywords
        return self

    def __call__(self, /, *args, **keywords):
        # Call-site keywords override the stored ones.
        keywords = {**self.keywords, **keywords}
        return self.func(*self.args, *args, **keywords)

    @recursive_repr()
    def __repr__(self):
        qualname = type(self).__qualname__
        args = [repr(self.func)]
        args.extend(repr(x) for x in self.args)
        args.extend(f"{k}={v!r}" for (k, v) in self.keywords.items())
        # Fully qualify only for the class defined in this module.
        if type(self).__module__ == "functools":
            return f"functools.{qualname}({', '.join(args)})"
        return f"{qualname}({', '.join(args)})"

    def __reduce__(self):
        # Pickle as (type, (callable,)) plus a 4-tuple of state consumed
        # by __setstate__ below.
        return type(self), (self.func,), (self.func, self.args,
               self.keywords or None, self.__dict__ or None)

    def __setstate__(self, state):
        # Validate aggressively: unpickling may feed us arbitrary data.
        if not isinstance(state, tuple):
            raise TypeError("argument to __setstate__ must be a tuple")
        if len(state) != 4:
            raise TypeError(f"expected 4 items in state, got {len(state)}")
        func, args, kwds, namespace = state
        if (not callable(func) or not isinstance(args, tuple) or
           (kwds is not None and not isinstance(kwds, dict)) or
           (namespace is not None and not isinstance(namespace, dict))):
            raise TypeError("invalid partial state")

        args = tuple(args) # just in case it's a subclass
        if kwds is None:
            kwds = {}
        elif type(kwds) is not dict: # XXX does it need to be *exactly* dict?
            kwds = dict(kwds)
        if namespace is None:
            namespace = {}

        self.__dict__ = namespace
        self.func = func
        self.args = args
        self.keywords = kwds
|
| 340 |
+
|
| 341 |
+
try:
|
| 342 |
+
from _functools import partial
|
| 343 |
+
except ImportError:
|
| 344 |
+
pass
|
| 345 |
+
|
| 346 |
+
# Descriptor version
|
| 347 |
+
class partialmethod(object):
    """Method descriptor with partial application of the given arguments
    and keywords.

    Supports wrapping existing descriptors and handles non-descriptor
    callables as instance methods.
    """

    def __init__(self, func, /, *args, **keywords):
        # Accept anything callable OR anything with descriptor binding
        # (e.g. classmethod/staticmethod, which are not callable themselves).
        if not callable(func) and not hasattr(func, "__get__"):
            raise TypeError("{!r} is not callable or a descriptor"
                            .format(func))

        # func could be a descriptor like classmethod which isn't callable,
        # so we can't inherit from partial (it verifies func is callable)
        if isinstance(func, partialmethod):
            # flattening is mandatory in order to place cls/self before all
            # other arguments
            # it's also more efficient since only one function will be called
            self.func = func.func
            self.args = func.args + args
            self.keywords = {**func.keywords, **keywords}
        else:
            self.func = func
            self.args = args
            self.keywords = keywords

    def __repr__(self):
        """Show the wrapped callable plus the stored args and keywords."""
        args = ", ".join(map(repr, self.args))
        keywords = ", ".join("{}={!r}".format(k, v)
                                 for k, v in self.keywords.items())
        format_string = "{module}.{cls}({func}, {args}, {keywords})"
        return format_string.format(module=self.__class__.__module__,
                                    cls=self.__class__.__qualname__,
                                    func=self.func,
                                    args=args,
                                    keywords=keywords)

    def _make_unbound_method(self):
        # Build a plain function that injects cls/self as the first positional
        # argument, ahead of the stored positionals.
        def _method(cls_or_self, /, *args, **keywords):
            keywords = {**self.keywords, **keywords}
            return self.func(cls_or_self, *self.args, *args, **keywords)
        _method.__isabstractmethod__ = self.__isabstractmethod__
        # Expose the originating partialmethod for introspection tools.
        _method._partialmethod = self
        return _method

    def __get__(self, obj, cls=None):
        # Prefer delegating to the wrapped object's own binding behaviour.
        get = getattr(self.func, "__get__", None)
        result = None
        if get is not None:
            new_func = get(obj, cls)
            if new_func is not self.func:
                # Assume __get__ returning something new indicates the
                # creation of an appropriate callable
                result = partial(new_func, *self.args, **self.keywords)
                try:
                    result.__self__ = new_func.__self__
                except AttributeError:
                    pass
        if result is None:
            # If the underlying descriptor didn't do anything, treat this
            # like an instance method
            result = self._make_unbound_method().__get__(obj, cls)
        return result

    @property
    def __isabstractmethod__(self):
        # Mirror the abstractness of the wrapped callable.
        return getattr(self.func, "__isabstractmethod__", False)

    __class_getitem__ = classmethod(GenericAlias)
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
# Helper functions
|
| 420 |
+
|
| 421 |
+
def _unwrap_partial(func):
|
| 422 |
+
while isinstance(func, partial):
|
| 423 |
+
func = func.func
|
| 424 |
+
return func
|
| 425 |
+
|
| 426 |
+
################################################################################
|
| 427 |
+
### LRU Cache function decorator
|
| 428 |
+
################################################################################
|
| 429 |
+
|
| 430 |
+
# Statistics tuple returned by the .cache_info() method of lru_cache wrappers.
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
|
| 431 |
+
|
| 432 |
+
class _HashedSeq(list):
|
| 433 |
+
""" This class guarantees that hash() will be called no more than once
|
| 434 |
+
per element. This is important because the lru_cache() will hash
|
| 435 |
+
the key multiple times on a cache miss.
|
| 436 |
+
|
| 437 |
+
"""
|
| 438 |
+
|
| 439 |
+
__slots__ = 'hashvalue'
|
| 440 |
+
|
| 441 |
+
def __init__(self, tup, hash=hash):
|
| 442 |
+
self[:] = tup
|
| 443 |
+
self.hashvalue = hash(tup)
|
| 444 |
+
|
| 445 |
+
def __hash__(self):
|
| 446 |
+
return self.hashvalue
|
| 447 |
+
|
| 448 |
+
def _make_key(args, kwds, typed,
              kwd_mark = (object(),),
              fasttypes = {int, str},
              tuple=tuple, type=type, len=len):
    """Make a cache key from optionally typed positional and keyword arguments

    The key is constructed in a way that is flat as possible rather than
    as a nested structure that would take more memory.

    If there is only a single argument and its data type is known to cache
    its hash value, then that argument is returned without a wrapper.  This
    saves space and improves lookup speed.

    """
    # Note: the kwd_mark/fasttypes/tuple/type/len defaults exist purely to
    # turn global lookups into fast local lookups on this hot path; callers
    # are never expected to pass them.
    # All of code below relies on kwds preserving the order input by the user.
    # Formerly, we sorted() the kwds before looping.  The new way is *much*
    # faster; however, it means that f(x=1, y=2) will now be treated as a
    # distinct call from f(y=2, x=1) which will be cached separately.
    key = args
    if kwds:
        # kwd_mark is a unique sentinel separating positionals from keywords.
        key += kwd_mark
        for item in kwds.items():
            key += item
    if typed:
        # Append the argument types so e.g. 3 and 3.0 get distinct keys.
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for v in kwds.values())
    elif len(key) == 1 and type(key[0]) in fasttypes:
        # Single int/str argument: these types cache their own hash, so the
        # bare value is a cheaper key than a _HashedSeq wrapper.
        return key[0]
    return _HashedSeq(key)
|
| 478 |
+
|
| 479 |
+
def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info().  Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)

    """
    # The cache internals live inside _lru_cache_wrapper so the
    # implementation can change (there is also a C version); users interact
    # only through cache_info, cache_clear and __wrapped__.

    def build(fn, size):
        # Wrap *fn* and attach the public cache API to the wrapper.
        wrapper = _lru_cache_wrapper(fn, size, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': size, 'typed': typed}
        return update_wrapper(wrapper, fn)

    if isinstance(maxsize, int):
        if maxsize < 0:
            maxsize = 0   # a negative maxsize behaves like "no caching"
    elif callable(maxsize) and isinstance(typed, bool):
        # Bare @lru_cache usage: the decorated function arrived in *maxsize*.
        return build(maxsize, 128)
    elif maxsize is not None:
        raise TypeError(
            'Expected first argument to be an integer, a callable, or None')

    def decorating_function(user_function):
        return build(user_function, maxsize)

    return decorating_function
|
| 524 |
+
|
| 525 |
+
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    """Build the actual caching wrapper around *user_function*.

    Three specializations are produced depending on *maxsize*: no caching
    (0), unbounded caching (None), and bounded LRU caching backed by a
    circular doubly linked list of [PREV, NEXT, KEY, RESULT] links keyed
    by the cache dict.
    """
    # Constants shared by all lru cache instances:
    sentinel = object()          # unique object used to signal cache misses
    make_key = _make_key         # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3   # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get    # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()           # because linkedlist updates aren't threadsafe
    root = []                # root of the circular doubly linked list
    root[:] = [root, root, None, None]     # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            # NOTE: the user function runs outside the lock, so concurrent
            # callers may compute the same key; that race is handled below.
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = (cache_len() >= maxsize)
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
|
| 640 |
+
|
| 641 |
+
try:
|
| 642 |
+
from _functools import _lru_cache_wrapper
|
| 643 |
+
except ImportError:
|
| 644 |
+
pass
|
| 645 |
+
|
| 646 |
+
|
| 647 |
+
################################################################################
|
| 648 |
+
### cache -- simplified access to the infinity cache
|
| 649 |
+
################################################################################
|
| 650 |
+
|
| 651 |
+
def cache(user_function, /):
    """Unbounded function cache ("memoize"): lru_cache with no size limit."""
    unbounded = lru_cache(maxsize=None)
    return unbounded(user_function)
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
################################################################################
|
| 657 |
+
### singledispatch() - single-dispatch generic function decorator
|
| 658 |
+
################################################################################
|
| 659 |
+
|
| 660 |
+
def _c3_merge(sequences):
|
| 661 |
+
"""Merges MROs in *sequences* to a single MRO using the C3 algorithm.
|
| 662 |
+
|
| 663 |
+
Adapted from https://www.python.org/download/releases/2.3/mro/.
|
| 664 |
+
|
| 665 |
+
"""
|
| 666 |
+
result = []
|
| 667 |
+
while True:
|
| 668 |
+
sequences = [s for s in sequences if s] # purge empty sequences
|
| 669 |
+
if not sequences:
|
| 670 |
+
return result
|
| 671 |
+
for s1 in sequences: # find merge candidates among seq heads
|
| 672 |
+
candidate = s1[0]
|
| 673 |
+
for s2 in sequences:
|
| 674 |
+
if candidate in s2[1:]:
|
| 675 |
+
candidate = None
|
| 676 |
+
break # reject the current head, it appears later
|
| 677 |
+
else:
|
| 678 |
+
break
|
| 679 |
+
if candidate is None:
|
| 680 |
+
raise RuntimeError("Inconsistent hierarchy")
|
| 681 |
+
result.append(candidate)
|
| 682 |
+
# remove the chosen candidate
|
| 683 |
+
for seq in sequences:
|
| 684 |
+
if seq[0] == candidate:
|
| 685 |
+
del seq[0]
|
| 686 |
+
|
| 687 |
+
def _c3_mro(cls, abcs=None):
    """Computes the method resolution order using extended C3 linearization.

    If no *abcs* are given, the algorithm works exactly like the built-in C3
    linearization used for method resolution.

    If given, *abcs* is a list of abstract base classes that should be inserted
    into the resulting MRO. Unrelated ABCs are ignored and don't end up in the
    result. The algorithm inserts ABCs where their functionality is introduced,
    i.e. issubclass(cls, abc) returns True for the class itself but returns
    False for all its direct base classes. Implicit ABCs for a given class
    (either registered or inferred from the presence of a special method like
    __len__) are inserted directly after the last ABC explicitly listed in the
    MRO of said class. If two implicit ABCs end up next to each other in the
    resulting MRO, their ordering depends on the order of types in *abcs*.

    """
    # Find the boundary: the index just past the last direct base that is an
    # explicit ABC (detected via the __abstractmethods__ attribute).
    for i, base in enumerate(reversed(cls.__bases__)):
        if hasattr(base, '__abstractmethods__'):
            boundary = len(cls.__bases__) - i
            break  # Bases up to the last explicit ABC are considered first.
    else:
        boundary = 0
    abcs = list(abcs) if abcs else []
    explicit_bases = list(cls.__bases__[:boundary])
    abstract_bases = []
    other_bases = list(cls.__bases__[boundary:])
    for base in abcs:
        if issubclass(cls, base) and not any(
                issubclass(b, base) for b in cls.__bases__
            ):
            # If *cls* is the class that introduces behaviour described by
            # an ABC *base*, insert said ABC to its MRO.
            abstract_bases.append(base)
    # Consumed ABCs must not be offered again to the recursive calls below.
    for base in abstract_bases:
        abcs.remove(base)
    explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases]
    abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases]
    other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases]
    # Standard C3 merge of: the class itself, the linearizations of all
    # three base groups, then the base lists themselves.
    return _c3_merge(
        [[cls]] +
        explicit_c3_mros + abstract_c3_mros + other_c3_mros +
        [explicit_bases] + [abstract_bases] + [other_bases]
    )
|
| 731 |
+
|
| 732 |
+
def _compose_mro(cls, types):
    """Calculates the method resolution order for a given class *cls*.

    Includes relevant abstract base classes (with their respective bases) from
    the *types* iterable. Uses a modified C3 linearization algorithm.

    """
    bases = set(cls.__mro__)
    # Remove entries which are already present in the __mro__ or unrelated.
    def is_related(typ):
        return (typ not in bases and hasattr(typ, '__mro__')
                                 and not isinstance(typ, GenericAlias)
                                 and issubclass(cls, typ))
    types = [n for n in types if is_related(n)]
    # Remove entries which are strict bases of other entries (they will end up
    # in the MRO anyway.
    def is_strict_base(typ):
        for other in types:
            if typ != other and typ in other.__mro__:
                return True
        return False
    types = [n for n in types if not is_strict_base(n)]
    # Subclasses of the ABCs in *types* which are also implemented by
    # *cls* can be used to stabilize ABC ordering.
    type_set = set(types)
    mro = []
    for typ in types:
        found = []
        for sub in typ.__subclasses__():
            if sub not in bases and issubclass(cls, sub):
                # Record only the portion of sub's MRO that matters here.
                found.append([s for s in sub.__mro__ if s in type_set])
        if not found:
            mro.append(typ)
            continue
        # Favor subclasses with the biggest number of useful bases
        found.sort(key=len, reverse=True)
        for sub in found:
            for subcls in sub:
                if subcls not in mro:
                    mro.append(subcls)
    return _c3_mro(cls, abcs=mro)
|
| 773 |
+
|
| 774 |
+
def _find_impl(cls, registry):
|
| 775 |
+
"""Returns the best matching implementation from *registry* for type *cls*.
|
| 776 |
+
|
| 777 |
+
Where there is no registered implementation for a specific type, its method
|
| 778 |
+
resolution order is used to find a more generic implementation.
|
| 779 |
+
|
| 780 |
+
Note: if *registry* does not contain an implementation for the base
|
| 781 |
+
*object* type, this function may return None.
|
| 782 |
+
|
| 783 |
+
"""
|
| 784 |
+
mro = _compose_mro(cls, registry.keys())
|
| 785 |
+
match = None
|
| 786 |
+
for t in mro:
|
| 787 |
+
if match is not None:
|
| 788 |
+
# If *match* is an implicit ABC but there is another unrelated,
|
| 789 |
+
# equally matching implicit ABC, refuse the temptation to guess.
|
| 790 |
+
if (t in registry and t not in cls.__mro__
|
| 791 |
+
and match not in cls.__mro__
|
| 792 |
+
and not issubclass(match, t)):
|
| 793 |
+
raise RuntimeError("Ambiguous dispatch: {} or {}".format(
|
| 794 |
+
match, t))
|
| 795 |
+
break
|
| 796 |
+
if t in registry:
|
| 797 |
+
match = t
|
| 798 |
+
return registry.get(match)
|
| 799 |
+
|
| 800 |
+
def singledispatch(func):
    """Single-dispatch generic function decorator.

    Transforms a function into a generic function, which can have different
    behaviours depending upon the type of its first argument. The decorated
    function acts as the default implementation, and additional
    implementations can be registered using the register() attribute of the
    generic function.
    """
    # There are many programs that use functools without singledispatch, so we
    # trade-off making singledispatch marginally slower for the benefit of
    # making start-up of such applications slightly faster.
    import types, weakref

    registry = {}                                  # dispatch type -> impl
    dispatch_cache = weakref.WeakKeyDictionary()   # resolved type -> impl
    cache_token = None   # ABC registry token; invalidates dispatch_cache

    def dispatch(cls):
        """generic_func.dispatch(cls) -> <function implementation>

        Runs the dispatch algorithm to return the best available implementation
        for the given *cls* registered on *generic_func*.

        """
        nonlocal cache_token
        if cache_token is not None:
            # ABC registrations may have changed since the last dispatch;
            # a changed token means the cached resolutions are stale.
            current_token = get_cache_token()
            if cache_token != current_token:
                dispatch_cache.clear()
                cache_token = current_token
        try:
            impl = dispatch_cache[cls]
        except KeyError:
            try:
                impl = registry[cls]
            except KeyError:
                impl = _find_impl(cls, registry)
            dispatch_cache[cls] = impl
        return impl

    def _is_valid_dispatch_type(cls):
        # Parameterized generics like list[int] are not valid targets.
        return isinstance(cls, type) and not isinstance(cls, GenericAlias)

    def register(cls, func=None):
        """generic_func.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a *generic_func*.

        """
        nonlocal cache_token
        if _is_valid_dispatch_type(cls):
            if func is None:
                # Decorator-factory form: @fn.register(SomeClass).
                return lambda f: register(cls, f)
        else:
            if func is not None:
                raise TypeError(
                    f"Invalid first argument to `register()`. "
                    f"{cls!r} is not a class."
                )
            # Plain @fn.register form: dispatch type comes from the first
            # annotated parameter of the decorated function.
            ann = getattr(cls, '__annotations__', {})
            if not ann:
                raise TypeError(
                    f"Invalid first argument to `register()`: {cls!r}. "
                    f"Use either `@register(some_class)` or plain `@register` "
                    f"on an annotated function."
                )
            func = cls

            # only import typing if annotation parsing is necessary
            from typing import get_type_hints
            argname, cls = next(iter(get_type_hints(func).items()))
            if not _is_valid_dispatch_type(cls):
                raise TypeError(
                    f"Invalid annotation for {argname!r}. "
                    f"{cls!r} is not a class."
                )

        registry[cls] = func
        if cache_token is None and hasattr(cls, '__abstractmethods__'):
            # First ABC registered: start tracking the ABC cache token.
            cache_token = get_cache_token()
        dispatch_cache.clear()
        return func

    def wrapper(*args, **kw):
        if not args:
            raise TypeError(f'{funcname} requires at least '
                            '1 positional argument')

        return dispatch(args[0].__class__)(*args, **kw)

    funcname = getattr(func, '__name__', 'singledispatch function')
    registry[object] = func
    wrapper.register = register
    wrapper.dispatch = dispatch
    wrapper.registry = types.MappingProxyType(registry)
    wrapper._clear_cache = dispatch_cache.clear
    update_wrapper(wrapper, func)
    return wrapper
|
| 899 |
+
|
| 900 |
+
|
| 901 |
+
# Descriptor version
|
| 902 |
+
class singledispatchmethod:
    """Descriptor implementing a single-dispatch generic method.

    Wraps plain callables as well as existing descriptors (such as
    classmethod or staticmethod); the implementation invoked depends on
    the type of the first non-self argument.
    """

    def __init__(self, func):
        if not (callable(func) or hasattr(func, "__get__")):
            raise TypeError(f"{func!r} is not callable or a descriptor")
        self.dispatcher = singledispatch(func)
        self.func = func

    def register(self, cls, method=None):
        """generic_method.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a *generic_method*.
        """
        return self.dispatcher.register(cls, func=method)

    def __get__(self, obj, cls=None):
        # Binding produces a fresh callable that dispatches on the first
        # argument's type and then binds the chosen implementation.
        def _method(*args, **kwargs):
            impl = self.dispatcher.dispatch(args[0].__class__)
            return impl.__get__(obj, cls)(*args, **kwargs)

        _method.__isabstractmethod__ = self.__isabstractmethod__
        _method.register = self.register
        update_wrapper(_method, self.func)
        return _method

    @property
    def __isabstractmethod__(self):
        # Mirror the abstractness of the wrapped callable.
        return getattr(self.func, '__isabstractmethod__', False)
|
| 936 |
+
|
| 937 |
+
|
| 938 |
+
################################################################################
|
| 939 |
+
### cached_property() - computed once per instance, cached as attribute
|
| 940 |
+
################################################################################
|
| 941 |
+
|
| 942 |
+
# Sentinel for cached_property: distinguishes "not cached yet" from a cached None.
_NOT_FOUND = object()
|
| 943 |
+
|
| 944 |
+
|
| 945 |
+
class cached_property:
    """Descriptor that caches the wrapped method's result on the instance.

    The underlying function runs at most once per instance (guarded by a
    lock against concurrent first access); afterwards the value is served
    straight from the instance's __dict__.
    """

    def __init__(self, func):
        self.func = func
        self.attrname = None        # filled in by __set_name__
        self.__doc__ = func.__doc__
        self.lock = RLock()

    def __set_name__(self, owner, name):
        if self.attrname is None:
            self.attrname = name
            return
        if name != self.attrname:
            raise TypeError(
                "Cannot assign the same cached_property to two different names "
                f"({self.attrname!r} and {name!r})."
            )

    def __get__(self, instance, owner=None):
        if instance is None:
            # Class access returns the descriptor itself.
            return self
        if self.attrname is None:
            raise TypeError(
                "Cannot use cached_property instance without calling __set_name__ on it.")
        try:
            cache = instance.__dict__
        except AttributeError:  # e.g. classes defining __slots__
            msg = (
                f"No '__dict__' attribute on {type(instance).__name__!r} "
                f"instance to cache {self.attrname!r} property."
            )
            raise TypeError(msg) from None
        val = cache.get(self.attrname, _NOT_FOUND)
        if val is not _NOT_FOUND:
            return val
        with self.lock:
            # Re-check under the lock: another thread may have filled the
            # cache while we were waiting.
            val = cache.get(self.attrname, _NOT_FOUND)
            if val is not _NOT_FOUND:
                return val
            val = self.func(instance)
            try:
                cache[self.attrname] = val
            except TypeError:
                msg = (
                    f"The '__dict__' attribute on {type(instance).__name__!r} instance "
                    f"does not support item assignment for caching {self.attrname!r} property."
                )
                raise TypeError(msg) from None
        return val

    __class_getitem__ = classmethod(GenericAlias)
|