Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +4 -0
- llava/lib/libatomic.so.1.2.0 +3 -0
- llava/lib/libncurses.a +3 -0
- llava/lib/libtinfow.so.6.4 +3 -0
- llava/lib/libz.so.1 +3 -0
- llava/lib/python3.10/_compat_pickle.py +251 -0
- llava/lib/python3.10/_compression.py +162 -0
- llava/lib/python3.10/_py_abc.py +147 -0
- llava/lib/python3.10/_sitebuiltins.py +103 -0
- llava/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py.orig +986 -0
- llava/lib/python3.10/_threading_local.py +242 -0
- llava/lib/python3.10/cgi.py +1004 -0
- llava/lib/python3.10/cmd.py +401 -0
- llava/lib/python3.10/enum.py +1053 -0
- llava/lib/python3.10/fileinput.py +462 -0
- llava/lib/python3.10/heapq.py +601 -0
- llava/lib/python3.10/imp.py +346 -0
- llava/lib/python3.10/inspect.py +0 -0
- llava/lib/python3.10/lzma.py +356 -0
- llava/lib/python3.10/pprint.py +670 -0
- llava/lib/python3.10/profile.py +611 -0
- llava/lib/python3.10/pty.py +187 -0
- llava/lib/python3.10/runpy.py +321 -0
- llava/lib/python3.10/sndhdr.py +257 -0
- llava/lib/python3.10/socket.py +972 -0
- llava/lib/python3.10/sre_compile.py +808 -0
- llava/lib/python3.10/struct.py +15 -0
- llava/lib/python3.10/symtable.py +322 -0
- llava/lib/python3.10/turtle.py +0 -0
- llava/lib/python3.10/uuid.py +733 -0
- llava/lib/python3.10/xdrlib.py +241 -0
- llava/lib/python3.10/zipfile.py +2510 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_cpu_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_compositeexplicitautograd_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sinh_ops.h +50 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_fused_adamw_native.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_backward.h +30 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_backward_cuda_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_efficient_attention_backward_ops.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_compressed_tensor_unsafe_native.h +21 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_sum_backward.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_standard_gamma_grad.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_to_cpu_compositeimplicitautograd_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_bicubic2d_aa_meta_dispatch.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_bilinear2d_aa_cpu_dispatch.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/acosh_compositeexplicitautogradnonfunctional_dispatch.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/adjoint.h +30 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/cauchy_compositeexplicitautograd_dispatch.h +25 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/ceil_meta.h +27 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/cov_native.h +21 -0
.gitattributes
CHANGED
|
@@ -418,3 +418,7 @@ llava/lib/libform.a filter=lfs diff=lfs merge=lfs -text
|
|
| 418 |
llava/lib/liblsan.so filter=lfs diff=lfs merge=lfs -text
|
| 419 |
llava/lib/libgcc_s.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 420 |
llava/lib/libtinfow.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 418 |
llava/lib/liblsan.so filter=lfs diff=lfs merge=lfs -text
|
| 419 |
llava/lib/libgcc_s.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 420 |
llava/lib/libtinfow.so filter=lfs diff=lfs merge=lfs -text
|
| 421 |
+
llava/lib/libz.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 422 |
+
llava/lib/libatomic.so.1.2.0 filter=lfs diff=lfs merge=lfs -text
|
| 423 |
+
llava/lib/libncurses.a filter=lfs diff=lfs merge=lfs -text
|
| 424 |
+
llava/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
llava/lib/libatomic.so.1.2.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2f1a92c18f01c13c9a89908fb86a7309ae5b89a882db9914114957bc4b6fed92
|
| 3 |
+
size 143648
|
llava/lib/libncurses.a
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:873bc902fcba042e3e980451cae21218095aa52298dcce1a9ac7e415beecb1f0
|
| 3 |
+
size 544910
|
llava/lib/libtinfow.so.6.4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f5679c9d7cc0ec2d6b08c6058198667efe71f657e89dcc0bd7adcf5d6cbc80c5
|
| 3 |
+
size 287080
|
llava/lib/libz.so.1
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0b0e682a9dc7fd4895a6783288f851b793dc89633f28714027974fa4d66f3914
|
| 3 |
+
size 124744
|
llava/lib/python3.10/_compat_pickle.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is used to map the old Python 2 names to the new names used in
|
| 2 |
+
# Python 3 for the pickle module. This needed to make pickle streams
|
| 3 |
+
# generated with Python 2 loadable by Python 3.
|
| 4 |
+
|
| 5 |
+
# This is a copy of lib2to3.fixes.fix_imports.MAPPING. We cannot import
|
| 6 |
+
# lib2to3 and use the mapping defined there, because lib2to3 uses pickle.
|
| 7 |
+
# Thus, this could cause the module to be imported recursively.
|
| 8 |
+
IMPORT_MAPPING = {
|
| 9 |
+
'__builtin__' : 'builtins',
|
| 10 |
+
'copy_reg': 'copyreg',
|
| 11 |
+
'Queue': 'queue',
|
| 12 |
+
'SocketServer': 'socketserver',
|
| 13 |
+
'ConfigParser': 'configparser',
|
| 14 |
+
'repr': 'reprlib',
|
| 15 |
+
'tkFileDialog': 'tkinter.filedialog',
|
| 16 |
+
'tkSimpleDialog': 'tkinter.simpledialog',
|
| 17 |
+
'tkColorChooser': 'tkinter.colorchooser',
|
| 18 |
+
'tkCommonDialog': 'tkinter.commondialog',
|
| 19 |
+
'Dialog': 'tkinter.dialog',
|
| 20 |
+
'Tkdnd': 'tkinter.dnd',
|
| 21 |
+
'tkFont': 'tkinter.font',
|
| 22 |
+
'tkMessageBox': 'tkinter.messagebox',
|
| 23 |
+
'ScrolledText': 'tkinter.scrolledtext',
|
| 24 |
+
'Tkconstants': 'tkinter.constants',
|
| 25 |
+
'Tix': 'tkinter.tix',
|
| 26 |
+
'ttk': 'tkinter.ttk',
|
| 27 |
+
'Tkinter': 'tkinter',
|
| 28 |
+
'markupbase': '_markupbase',
|
| 29 |
+
'_winreg': 'winreg',
|
| 30 |
+
'thread': '_thread',
|
| 31 |
+
'dummy_thread': '_dummy_thread',
|
| 32 |
+
'dbhash': 'dbm.bsd',
|
| 33 |
+
'dumbdbm': 'dbm.dumb',
|
| 34 |
+
'dbm': 'dbm.ndbm',
|
| 35 |
+
'gdbm': 'dbm.gnu',
|
| 36 |
+
'xmlrpclib': 'xmlrpc.client',
|
| 37 |
+
'SimpleXMLRPCServer': 'xmlrpc.server',
|
| 38 |
+
'httplib': 'http.client',
|
| 39 |
+
'htmlentitydefs' : 'html.entities',
|
| 40 |
+
'HTMLParser' : 'html.parser',
|
| 41 |
+
'Cookie': 'http.cookies',
|
| 42 |
+
'cookielib': 'http.cookiejar',
|
| 43 |
+
'BaseHTTPServer': 'http.server',
|
| 44 |
+
'test.test_support': 'test.support',
|
| 45 |
+
'commands': 'subprocess',
|
| 46 |
+
'urlparse' : 'urllib.parse',
|
| 47 |
+
'robotparser' : 'urllib.robotparser',
|
| 48 |
+
'urllib2': 'urllib.request',
|
| 49 |
+
'anydbm': 'dbm',
|
| 50 |
+
'_abcoll' : 'collections.abc',
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# This contains rename rules that are easy to handle. We ignore the more
|
| 55 |
+
# complex stuff (e.g. mapping the names in the urllib and types modules).
|
| 56 |
+
# These rules should be run before import names are fixed.
|
| 57 |
+
NAME_MAPPING = {
|
| 58 |
+
('__builtin__', 'xrange'): ('builtins', 'range'),
|
| 59 |
+
('__builtin__', 'reduce'): ('functools', 'reduce'),
|
| 60 |
+
('__builtin__', 'intern'): ('sys', 'intern'),
|
| 61 |
+
('__builtin__', 'unichr'): ('builtins', 'chr'),
|
| 62 |
+
('__builtin__', 'unicode'): ('builtins', 'str'),
|
| 63 |
+
('__builtin__', 'long'): ('builtins', 'int'),
|
| 64 |
+
('itertools', 'izip'): ('builtins', 'zip'),
|
| 65 |
+
('itertools', 'imap'): ('builtins', 'map'),
|
| 66 |
+
('itertools', 'ifilter'): ('builtins', 'filter'),
|
| 67 |
+
('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'),
|
| 68 |
+
('itertools', 'izip_longest'): ('itertools', 'zip_longest'),
|
| 69 |
+
('UserDict', 'IterableUserDict'): ('collections', 'UserDict'),
|
| 70 |
+
('UserList', 'UserList'): ('collections', 'UserList'),
|
| 71 |
+
('UserString', 'UserString'): ('collections', 'UserString'),
|
| 72 |
+
('whichdb', 'whichdb'): ('dbm', 'whichdb'),
|
| 73 |
+
('_socket', 'fromfd'): ('socket', 'fromfd'),
|
| 74 |
+
('_multiprocessing', 'Connection'): ('multiprocessing.connection', 'Connection'),
|
| 75 |
+
('multiprocessing.process', 'Process'): ('multiprocessing.context', 'Process'),
|
| 76 |
+
('multiprocessing.forking', 'Popen'): ('multiprocessing.popen_fork', 'Popen'),
|
| 77 |
+
('urllib', 'ContentTooShortError'): ('urllib.error', 'ContentTooShortError'),
|
| 78 |
+
('urllib', 'getproxies'): ('urllib.request', 'getproxies'),
|
| 79 |
+
('urllib', 'pathname2url'): ('urllib.request', 'pathname2url'),
|
| 80 |
+
('urllib', 'quote_plus'): ('urllib.parse', 'quote_plus'),
|
| 81 |
+
('urllib', 'quote'): ('urllib.parse', 'quote'),
|
| 82 |
+
('urllib', 'unquote_plus'): ('urllib.parse', 'unquote_plus'),
|
| 83 |
+
('urllib', 'unquote'): ('urllib.parse', 'unquote'),
|
| 84 |
+
('urllib', 'url2pathname'): ('urllib.request', 'url2pathname'),
|
| 85 |
+
('urllib', 'urlcleanup'): ('urllib.request', 'urlcleanup'),
|
| 86 |
+
('urllib', 'urlencode'): ('urllib.parse', 'urlencode'),
|
| 87 |
+
('urllib', 'urlopen'): ('urllib.request', 'urlopen'),
|
| 88 |
+
('urllib', 'urlretrieve'): ('urllib.request', 'urlretrieve'),
|
| 89 |
+
('urllib2', 'HTTPError'): ('urllib.error', 'HTTPError'),
|
| 90 |
+
('urllib2', 'URLError'): ('urllib.error', 'URLError'),
|
| 91 |
+
}
|
| 92 |
+
|
| 93 |
+
PYTHON2_EXCEPTIONS = (
|
| 94 |
+
"ArithmeticError",
|
| 95 |
+
"AssertionError",
|
| 96 |
+
"AttributeError",
|
| 97 |
+
"BaseException",
|
| 98 |
+
"BufferError",
|
| 99 |
+
"BytesWarning",
|
| 100 |
+
"DeprecationWarning",
|
| 101 |
+
"EOFError",
|
| 102 |
+
"EnvironmentError",
|
| 103 |
+
"Exception",
|
| 104 |
+
"FloatingPointError",
|
| 105 |
+
"FutureWarning",
|
| 106 |
+
"GeneratorExit",
|
| 107 |
+
"IOError",
|
| 108 |
+
"ImportError",
|
| 109 |
+
"ImportWarning",
|
| 110 |
+
"IndentationError",
|
| 111 |
+
"IndexError",
|
| 112 |
+
"KeyError",
|
| 113 |
+
"KeyboardInterrupt",
|
| 114 |
+
"LookupError",
|
| 115 |
+
"MemoryError",
|
| 116 |
+
"NameError",
|
| 117 |
+
"NotImplementedError",
|
| 118 |
+
"OSError",
|
| 119 |
+
"OverflowError",
|
| 120 |
+
"PendingDeprecationWarning",
|
| 121 |
+
"ReferenceError",
|
| 122 |
+
"RuntimeError",
|
| 123 |
+
"RuntimeWarning",
|
| 124 |
+
# StandardError is gone in Python 3, so we map it to Exception
|
| 125 |
+
"StopIteration",
|
| 126 |
+
"SyntaxError",
|
| 127 |
+
"SyntaxWarning",
|
| 128 |
+
"SystemError",
|
| 129 |
+
"SystemExit",
|
| 130 |
+
"TabError",
|
| 131 |
+
"TypeError",
|
| 132 |
+
"UnboundLocalError",
|
| 133 |
+
"UnicodeDecodeError",
|
| 134 |
+
"UnicodeEncodeError",
|
| 135 |
+
"UnicodeError",
|
| 136 |
+
"UnicodeTranslateError",
|
| 137 |
+
"UnicodeWarning",
|
| 138 |
+
"UserWarning",
|
| 139 |
+
"ValueError",
|
| 140 |
+
"Warning",
|
| 141 |
+
"ZeroDivisionError",
|
| 142 |
+
)
|
| 143 |
+
|
| 144 |
+
try:
|
| 145 |
+
WindowsError
|
| 146 |
+
except NameError:
|
| 147 |
+
pass
|
| 148 |
+
else:
|
| 149 |
+
PYTHON2_EXCEPTIONS += ("WindowsError",)
|
| 150 |
+
|
| 151 |
+
for excname in PYTHON2_EXCEPTIONS:
|
| 152 |
+
NAME_MAPPING[("exceptions", excname)] = ("builtins", excname)
|
| 153 |
+
|
| 154 |
+
MULTIPROCESSING_EXCEPTIONS = (
|
| 155 |
+
'AuthenticationError',
|
| 156 |
+
'BufferTooShort',
|
| 157 |
+
'ProcessError',
|
| 158 |
+
'TimeoutError',
|
| 159 |
+
)
|
| 160 |
+
|
| 161 |
+
for excname in MULTIPROCESSING_EXCEPTIONS:
|
| 162 |
+
NAME_MAPPING[("multiprocessing", excname)] = ("multiprocessing.context", excname)
|
| 163 |
+
|
| 164 |
+
# Same, but for 3.x to 2.x
|
| 165 |
+
REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items())
|
| 166 |
+
assert len(REVERSE_IMPORT_MAPPING) == len(IMPORT_MAPPING)
|
| 167 |
+
REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items())
|
| 168 |
+
assert len(REVERSE_NAME_MAPPING) == len(NAME_MAPPING)
|
| 169 |
+
|
| 170 |
+
# Non-mutual mappings.
|
| 171 |
+
|
| 172 |
+
IMPORT_MAPPING.update({
|
| 173 |
+
'cPickle': 'pickle',
|
| 174 |
+
'_elementtree': 'xml.etree.ElementTree',
|
| 175 |
+
'FileDialog': 'tkinter.filedialog',
|
| 176 |
+
'SimpleDialog': 'tkinter.simpledialog',
|
| 177 |
+
'DocXMLRPCServer': 'xmlrpc.server',
|
| 178 |
+
'SimpleHTTPServer': 'http.server',
|
| 179 |
+
'CGIHTTPServer': 'http.server',
|
| 180 |
+
# For compatibility with broken pickles saved in old Python 3 versions
|
| 181 |
+
'UserDict': 'collections',
|
| 182 |
+
'UserList': 'collections',
|
| 183 |
+
'UserString': 'collections',
|
| 184 |
+
'whichdb': 'dbm',
|
| 185 |
+
'StringIO': 'io',
|
| 186 |
+
'cStringIO': 'io',
|
| 187 |
+
})
|
| 188 |
+
|
| 189 |
+
REVERSE_IMPORT_MAPPING.update({
|
| 190 |
+
'_bz2': 'bz2',
|
| 191 |
+
'_dbm': 'dbm',
|
| 192 |
+
'_functools': 'functools',
|
| 193 |
+
'_gdbm': 'gdbm',
|
| 194 |
+
'_pickle': 'pickle',
|
| 195 |
+
})
|
| 196 |
+
|
| 197 |
+
NAME_MAPPING.update({
|
| 198 |
+
('__builtin__', 'basestring'): ('builtins', 'str'),
|
| 199 |
+
('exceptions', 'StandardError'): ('builtins', 'Exception'),
|
| 200 |
+
('UserDict', 'UserDict'): ('collections', 'UserDict'),
|
| 201 |
+
('socket', '_socketobject'): ('socket', 'SocketType'),
|
| 202 |
+
})
|
| 203 |
+
|
| 204 |
+
REVERSE_NAME_MAPPING.update({
|
| 205 |
+
('_functools', 'reduce'): ('__builtin__', 'reduce'),
|
| 206 |
+
('tkinter.filedialog', 'FileDialog'): ('FileDialog', 'FileDialog'),
|
| 207 |
+
('tkinter.filedialog', 'LoadFileDialog'): ('FileDialog', 'LoadFileDialog'),
|
| 208 |
+
('tkinter.filedialog', 'SaveFileDialog'): ('FileDialog', 'SaveFileDialog'),
|
| 209 |
+
('tkinter.simpledialog', 'SimpleDialog'): ('SimpleDialog', 'SimpleDialog'),
|
| 210 |
+
('xmlrpc.server', 'ServerHTMLDoc'): ('DocXMLRPCServer', 'ServerHTMLDoc'),
|
| 211 |
+
('xmlrpc.server', 'XMLRPCDocGenerator'):
|
| 212 |
+
('DocXMLRPCServer', 'XMLRPCDocGenerator'),
|
| 213 |
+
('xmlrpc.server', 'DocXMLRPCRequestHandler'):
|
| 214 |
+
('DocXMLRPCServer', 'DocXMLRPCRequestHandler'),
|
| 215 |
+
('xmlrpc.server', 'DocXMLRPCServer'):
|
| 216 |
+
('DocXMLRPCServer', 'DocXMLRPCServer'),
|
| 217 |
+
('xmlrpc.server', 'DocCGIXMLRPCRequestHandler'):
|
| 218 |
+
('DocXMLRPCServer', 'DocCGIXMLRPCRequestHandler'),
|
| 219 |
+
('http.server', 'SimpleHTTPRequestHandler'):
|
| 220 |
+
('SimpleHTTPServer', 'SimpleHTTPRequestHandler'),
|
| 221 |
+
('http.server', 'CGIHTTPRequestHandler'):
|
| 222 |
+
('CGIHTTPServer', 'CGIHTTPRequestHandler'),
|
| 223 |
+
('_socket', 'socket'): ('socket', '_socketobject'),
|
| 224 |
+
})
|
| 225 |
+
|
| 226 |
+
PYTHON3_OSERROR_EXCEPTIONS = (
|
| 227 |
+
'BrokenPipeError',
|
| 228 |
+
'ChildProcessError',
|
| 229 |
+
'ConnectionAbortedError',
|
| 230 |
+
'ConnectionError',
|
| 231 |
+
'ConnectionRefusedError',
|
| 232 |
+
'ConnectionResetError',
|
| 233 |
+
'FileExistsError',
|
| 234 |
+
'FileNotFoundError',
|
| 235 |
+
'InterruptedError',
|
| 236 |
+
'IsADirectoryError',
|
| 237 |
+
'NotADirectoryError',
|
| 238 |
+
'PermissionError',
|
| 239 |
+
'ProcessLookupError',
|
| 240 |
+
'TimeoutError',
|
| 241 |
+
)
|
| 242 |
+
|
| 243 |
+
for excname in PYTHON3_OSERROR_EXCEPTIONS:
|
| 244 |
+
REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'OSError')
|
| 245 |
+
|
| 246 |
+
PYTHON3_IMPORTERROR_EXCEPTIONS = (
|
| 247 |
+
'ModuleNotFoundError',
|
| 248 |
+
)
|
| 249 |
+
|
| 250 |
+
for excname in PYTHON3_IMPORTERROR_EXCEPTIONS:
|
| 251 |
+
REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'ImportError')
|
llava/lib/python3.10/_compression.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Internal classes used by the gzip, lzma and bz2 modules"""
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE # Compressed data read chunk size
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class BaseStream(io.BufferedIOBase):
|
| 10 |
+
"""Mode-checking helper functions."""
|
| 11 |
+
|
| 12 |
+
def _check_not_closed(self):
|
| 13 |
+
if self.closed:
|
| 14 |
+
raise ValueError("I/O operation on closed file")
|
| 15 |
+
|
| 16 |
+
def _check_can_read(self):
|
| 17 |
+
if not self.readable():
|
| 18 |
+
raise io.UnsupportedOperation("File not open for reading")
|
| 19 |
+
|
| 20 |
+
def _check_can_write(self):
|
| 21 |
+
if not self.writable():
|
| 22 |
+
raise io.UnsupportedOperation("File not open for writing")
|
| 23 |
+
|
| 24 |
+
def _check_can_seek(self):
|
| 25 |
+
if not self.readable():
|
| 26 |
+
raise io.UnsupportedOperation("Seeking is only supported "
|
| 27 |
+
"on files open for reading")
|
| 28 |
+
if not self.seekable():
|
| 29 |
+
raise io.UnsupportedOperation("The underlying file object "
|
| 30 |
+
"does not support seeking")
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class DecompressReader(io.RawIOBase):
|
| 34 |
+
"""Adapts the decompressor API to a RawIOBase reader API"""
|
| 35 |
+
|
| 36 |
+
def readable(self):
|
| 37 |
+
return True
|
| 38 |
+
|
| 39 |
+
def __init__(self, fp, decomp_factory, trailing_error=(), **decomp_args):
|
| 40 |
+
self._fp = fp
|
| 41 |
+
self._eof = False
|
| 42 |
+
self._pos = 0 # Current offset in decompressed stream
|
| 43 |
+
|
| 44 |
+
# Set to size of decompressed stream once it is known, for SEEK_END
|
| 45 |
+
self._size = -1
|
| 46 |
+
|
| 47 |
+
# Save the decompressor factory and arguments.
|
| 48 |
+
# If the file contains multiple compressed streams, each
|
| 49 |
+
# stream will need a separate decompressor object. A new decompressor
|
| 50 |
+
# object is also needed when implementing a backwards seek().
|
| 51 |
+
self._decomp_factory = decomp_factory
|
| 52 |
+
self._decomp_args = decomp_args
|
| 53 |
+
self._decompressor = self._decomp_factory(**self._decomp_args)
|
| 54 |
+
|
| 55 |
+
# Exception class to catch from decompressor signifying invalid
|
| 56 |
+
# trailing data to ignore
|
| 57 |
+
self._trailing_error = trailing_error
|
| 58 |
+
|
| 59 |
+
def close(self):
|
| 60 |
+
self._decompressor = None
|
| 61 |
+
return super().close()
|
| 62 |
+
|
| 63 |
+
def seekable(self):
|
| 64 |
+
return self._fp.seekable()
|
| 65 |
+
|
| 66 |
+
def readinto(self, b):
|
| 67 |
+
with memoryview(b) as view, view.cast("B") as byte_view:
|
| 68 |
+
data = self.read(len(byte_view))
|
| 69 |
+
byte_view[:len(data)] = data
|
| 70 |
+
return len(data)
|
| 71 |
+
|
| 72 |
+
def read(self, size=-1):
|
| 73 |
+
if size < 0:
|
| 74 |
+
return self.readall()
|
| 75 |
+
|
| 76 |
+
if not size or self._eof:
|
| 77 |
+
return b""
|
| 78 |
+
data = None # Default if EOF is encountered
|
| 79 |
+
# Depending on the input data, our call to the decompressor may not
|
| 80 |
+
# return any data. In this case, try again after reading another block.
|
| 81 |
+
while True:
|
| 82 |
+
if self._decompressor.eof:
|
| 83 |
+
rawblock = (self._decompressor.unused_data or
|
| 84 |
+
self._fp.read(BUFFER_SIZE))
|
| 85 |
+
if not rawblock:
|
| 86 |
+
break
|
| 87 |
+
# Continue to next stream.
|
| 88 |
+
self._decompressor = self._decomp_factory(
|
| 89 |
+
**self._decomp_args)
|
| 90 |
+
try:
|
| 91 |
+
data = self._decompressor.decompress(rawblock, size)
|
| 92 |
+
except self._trailing_error:
|
| 93 |
+
# Trailing data isn't a valid compressed stream; ignore it.
|
| 94 |
+
break
|
| 95 |
+
else:
|
| 96 |
+
if self._decompressor.needs_input:
|
| 97 |
+
rawblock = self._fp.read(BUFFER_SIZE)
|
| 98 |
+
if not rawblock:
|
| 99 |
+
raise EOFError("Compressed file ended before the "
|
| 100 |
+
"end-of-stream marker was reached")
|
| 101 |
+
else:
|
| 102 |
+
rawblock = b""
|
| 103 |
+
data = self._decompressor.decompress(rawblock, size)
|
| 104 |
+
if data:
|
| 105 |
+
break
|
| 106 |
+
if not data:
|
| 107 |
+
self._eof = True
|
| 108 |
+
self._size = self._pos
|
| 109 |
+
return b""
|
| 110 |
+
self._pos += len(data)
|
| 111 |
+
return data
|
| 112 |
+
|
| 113 |
+
def readall(self):
|
| 114 |
+
chunks = []
|
| 115 |
+
# sys.maxsize means the max length of output buffer is unlimited,
|
| 116 |
+
# so that the whole input buffer can be decompressed within one
|
| 117 |
+
# .decompress() call.
|
| 118 |
+
while data := self.read(sys.maxsize):
|
| 119 |
+
chunks.append(data)
|
| 120 |
+
|
| 121 |
+
return b"".join(chunks)
|
| 122 |
+
|
| 123 |
+
# Rewind the file to the beginning of the data stream.
|
| 124 |
+
def _rewind(self):
|
| 125 |
+
self._fp.seek(0)
|
| 126 |
+
self._eof = False
|
| 127 |
+
self._pos = 0
|
| 128 |
+
self._decompressor = self._decomp_factory(**self._decomp_args)
|
| 129 |
+
|
| 130 |
+
def seek(self, offset, whence=io.SEEK_SET):
|
| 131 |
+
# Recalculate offset as an absolute file position.
|
| 132 |
+
if whence == io.SEEK_SET:
|
| 133 |
+
pass
|
| 134 |
+
elif whence == io.SEEK_CUR:
|
| 135 |
+
offset = self._pos + offset
|
| 136 |
+
elif whence == io.SEEK_END:
|
| 137 |
+
# Seeking relative to EOF - we need to know the file's size.
|
| 138 |
+
if self._size < 0:
|
| 139 |
+
while self.read(io.DEFAULT_BUFFER_SIZE):
|
| 140 |
+
pass
|
| 141 |
+
offset = self._size + offset
|
| 142 |
+
else:
|
| 143 |
+
raise ValueError("Invalid value for whence: {}".format(whence))
|
| 144 |
+
|
| 145 |
+
# Make it so that offset is the number of bytes to skip forward.
|
| 146 |
+
if offset < self._pos:
|
| 147 |
+
self._rewind()
|
| 148 |
+
else:
|
| 149 |
+
offset -= self._pos
|
| 150 |
+
|
| 151 |
+
# Read and discard data until we reach the desired position.
|
| 152 |
+
while offset > 0:
|
| 153 |
+
data = self.read(min(io.DEFAULT_BUFFER_SIZE, offset))
|
| 154 |
+
if not data:
|
| 155 |
+
break
|
| 156 |
+
offset -= len(data)
|
| 157 |
+
|
| 158 |
+
return self._pos
|
| 159 |
+
|
| 160 |
+
def tell(self):
|
| 161 |
+
"""Return the current file position."""
|
| 162 |
+
return self._pos
|
llava/lib/python3.10/_py_abc.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from _weakrefset import WeakSet
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def get_cache_token():
|
| 5 |
+
"""Returns the current ABC cache token.
|
| 6 |
+
|
| 7 |
+
The token is an opaque object (supporting equality testing) identifying the
|
| 8 |
+
current version of the ABC cache for virtual subclasses. The token changes
|
| 9 |
+
with every call to ``register()`` on any ABC.
|
| 10 |
+
"""
|
| 11 |
+
return ABCMeta._abc_invalidation_counter
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class ABCMeta(type):
|
| 15 |
+
"""Metaclass for defining Abstract Base Classes (ABCs).
|
| 16 |
+
|
| 17 |
+
Use this metaclass to create an ABC. An ABC can be subclassed
|
| 18 |
+
directly, and then acts as a mix-in class. You can also register
|
| 19 |
+
unrelated concrete classes (even built-in classes) and unrelated
|
| 20 |
+
ABCs as 'virtual subclasses' -- these and their descendants will
|
| 21 |
+
be considered subclasses of the registering ABC by the built-in
|
| 22 |
+
issubclass() function, but the registering ABC won't show up in
|
| 23 |
+
their MRO (Method Resolution Order) nor will method
|
| 24 |
+
implementations defined by the registering ABC be callable (not
|
| 25 |
+
even via super()).
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
# A global counter that is incremented each time a class is
|
| 29 |
+
# registered as a virtual subclass of anything. It forces the
|
| 30 |
+
# negative cache to be cleared before its next use.
|
| 31 |
+
# Note: this counter is private. Use `abc.get_cache_token()` for
|
| 32 |
+
# external code.
|
| 33 |
+
_abc_invalidation_counter = 0
|
| 34 |
+
|
| 35 |
+
def __new__(mcls, name, bases, namespace, /, **kwargs):
|
| 36 |
+
cls = super().__new__(mcls, name, bases, namespace, **kwargs)
|
| 37 |
+
# Compute set of abstract method names
|
| 38 |
+
abstracts = {name
|
| 39 |
+
for name, value in namespace.items()
|
| 40 |
+
if getattr(value, "__isabstractmethod__", False)}
|
| 41 |
+
for base in bases:
|
| 42 |
+
for name in getattr(base, "__abstractmethods__", set()):
|
| 43 |
+
value = getattr(cls, name, None)
|
| 44 |
+
if getattr(value, "__isabstractmethod__", False):
|
| 45 |
+
abstracts.add(name)
|
| 46 |
+
cls.__abstractmethods__ = frozenset(abstracts)
|
| 47 |
+
# Set up inheritance registry
|
| 48 |
+
cls._abc_registry = WeakSet()
|
| 49 |
+
cls._abc_cache = WeakSet()
|
| 50 |
+
cls._abc_negative_cache = WeakSet()
|
| 51 |
+
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
|
| 52 |
+
return cls
|
| 53 |
+
|
| 54 |
+
def register(cls, subclass):
|
| 55 |
+
"""Register a virtual subclass of an ABC.
|
| 56 |
+
|
| 57 |
+
Returns the subclass, to allow usage as a class decorator.
|
| 58 |
+
"""
|
| 59 |
+
if not isinstance(subclass, type):
|
| 60 |
+
raise TypeError("Can only register classes")
|
| 61 |
+
if issubclass(subclass, cls):
|
| 62 |
+
return subclass # Already a subclass
|
| 63 |
+
# Subtle: test for cycles *after* testing for "already a subclass";
|
| 64 |
+
# this means we allow X.register(X) and interpret it as a no-op.
|
| 65 |
+
if issubclass(cls, subclass):
|
| 66 |
+
# This would create a cycle, which is bad for the algorithm below
|
| 67 |
+
raise RuntimeError("Refusing to create an inheritance cycle")
|
| 68 |
+
cls._abc_registry.add(subclass)
|
| 69 |
+
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
|
| 70 |
+
return subclass
|
| 71 |
+
|
| 72 |
+
def _dump_registry(cls, file=None):
|
| 73 |
+
"""Debug helper to print the ABC registry."""
|
| 74 |
+
print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
|
| 75 |
+
print(f"Inv. counter: {get_cache_token()}", file=file)
|
| 76 |
+
for name in cls.__dict__:
|
| 77 |
+
if name.startswith("_abc_"):
|
| 78 |
+
value = getattr(cls, name)
|
| 79 |
+
if isinstance(value, WeakSet):
|
| 80 |
+
value = set(value)
|
| 81 |
+
print(f"{name}: {value!r}", file=file)
|
| 82 |
+
|
| 83 |
+
def _abc_registry_clear(cls):
|
| 84 |
+
"""Clear the registry (for debugging or testing)."""
|
| 85 |
+
cls._abc_registry.clear()
|
| 86 |
+
|
| 87 |
+
def _abc_caches_clear(cls):
|
| 88 |
+
"""Clear the caches (for debugging or testing)."""
|
| 89 |
+
cls._abc_cache.clear()
|
| 90 |
+
cls._abc_negative_cache.clear()
|
| 91 |
+
|
| 92 |
+
def __instancecheck__(cls, instance):
|
| 93 |
+
"""Override for isinstance(instance, cls)."""
|
| 94 |
+
# Inline the cache checking
|
| 95 |
+
subclass = instance.__class__
|
| 96 |
+
if subclass in cls._abc_cache:
|
| 97 |
+
return True
|
| 98 |
+
subtype = type(instance)
|
| 99 |
+
if subtype is subclass:
|
| 100 |
+
if (cls._abc_negative_cache_version ==
|
| 101 |
+
ABCMeta._abc_invalidation_counter and
|
| 102 |
+
subclass in cls._abc_negative_cache):
|
| 103 |
+
return False
|
| 104 |
+
# Fall back to the subclass check.
|
| 105 |
+
return cls.__subclasscheck__(subclass)
|
| 106 |
+
return any(cls.__subclasscheck__(c) for c in (subclass, subtype))
|
| 107 |
+
|
| 108 |
+
def __subclasscheck__(cls, subclass):
|
| 109 |
+
"""Override for issubclass(subclass, cls)."""
|
| 110 |
+
if not isinstance(subclass, type):
|
| 111 |
+
raise TypeError('issubclass() arg 1 must be a class')
|
| 112 |
+
# Check cache
|
| 113 |
+
if subclass in cls._abc_cache:
|
| 114 |
+
return True
|
| 115 |
+
# Check negative cache; may have to invalidate
|
| 116 |
+
if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
|
| 117 |
+
# Invalidate the negative cache
|
| 118 |
+
cls._abc_negative_cache = WeakSet()
|
| 119 |
+
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
|
| 120 |
+
elif subclass in cls._abc_negative_cache:
|
| 121 |
+
return False
|
| 122 |
+
# Check the subclass hook
|
| 123 |
+
ok = cls.__subclasshook__(subclass)
|
| 124 |
+
if ok is not NotImplemented:
|
| 125 |
+
assert isinstance(ok, bool)
|
| 126 |
+
if ok:
|
| 127 |
+
cls._abc_cache.add(subclass)
|
| 128 |
+
else:
|
| 129 |
+
cls._abc_negative_cache.add(subclass)
|
| 130 |
+
return ok
|
| 131 |
+
# Check if it's a direct subclass
|
| 132 |
+
if cls in getattr(subclass, '__mro__', ()):
|
| 133 |
+
cls._abc_cache.add(subclass)
|
| 134 |
+
return True
|
| 135 |
+
# Check if it's a subclass of a registered class (recursive)
|
| 136 |
+
for rcls in cls._abc_registry:
|
| 137 |
+
if issubclass(subclass, rcls):
|
| 138 |
+
cls._abc_cache.add(subclass)
|
| 139 |
+
return True
|
| 140 |
+
# Check if it's a subclass of a subclass (recursive)
|
| 141 |
+
for scls in cls.__subclasses__():
|
| 142 |
+
if issubclass(subclass, scls):
|
| 143 |
+
cls._abc_cache.add(subclass)
|
| 144 |
+
return True
|
| 145 |
+
# No dice; update negative cache
|
| 146 |
+
cls._abc_negative_cache.add(subclass)
|
| 147 |
+
return False
|
llava/lib/python3.10/_sitebuiltins.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The objects used by the site module to add custom builtins.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Those objects are almost immortal and they keep a reference to their module
|
| 6 |
+
# globals. Defining them in the site module would keep too many references
|
| 7 |
+
# alive.
|
| 8 |
+
# Note this means this module should also avoid keep things alive in its
|
| 9 |
+
# globals.
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
|
| 13 |
+
class Quitter(object):
|
| 14 |
+
def __init__(self, name, eof):
|
| 15 |
+
self.name = name
|
| 16 |
+
self.eof = eof
|
| 17 |
+
def __repr__(self):
|
| 18 |
+
return 'Use %s() or %s to exit' % (self.name, self.eof)
|
| 19 |
+
def __call__(self, code=None):
|
| 20 |
+
# Shells like IDLE catch the SystemExit, but listen when their
|
| 21 |
+
# stdin wrapper is closed.
|
| 22 |
+
try:
|
| 23 |
+
sys.stdin.close()
|
| 24 |
+
except:
|
| 25 |
+
pass
|
| 26 |
+
raise SystemExit(code)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class _Printer(object):
|
| 30 |
+
"""interactive prompt objects for printing the license text, a list of
|
| 31 |
+
contributors and the copyright notice."""
|
| 32 |
+
|
| 33 |
+
MAXLINES = 23
|
| 34 |
+
|
| 35 |
+
def __init__(self, name, data, files=(), dirs=()):
|
| 36 |
+
import os
|
| 37 |
+
self.__name = name
|
| 38 |
+
self.__data = data
|
| 39 |
+
self.__lines = None
|
| 40 |
+
self.__filenames = [os.path.join(dir, filename)
|
| 41 |
+
for dir in dirs
|
| 42 |
+
for filename in files]
|
| 43 |
+
|
| 44 |
+
def __setup(self):
|
| 45 |
+
if self.__lines:
|
| 46 |
+
return
|
| 47 |
+
data = None
|
| 48 |
+
for filename in self.__filenames:
|
| 49 |
+
try:
|
| 50 |
+
with open(filename, encoding='utf-8') as fp:
|
| 51 |
+
data = fp.read()
|
| 52 |
+
break
|
| 53 |
+
except OSError:
|
| 54 |
+
pass
|
| 55 |
+
if not data:
|
| 56 |
+
data = self.__data
|
| 57 |
+
self.__lines = data.split('\n')
|
| 58 |
+
self.__linecnt = len(self.__lines)
|
| 59 |
+
|
| 60 |
+
def __repr__(self):
|
| 61 |
+
self.__setup()
|
| 62 |
+
if len(self.__lines) <= self.MAXLINES:
|
| 63 |
+
return "\n".join(self.__lines)
|
| 64 |
+
else:
|
| 65 |
+
return "Type %s() to see the full %s text" % ((self.__name,)*2)
|
| 66 |
+
|
| 67 |
+
def __call__(self):
|
| 68 |
+
self.__setup()
|
| 69 |
+
prompt = 'Hit Return for more, or q (and Return) to quit: '
|
| 70 |
+
lineno = 0
|
| 71 |
+
while 1:
|
| 72 |
+
try:
|
| 73 |
+
for i in range(lineno, lineno + self.MAXLINES):
|
| 74 |
+
print(self.__lines[i])
|
| 75 |
+
except IndexError:
|
| 76 |
+
break
|
| 77 |
+
else:
|
| 78 |
+
lineno += self.MAXLINES
|
| 79 |
+
key = None
|
| 80 |
+
while key is None:
|
| 81 |
+
key = input(prompt)
|
| 82 |
+
if key not in ('', 'q'):
|
| 83 |
+
key = None
|
| 84 |
+
if key == 'q':
|
| 85 |
+
break
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class _Helper(object):
|
| 89 |
+
"""Define the builtin 'help'.
|
| 90 |
+
|
| 91 |
+
This is a wrapper around pydoc.help that provides a helpful message
|
| 92 |
+
when 'help' is typed at the Python interactive prompt.
|
| 93 |
+
|
| 94 |
+
Calling help() at the Python prompt starts an interactive help session.
|
| 95 |
+
Calling help(thing) prints help for the python object 'thing'.
|
| 96 |
+
"""
|
| 97 |
+
|
| 98 |
+
def __repr__(self):
|
| 99 |
+
return "Type help() for interactive help, " \
|
| 100 |
+
"or help(object) for help about object."
|
| 101 |
+
def __call__(self, *args, **kwds):
|
| 102 |
+
import pydoc
|
| 103 |
+
return pydoc.help(*args, **kwds)
|
llava/lib/python3.10/_sysconfigdata__linux_x86_64-linux-gnu.py.orig
ADDED
|
@@ -0,0 +1,986 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# system configuration generated and used by the sysconfig module
|
| 2 |
+
build_time_vars = {'ABIFLAGS': '',
|
| 3 |
+
'AC_APPLE_UNIVERSAL_BUILD': 0,
|
| 4 |
+
'AIX_BUILDDATE': 0,
|
| 5 |
+
'AIX_GENUINE_CPLUSPLUS': 0,
|
| 6 |
+
'ALIGNOF_LONG': 8,
|
| 7 |
+
'ALIGNOF_SIZE_T': 8,
|
| 8 |
+
'ALT_SOABI': 0,
|
| 9 |
+
'ANDROID_API_LEVEL': 0,
|
| 10 |
+
'AR': 'x86_64-conda-linux-gnu-ar',
|
| 11 |
+
'ARFLAGS': 'rcs',
|
| 12 |
+
'BASECFLAGS': '-Wno-unused-result -Wsign-compare',
|
| 13 |
+
'BASECPPFLAGS': '-IObjects -IInclude -IPython',
|
| 14 |
+
'BASEMODLIBS': '',
|
| 15 |
+
'BINDIR': '/root/envs/llava/bin',
|
| 16 |
+
'BINLIBDEST': '/root/envs/llava/lib/python3.10',
|
| 17 |
+
'BLDLIBRARY': 'libpython3.10.a',
|
| 18 |
+
'BLDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 19 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 20 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 21 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 22 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 23 |
+
'-L/root/envs/llava/lib '
|
| 24 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 25 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 26 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 27 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 28 |
+
'-L/root/envs/llava/lib',
|
| 29 |
+
'BUILDEXE': '',
|
| 30 |
+
'BUILDPYTHON': 'python',
|
| 31 |
+
'BUILD_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 32 |
+
'BYTESTR_DEPS': '\\',
|
| 33 |
+
'CC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 34 |
+
'CCSHARED': '-fPIC',
|
| 35 |
+
'CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 36 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 37 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 38 |
+
'-isystem '
|
| 39 |
+
'/root/envs/llava/include '
|
| 40 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 41 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 42 |
+
' '
|
| 43 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 44 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 45 |
+
'-isystem '
|
| 46 |
+
'/root/envs/llava/include '
|
| 47 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 48 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 49 |
+
' ',
|
| 50 |
+
'CFLAGSFORSHARED': '',
|
| 51 |
+
'CFLAGS_ALIASING': '',
|
| 52 |
+
'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in '
|
| 53 |
+
'Makefile.pre.in',
|
| 54 |
+
'CONFIGURE_CFLAGS': '-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 55 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 56 |
+
'-ffunction-sections -pipe -isystem '
|
| 57 |
+
'/root/envs/llava/include '
|
| 58 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 59 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 60 |
+
' '
|
| 61 |
+
' ',
|
| 62 |
+
'CONFIGURE_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 63 |
+
' '
|
| 64 |
+
' -g -std=c99 -Wextra '
|
| 65 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 66 |
+
'-Wno-missing-field-initializers '
|
| 67 |
+
'-Werror=implicit-function-declaration '
|
| 68 |
+
'-fvisibility=hidden',
|
| 69 |
+
'CONFIGURE_CPPFLAGS': '-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 70 |
+
'/root/envs/llava/include '
|
| 71 |
+
'-I/root/envs/llava/include',
|
| 72 |
+
'CONFIGURE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 73 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 74 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 75 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 76 |
+
'-L/root/envs/llava/lib',
|
| 77 |
+
'CONFIGURE_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 78 |
+
' '
|
| 79 |
+
' -g',
|
| 80 |
+
'CONFIG_ARGS': "'--prefix=/root/envs/llava' "
|
| 81 |
+
"'--build=x86_64-conda-linux-gnu' "
|
| 82 |
+
"'--host=x86_64-conda-linux-gnu' '--enable-ipv6' "
|
| 83 |
+
"'--with-ensurepip=no' "
|
| 84 |
+
"'--with-tzpath=/root/envs/llava/share/zoneinfo' "
|
| 85 |
+
"'--with-computed-gotos' '--with-system-ffi' "
|
| 86 |
+
"'--enable-loadable-sqlite-extensions' "
|
| 87 |
+
"'--with-tcltk-includes=-I/root/envs/llava/include' "
|
| 88 |
+
"'--with-tcltk-libs=-L/root/envs/llava/lib "
|
| 89 |
+
"-ltcl8.6 -ltk8.6' '--with-platlibdir=lib' '--with-lto' "
|
| 90 |
+
"'--enable-optimizations' "
|
| 91 |
+
"'-oldincludedir=/croot/python-split_1733933809325/_build_env/x86_64-conda-linux-gnu/sysroot/usr/include' "
|
| 92 |
+
"'--disable-shared' 'PROFILE_TASK=-m test --pgo' "
|
| 93 |
+
"'build_alias=x86_64-conda-linux-gnu' "
|
| 94 |
+
"'host_alias=x86_64-conda-linux-gnu' 'MACHDEP=linux' "
|
| 95 |
+
"'CC=x86_64-conda-linux-gnu-gcc' 'CFLAGS=-march=nocona "
|
| 96 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 97 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 98 |
+
'-pipe -isystem '
|
| 99 |
+
'/root/envs/llava/include '
|
| 100 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 101 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 102 |
+
' '
|
| 103 |
+
"' 'LDFLAGS=-Wl,-O2 -Wl,--sort-common -Wl,--as-needed "
|
| 104 |
+
'-Wl,-z,relro -Wl,-z,now -Wl,--disable-new-dtags '
|
| 105 |
+
'-Wl,--gc-sections '
|
| 106 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 107 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 108 |
+
"-L/root/envs/llava/lib' "
|
| 109 |
+
"'CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem "
|
| 110 |
+
'/root/envs/llava/include '
|
| 111 |
+
"-I/root/envs/llava/include' "
|
| 112 |
+
"'CPP=/croot/python-split_1733933809325/_build_env/bin/x86_64-conda-linux-gnu-cpp' "
|
| 113 |
+
"'PKG_CONFIG_PATH=/root/envs/llava/lib/pkgconfig'",
|
| 114 |
+
'CONFINCLUDEDIR': '/root/envs/llava/include',
|
| 115 |
+
'CONFINCLUDEPY': '/root/envs/llava/include/python3.10',
|
| 116 |
+
'COREPYTHONPATH': '',
|
| 117 |
+
'COVERAGE_INFO': '/croot/python-split_1733933809325/work/build-static/coverage.info',
|
| 118 |
+
'COVERAGE_REPORT': '/croot/python-split_1733933809325/work/build-static/lcov-report',
|
| 119 |
+
'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov '
|
| 120 |
+
'report"',
|
| 121 |
+
'CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 122 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 123 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 124 |
+
'/root/envs/llava/include '
|
| 125 |
+
'-I/root/envs/llava/include '
|
| 126 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 127 |
+
'/root/envs/llava/include '
|
| 128 |
+
'-I/root/envs/llava/include',
|
| 129 |
+
'CXX': 'x86_64-conda-linux-gnu-c++ -pthread',
|
| 130 |
+
'DESTDIRS': '/root/envs/llava '
|
| 131 |
+
'/root/envs/llava/lib '
|
| 132 |
+
'/root/envs/llava/lib/python3.10 '
|
| 133 |
+
'/root/envs/llava/lib/python3.10/lib-dynload',
|
| 134 |
+
'DESTLIB': '/root/envs/llava/lib/python3.10',
|
| 135 |
+
'DESTPATH': '',
|
| 136 |
+
'DESTSHARED': '/root/envs/llava/lib/python3.10/lib-dynload',
|
| 137 |
+
'DFLAGS': '',
|
| 138 |
+
'DIRMODE': 755,
|
| 139 |
+
'DIST': 'README.rst ChangeLog configure configure.ac acconfig.h pyconfig.h.in '
|
| 140 |
+
'Makefile.pre.in Include Lib Misc Ext-dummy',
|
| 141 |
+
'DISTDIRS': 'Include Lib Misc Ext-dummy',
|
| 142 |
+
'DISTFILES': 'README.rst ChangeLog configure configure.ac acconfig.h '
|
| 143 |
+
'pyconfig.h.in Makefile.pre.in',
|
| 144 |
+
'DLINCLDIR': '.',
|
| 145 |
+
'DLLLIBRARY': '',
|
| 146 |
+
'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0,
|
| 147 |
+
'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0,
|
| 148 |
+
'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1,
|
| 149 |
+
'DTRACE': '',
|
| 150 |
+
'DTRACE_DEPS': '\\',
|
| 151 |
+
'DTRACE_HEADERS': '',
|
| 152 |
+
'DTRACE_OBJS': '',
|
| 153 |
+
'DYNLOADFILE': 'dynload_shlib.o',
|
| 154 |
+
'ENABLE_IPV6': 1,
|
| 155 |
+
'ENSUREPIP': 'no',
|
| 156 |
+
'EXE': '',
|
| 157 |
+
'EXEMODE': 755,
|
| 158 |
+
'EXPERIMENTAL_ISOLATED_SUBINTERPRETERS': 0,
|
| 159 |
+
'EXPORTSFROM': '',
|
| 160 |
+
'EXPORTSYMS': '',
|
| 161 |
+
'EXTRATESTOPTS': '',
|
| 162 |
+
'EXT_SUFFIX': '.cpython-310-x86_64-linux-gnu.so',
|
| 163 |
+
'FILEMODE': 644,
|
| 164 |
+
'FLOAT_WORDS_BIGENDIAN': 0,
|
| 165 |
+
'FLOCK_NEEDS_LIBBSD': 0,
|
| 166 |
+
'GETPGRP_HAVE_ARG': 0,
|
| 167 |
+
'GITBRANCH': '',
|
| 168 |
+
'GITTAG': '',
|
| 169 |
+
'GITVERSION': '',
|
| 170 |
+
'GNULD': 'yes',
|
| 171 |
+
'HAVE_ACCEPT4': 1,
|
| 172 |
+
'HAVE_ACOSH': 1,
|
| 173 |
+
'HAVE_ADDRINFO': 1,
|
| 174 |
+
'HAVE_ALARM': 1,
|
| 175 |
+
'HAVE_ALIGNED_REQUIRED': 0,
|
| 176 |
+
'HAVE_ALLOCA_H': 1,
|
| 177 |
+
'HAVE_ALTZONE': 0,
|
| 178 |
+
'HAVE_ASINH': 1,
|
| 179 |
+
'HAVE_ASM_TYPES_H': 1,
|
| 180 |
+
'HAVE_ATANH': 1,
|
| 181 |
+
'HAVE_BIND_TEXTDOMAIN_CODESET': 1,
|
| 182 |
+
'HAVE_BLUETOOTH_BLUETOOTH_H': 0,
|
| 183 |
+
'HAVE_BLUETOOTH_H': 0,
|
| 184 |
+
'HAVE_BROKEN_MBSTOWCS': 0,
|
| 185 |
+
'HAVE_BROKEN_NICE': 0,
|
| 186 |
+
'HAVE_BROKEN_PIPE_BUF': 0,
|
| 187 |
+
'HAVE_BROKEN_POLL': 0,
|
| 188 |
+
'HAVE_BROKEN_POSIX_SEMAPHORES': 0,
|
| 189 |
+
'HAVE_BROKEN_PTHREAD_SIGMASK': 0,
|
| 190 |
+
'HAVE_BROKEN_SEM_GETVALUE': 0,
|
| 191 |
+
'HAVE_BROKEN_UNSETENV': 0,
|
| 192 |
+
'HAVE_BUILTIN_ATOMIC': 1,
|
| 193 |
+
'HAVE_CHFLAGS': 0,
|
| 194 |
+
'HAVE_CHOWN': 1,
|
| 195 |
+
'HAVE_CHROOT': 1,
|
| 196 |
+
'HAVE_CLOCK': 1,
|
| 197 |
+
'HAVE_CLOCK_GETRES': 1,
|
| 198 |
+
'HAVE_CLOCK_GETTIME': 1,
|
| 199 |
+
'HAVE_CLOCK_SETTIME': 1,
|
| 200 |
+
'HAVE_CLOSE_RANGE': 0,
|
| 201 |
+
'HAVE_COMPUTED_GOTOS': 1,
|
| 202 |
+
'HAVE_CONFSTR': 1,
|
| 203 |
+
'HAVE_CONIO_H': 0,
|
| 204 |
+
'HAVE_COPYSIGN': 1,
|
| 205 |
+
'HAVE_COPY_FILE_RANGE': 0,
|
| 206 |
+
'HAVE_CRYPT_H': 1,
|
| 207 |
+
'HAVE_CRYPT_R': 1,
|
| 208 |
+
'HAVE_CTERMID': 1,
|
| 209 |
+
'HAVE_CTERMID_R': 0,
|
| 210 |
+
'HAVE_CURSES_FILTER': 1,
|
| 211 |
+
'HAVE_CURSES_H': 1,
|
| 212 |
+
'HAVE_CURSES_HAS_KEY': 1,
|
| 213 |
+
'HAVE_CURSES_IMMEDOK': 1,
|
| 214 |
+
'HAVE_CURSES_IS_PAD': 1,
|
| 215 |
+
'HAVE_CURSES_IS_TERM_RESIZED': 1,
|
| 216 |
+
'HAVE_CURSES_RESIZETERM': 1,
|
| 217 |
+
'HAVE_CURSES_RESIZE_TERM': 1,
|
| 218 |
+
'HAVE_CURSES_SYNCOK': 1,
|
| 219 |
+
'HAVE_CURSES_TYPEAHEAD': 1,
|
| 220 |
+
'HAVE_CURSES_USE_ENV': 1,
|
| 221 |
+
'HAVE_CURSES_WCHGAT': 1,
|
| 222 |
+
'HAVE_DECL_ISFINITE': 1,
|
| 223 |
+
'HAVE_DECL_ISINF': 1,
|
| 224 |
+
'HAVE_DECL_ISNAN': 1,
|
| 225 |
+
'HAVE_DECL_RTLD_DEEPBIND': 1,
|
| 226 |
+
'HAVE_DECL_RTLD_GLOBAL': 1,
|
| 227 |
+
'HAVE_DECL_RTLD_LAZY': 1,
|
| 228 |
+
'HAVE_DECL_RTLD_LOCAL': 1,
|
| 229 |
+
'HAVE_DECL_RTLD_MEMBER': 0,
|
| 230 |
+
'HAVE_DECL_RTLD_NODELETE': 1,
|
| 231 |
+
'HAVE_DECL_RTLD_NOLOAD': 1,
|
| 232 |
+
'HAVE_DECL_RTLD_NOW': 1,
|
| 233 |
+
'HAVE_DECL_TZNAME': 0,
|
| 234 |
+
'HAVE_DEVICE_MACROS': 1,
|
| 235 |
+
'HAVE_DEV_PTC': 0,
|
| 236 |
+
'HAVE_DEV_PTMX': 1,
|
| 237 |
+
'HAVE_DIRECT_H': 0,
|
| 238 |
+
'HAVE_DIRENT_D_TYPE': 1,
|
| 239 |
+
'HAVE_DIRENT_H': 1,
|
| 240 |
+
'HAVE_DIRFD': 1,
|
| 241 |
+
'HAVE_DLFCN_H': 1,
|
| 242 |
+
'HAVE_DLOPEN': 1,
|
| 243 |
+
'HAVE_DUP2': 1,
|
| 244 |
+
'HAVE_DUP3': 1,
|
| 245 |
+
'HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH': 0,
|
| 246 |
+
'HAVE_DYNAMIC_LOADING': 1,
|
| 247 |
+
'HAVE_ENDIAN_H': 1,
|
| 248 |
+
'HAVE_EPOLL': 1,
|
| 249 |
+
'HAVE_EPOLL_CREATE1': 1,
|
| 250 |
+
'HAVE_ERF': 1,
|
| 251 |
+
'HAVE_ERFC': 1,
|
| 252 |
+
'HAVE_ERRNO_H': 1,
|
| 253 |
+
'HAVE_EVENTFD': 1,
|
| 254 |
+
'HAVE_EXECV': 1,
|
| 255 |
+
'HAVE_EXPLICIT_BZERO': 0,
|
| 256 |
+
'HAVE_EXPLICIT_MEMSET': 0,
|
| 257 |
+
'HAVE_EXPM1': 1,
|
| 258 |
+
'HAVE_FACCESSAT': 1,
|
| 259 |
+
'HAVE_FCHDIR': 1,
|
| 260 |
+
'HAVE_FCHMOD': 1,
|
| 261 |
+
'HAVE_FCHMODAT': 1,
|
| 262 |
+
'HAVE_FCHOWN': 1,
|
| 263 |
+
'HAVE_FCHOWNAT': 1,
|
| 264 |
+
'HAVE_FCNTL_H': 1,
|
| 265 |
+
'HAVE_FDATASYNC': 1,
|
| 266 |
+
'HAVE_FDOPENDIR': 1,
|
| 267 |
+
'HAVE_FDWALK': 0,
|
| 268 |
+
'HAVE_FEXECVE': 1,
|
| 269 |
+
'HAVE_FINITE': 1,
|
| 270 |
+
'HAVE_FLOCK': 1,
|
| 271 |
+
'HAVE_FORK': 1,
|
| 272 |
+
'HAVE_FORKPTY': 1,
|
| 273 |
+
'HAVE_FPATHCONF': 1,
|
| 274 |
+
'HAVE_FSEEK64': 0,
|
| 275 |
+
'HAVE_FSEEKO': 1,
|
| 276 |
+
'HAVE_FSTATAT': 1,
|
| 277 |
+
'HAVE_FSTATVFS': 1,
|
| 278 |
+
'HAVE_FSYNC': 1,
|
| 279 |
+
'HAVE_FTELL64': 0,
|
| 280 |
+
'HAVE_FTELLO': 1,
|
| 281 |
+
'HAVE_FTIME': 1,
|
| 282 |
+
'HAVE_FTRUNCATE': 1,
|
| 283 |
+
'HAVE_FUTIMENS': 1,
|
| 284 |
+
'HAVE_FUTIMES': 1,
|
| 285 |
+
'HAVE_FUTIMESAT': 1,
|
| 286 |
+
'HAVE_GAI_STRERROR': 1,
|
| 287 |
+
'HAVE_GAMMA': 1,
|
| 288 |
+
'HAVE_GCC_ASM_FOR_MC68881': 0,
|
| 289 |
+
'HAVE_GCC_ASM_FOR_X64': 1,
|
| 290 |
+
'HAVE_GCC_ASM_FOR_X87': 1,
|
| 291 |
+
'HAVE_GCC_UINT128_T': 1,
|
| 292 |
+
'HAVE_GETADDRINFO': 1,
|
| 293 |
+
'HAVE_GETC_UNLOCKED': 1,
|
| 294 |
+
'HAVE_GETENTROPY': 0,
|
| 295 |
+
'HAVE_GETGRGID_R': 1,
|
| 296 |
+
'HAVE_GETGRNAM_R': 1,
|
| 297 |
+
'HAVE_GETGROUPLIST': 1,
|
| 298 |
+
'HAVE_GETGROUPS': 1,
|
| 299 |
+
'HAVE_GETHOSTBYNAME': 0,
|
| 300 |
+
'HAVE_GETHOSTBYNAME_R': 1,
|
| 301 |
+
'HAVE_GETHOSTBYNAME_R_3_ARG': 0,
|
| 302 |
+
'HAVE_GETHOSTBYNAME_R_5_ARG': 0,
|
| 303 |
+
'HAVE_GETHOSTBYNAME_R_6_ARG': 1,
|
| 304 |
+
'HAVE_GETITIMER': 1,
|
| 305 |
+
'HAVE_GETLOADAVG': 1,
|
| 306 |
+
'HAVE_GETLOGIN': 1,
|
| 307 |
+
'HAVE_GETNAMEINFO': 1,
|
| 308 |
+
'HAVE_GETPAGESIZE': 1,
|
| 309 |
+
'HAVE_GETPEERNAME': 1,
|
| 310 |
+
'HAVE_GETPGID': 1,
|
| 311 |
+
'HAVE_GETPGRP': 1,
|
| 312 |
+
'HAVE_GETPID': 1,
|
| 313 |
+
'HAVE_GETPRIORITY': 1,
|
| 314 |
+
'HAVE_GETPWENT': 1,
|
| 315 |
+
'HAVE_GETPWNAM_R': 1,
|
| 316 |
+
'HAVE_GETPWUID_R': 1,
|
| 317 |
+
'HAVE_GETRANDOM': 0,
|
| 318 |
+
'HAVE_GETRANDOM_SYSCALL': 1,
|
| 319 |
+
'HAVE_GETRESGID': 1,
|
| 320 |
+
'HAVE_GETRESUID': 1,
|
| 321 |
+
'HAVE_GETSID': 1,
|
| 322 |
+
'HAVE_GETSPENT': 1,
|
| 323 |
+
'HAVE_GETSPNAM': 1,
|
| 324 |
+
'HAVE_GETWD': 1,
|
| 325 |
+
'HAVE_GLIBC_MEMMOVE_BUG': 0,
|
| 326 |
+
'HAVE_GRP_H': 1,
|
| 327 |
+
'HAVE_HSTRERROR': 1,
|
| 328 |
+
'HAVE_HTOLE64': 1,
|
| 329 |
+
'HAVE_HYPOT': 1,
|
| 330 |
+
'HAVE_IEEEFP_H': 0,
|
| 331 |
+
'HAVE_IF_NAMEINDEX': 1,
|
| 332 |
+
'HAVE_INET_ATON': 1,
|
| 333 |
+
'HAVE_INET_PTON': 1,
|
| 334 |
+
'HAVE_INITGROUPS': 1,
|
| 335 |
+
'HAVE_INTTYPES_H': 1,
|
| 336 |
+
'HAVE_IO_H': 0,
|
| 337 |
+
'HAVE_IPA_PURE_CONST_BUG': 0,
|
| 338 |
+
'HAVE_KILL': 1,
|
| 339 |
+
'HAVE_KILLPG': 1,
|
| 340 |
+
'HAVE_KQUEUE': 0,
|
| 341 |
+
'HAVE_LANGINFO_H': 1,
|
| 342 |
+
'HAVE_LARGEFILE_SUPPORT': 0,
|
| 343 |
+
'HAVE_LCHFLAGS': 0,
|
| 344 |
+
'HAVE_LCHMOD': 0,
|
| 345 |
+
'HAVE_LCHOWN': 1,
|
| 346 |
+
'HAVE_LGAMMA': 1,
|
| 347 |
+
'HAVE_LIBDL': 1,
|
| 348 |
+
'HAVE_LIBDLD': 0,
|
| 349 |
+
'HAVE_LIBIEEE': 0,
|
| 350 |
+
'HAVE_LIBINTL_H': 1,
|
| 351 |
+
'HAVE_LIBREADLINE': 1,
|
| 352 |
+
'HAVE_LIBRESOLV': 0,
|
| 353 |
+
'HAVE_LIBSENDFILE': 0,
|
| 354 |
+
'HAVE_LIBUTIL_H': 0,
|
| 355 |
+
'HAVE_LIBUUID': 1,
|
| 356 |
+
'HAVE_LINK': 1,
|
| 357 |
+
'HAVE_LINKAT': 1,
|
| 358 |
+
'HAVE_LINUX_AUXVEC_H': 1,
|
| 359 |
+
'HAVE_LINUX_CAN_BCM_H': 1,
|
| 360 |
+
'HAVE_LINUX_CAN_H': 1,
|
| 361 |
+
'HAVE_LINUX_CAN_J1939_H': 0,
|
| 362 |
+
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 1,
|
| 363 |
+
'HAVE_LINUX_CAN_RAW_H': 1,
|
| 364 |
+
'HAVE_LINUX_CAN_RAW_JOIN_FILTERS': 1,
|
| 365 |
+
'HAVE_LINUX_MEMFD_H': 1,
|
| 366 |
+
'HAVE_LINUX_NETLINK_H': 1,
|
| 367 |
+
'HAVE_LINUX_QRTR_H': 0,
|
| 368 |
+
'HAVE_LINUX_RANDOM_H': 1,
|
| 369 |
+
'HAVE_LINUX_TIPC_H': 1,
|
| 370 |
+
'HAVE_LINUX_VM_SOCKETS_H': 1,
|
| 371 |
+
'HAVE_LINUX_WAIT_H': 1,
|
| 372 |
+
'HAVE_LOCKF': 1,
|
| 373 |
+
'HAVE_LOG1P': 1,
|
| 374 |
+
'HAVE_LOG2': 1,
|
| 375 |
+
'HAVE_LONG_DOUBLE': 1,
|
| 376 |
+
'HAVE_LSTAT': 1,
|
| 377 |
+
'HAVE_LUTIMES': 1,
|
| 378 |
+
'HAVE_MADVISE': 1,
|
| 379 |
+
'HAVE_MAKEDEV': 1,
|
| 380 |
+
'HAVE_MBRTOWC': 1,
|
| 381 |
+
'HAVE_MEMFD_CREATE': 0,
|
| 382 |
+
'HAVE_MEMORY_H': 1,
|
| 383 |
+
'HAVE_MEMRCHR': 1,
|
| 384 |
+
'HAVE_MKDIRAT': 1,
|
| 385 |
+
'HAVE_MKFIFO': 1,
|
| 386 |
+
'HAVE_MKFIFOAT': 1,
|
| 387 |
+
'HAVE_MKNOD': 1,
|
| 388 |
+
'HAVE_MKNODAT': 1,
|
| 389 |
+
'HAVE_MKTIME': 1,
|
| 390 |
+
'HAVE_MMAP': 1,
|
| 391 |
+
'HAVE_MREMAP': 1,
|
| 392 |
+
'HAVE_NCURSES_H': 1,
|
| 393 |
+
'HAVE_NDIR_H': 0,
|
| 394 |
+
'HAVE_NETPACKET_PACKET_H': 1,
|
| 395 |
+
'HAVE_NET_IF_H': 1,
|
| 396 |
+
'HAVE_NICE': 1,
|
| 397 |
+
'HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION': 0,
|
| 398 |
+
'HAVE_OPENAT': 1,
|
| 399 |
+
'HAVE_OPENPTY': 1,
|
| 400 |
+
'HAVE_PATHCONF': 1,
|
| 401 |
+
'HAVE_PAUSE': 1,
|
| 402 |
+
'HAVE_PIPE2': 1,
|
| 403 |
+
'HAVE_PLOCK': 0,
|
| 404 |
+
'HAVE_POLL': 1,
|
| 405 |
+
'HAVE_POLL_H': 1,
|
| 406 |
+
'HAVE_POSIX_FADVISE': 1,
|
| 407 |
+
'HAVE_POSIX_FALLOCATE': 1,
|
| 408 |
+
'HAVE_POSIX_SPAWN': 1,
|
| 409 |
+
'HAVE_POSIX_SPAWNP': 1,
|
| 410 |
+
'HAVE_PREAD': 1,
|
| 411 |
+
'HAVE_PREADV': 1,
|
| 412 |
+
'HAVE_PREADV2': 0,
|
| 413 |
+
'HAVE_PRLIMIT': 1,
|
| 414 |
+
'HAVE_PROCESS_H': 0,
|
| 415 |
+
'HAVE_PROTOTYPES': 1,
|
| 416 |
+
'HAVE_PTHREAD_CONDATTR_SETCLOCK': 1,
|
| 417 |
+
'HAVE_PTHREAD_DESTRUCTOR': 0,
|
| 418 |
+
'HAVE_PTHREAD_GETCPUCLOCKID': 1,
|
| 419 |
+
'HAVE_PTHREAD_H': 1,
|
| 420 |
+
'HAVE_PTHREAD_INIT': 0,
|
| 421 |
+
'HAVE_PTHREAD_KILL': 1,
|
| 422 |
+
'HAVE_PTHREAD_SIGMASK': 1,
|
| 423 |
+
'HAVE_PTY_H': 1,
|
| 424 |
+
'HAVE_PWRITE': 1,
|
| 425 |
+
'HAVE_PWRITEV': 1,
|
| 426 |
+
'HAVE_PWRITEV2': 0,
|
| 427 |
+
'HAVE_READLINK': 1,
|
| 428 |
+
'HAVE_READLINKAT': 1,
|
| 429 |
+
'HAVE_READV': 1,
|
| 430 |
+
'HAVE_REALPATH': 1,
|
| 431 |
+
'HAVE_RENAMEAT': 1,
|
| 432 |
+
'HAVE_RL_APPEND_HISTORY': 1,
|
| 433 |
+
'HAVE_RL_CATCH_SIGNAL': 1,
|
| 434 |
+
'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1,
|
| 435 |
+
'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1,
|
| 436 |
+
'HAVE_RL_COMPLETION_MATCHES': 1,
|
| 437 |
+
'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1,
|
| 438 |
+
'HAVE_RL_PRE_INPUT_HOOK': 1,
|
| 439 |
+
'HAVE_RL_RESIZE_TERMINAL': 1,
|
| 440 |
+
'HAVE_ROUND': 1,
|
| 441 |
+
'HAVE_RTPSPAWN': 0,
|
| 442 |
+
'HAVE_SCHED_GET_PRIORITY_MAX': 1,
|
| 443 |
+
'HAVE_SCHED_H': 1,
|
| 444 |
+
'HAVE_SCHED_RR_GET_INTERVAL': 1,
|
| 445 |
+
'HAVE_SCHED_SETAFFINITY': 1,
|
| 446 |
+
'HAVE_SCHED_SETPARAM': 1,
|
| 447 |
+
'HAVE_SCHED_SETSCHEDULER': 1,
|
| 448 |
+
'HAVE_SEM_CLOCKWAIT': 0,
|
| 449 |
+
'HAVE_SEM_GETVALUE': 1,
|
| 450 |
+
'HAVE_SEM_OPEN': 1,
|
| 451 |
+
'HAVE_SEM_TIMEDWAIT': 1,
|
| 452 |
+
'HAVE_SEM_UNLINK': 1,
|
| 453 |
+
'HAVE_SENDFILE': 1,
|
| 454 |
+
'HAVE_SETEGID': 1,
|
| 455 |
+
'HAVE_SETEUID': 1,
|
| 456 |
+
'HAVE_SETGID': 1,
|
| 457 |
+
'HAVE_SETGROUPS': 1,
|
| 458 |
+
'HAVE_SETHOSTNAME': 1,
|
| 459 |
+
'HAVE_SETITIMER': 1,
|
| 460 |
+
'HAVE_SETLOCALE': 1,
|
| 461 |
+
'HAVE_SETPGID': 1,
|
| 462 |
+
'HAVE_SETPGRP': 1,
|
| 463 |
+
'HAVE_SETPRIORITY': 1,
|
| 464 |
+
'HAVE_SETREGID': 1,
|
| 465 |
+
'HAVE_SETRESGID': 1,
|
| 466 |
+
'HAVE_SETRESUID': 1,
|
| 467 |
+
'HAVE_SETREUID': 1,
|
| 468 |
+
'HAVE_SETSID': 1,
|
| 469 |
+
'HAVE_SETUID': 1,
|
| 470 |
+
'HAVE_SETVBUF': 1,
|
| 471 |
+
'HAVE_SHADOW_H': 1,
|
| 472 |
+
'HAVE_SHM_OPEN': 1,
|
| 473 |
+
'HAVE_SHM_UNLINK': 1,
|
| 474 |
+
'HAVE_SIGACTION': 1,
|
| 475 |
+
'HAVE_SIGALTSTACK': 1,
|
| 476 |
+
'HAVE_SIGFILLSET': 1,
|
| 477 |
+
'HAVE_SIGINFO_T_SI_BAND': 1,
|
| 478 |
+
'HAVE_SIGINTERRUPT': 1,
|
| 479 |
+
'HAVE_SIGNAL_H': 1,
|
| 480 |
+
'HAVE_SIGPENDING': 1,
|
| 481 |
+
'HAVE_SIGRELSE': 1,
|
| 482 |
+
'HAVE_SIGTIMEDWAIT': 1,
|
| 483 |
+
'HAVE_SIGWAIT': 1,
|
| 484 |
+
'HAVE_SIGWAITINFO': 1,
|
| 485 |
+
'HAVE_SNPRINTF': 1,
|
| 486 |
+
'HAVE_SOCKADDR_ALG': 1,
|
| 487 |
+
'HAVE_SOCKADDR_SA_LEN': 0,
|
| 488 |
+
'HAVE_SOCKADDR_STORAGE': 1,
|
| 489 |
+
'HAVE_SOCKETPAIR': 1,
|
| 490 |
+
'HAVE_SPAWN_H': 1,
|
| 491 |
+
'HAVE_SPLICE': 1,
|
| 492 |
+
'HAVE_SSIZE_T': 1,
|
| 493 |
+
'HAVE_STATVFS': 1,
|
| 494 |
+
'HAVE_STAT_TV_NSEC': 1,
|
| 495 |
+
'HAVE_STAT_TV_NSEC2': 0,
|
| 496 |
+
'HAVE_STDARG_PROTOTYPES': 1,
|
| 497 |
+
'HAVE_STDINT_H': 1,
|
| 498 |
+
'HAVE_STDLIB_H': 1,
|
| 499 |
+
'HAVE_STD_ATOMIC': 1,
|
| 500 |
+
'HAVE_STRFTIME': 1,
|
| 501 |
+
'HAVE_STRINGS_H': 1,
|
| 502 |
+
'HAVE_STRING_H': 1,
|
| 503 |
+
'HAVE_STRLCPY': 0,
|
| 504 |
+
'HAVE_STROPTS_H': 0,
|
| 505 |
+
'HAVE_STRSIGNAL': 1,
|
| 506 |
+
'HAVE_STRUCT_PASSWD_PW_GECOS': 1,
|
| 507 |
+
'HAVE_STRUCT_PASSWD_PW_PASSWD': 1,
|
| 508 |
+
'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0,
|
| 509 |
+
'HAVE_STRUCT_STAT_ST_BLKSIZE': 1,
|
| 510 |
+
'HAVE_STRUCT_STAT_ST_BLOCKS': 1,
|
| 511 |
+
'HAVE_STRUCT_STAT_ST_FLAGS': 0,
|
| 512 |
+
'HAVE_STRUCT_STAT_ST_GEN': 0,
|
| 513 |
+
'HAVE_STRUCT_STAT_ST_RDEV': 1,
|
| 514 |
+
'HAVE_STRUCT_TM_TM_ZONE': 1,
|
| 515 |
+
'HAVE_SYMLINK': 1,
|
| 516 |
+
'HAVE_SYMLINKAT': 1,
|
| 517 |
+
'HAVE_SYNC': 1,
|
| 518 |
+
'HAVE_SYSCONF': 1,
|
| 519 |
+
'HAVE_SYSEXITS_H': 1,
|
| 520 |
+
'HAVE_SYS_AUDIOIO_H': 0,
|
| 521 |
+
'HAVE_SYS_AUXV_H': 1,
|
| 522 |
+
'HAVE_SYS_BSDTTY_H': 0,
|
| 523 |
+
'HAVE_SYS_DEVPOLL_H': 0,
|
| 524 |
+
'HAVE_SYS_DIR_H': 0,
|
| 525 |
+
'HAVE_SYS_ENDIAN_H': 0,
|
| 526 |
+
'HAVE_SYS_EPOLL_H': 1,
|
| 527 |
+
'HAVE_SYS_EVENTFD_H': 1,
|
| 528 |
+
'HAVE_SYS_EVENT_H': 0,
|
| 529 |
+
'HAVE_SYS_FILE_H': 1,
|
| 530 |
+
'HAVE_SYS_IOCTL_H': 1,
|
| 531 |
+
'HAVE_SYS_KERN_CONTROL_H': 0,
|
| 532 |
+
'HAVE_SYS_LOADAVG_H': 0,
|
| 533 |
+
'HAVE_SYS_LOCK_H': 0,
|
| 534 |
+
'HAVE_SYS_MEMFD_H': 0,
|
| 535 |
+
'HAVE_SYS_MKDEV_H': 0,
|
| 536 |
+
'HAVE_SYS_MMAN_H': 1,
|
| 537 |
+
'HAVE_SYS_MODEM_H': 0,
|
| 538 |
+
'HAVE_SYS_NDIR_H': 0,
|
| 539 |
+
'HAVE_SYS_PARAM_H': 1,
|
| 540 |
+
'HAVE_SYS_POLL_H': 1,
|
| 541 |
+
'HAVE_SYS_RANDOM_H': 0,
|
| 542 |
+
'HAVE_SYS_RESOURCE_H': 1,
|
| 543 |
+
'HAVE_SYS_SELECT_H': 1,
|
| 544 |
+
'HAVE_SYS_SENDFILE_H': 1,
|
| 545 |
+
'HAVE_SYS_SOCKET_H': 1,
|
| 546 |
+
'HAVE_SYS_STATVFS_H': 1,
|
| 547 |
+
'HAVE_SYS_STAT_H': 1,
|
| 548 |
+
'HAVE_SYS_SYSCALL_H': 1,
|
| 549 |
+
'HAVE_SYS_SYSMACROS_H': 1,
|
| 550 |
+
'HAVE_SYS_SYS_DOMAIN_H': 0,
|
| 551 |
+
'HAVE_SYS_TERMIO_H': 0,
|
| 552 |
+
'HAVE_SYS_TIMES_H': 1,
|
| 553 |
+
'HAVE_SYS_TIME_H': 1,
|
| 554 |
+
'HAVE_SYS_TYPES_H': 1,
|
| 555 |
+
'HAVE_SYS_UIO_H': 1,
|
| 556 |
+
'HAVE_SYS_UN_H': 1,
|
| 557 |
+
'HAVE_SYS_UTSNAME_H': 1,
|
| 558 |
+
'HAVE_SYS_WAIT_H': 1,
|
| 559 |
+
'HAVE_SYS_XATTR_H': 1,
|
| 560 |
+
'HAVE_TCGETPGRP': 1,
|
| 561 |
+
'HAVE_TCSETPGRP': 1,
|
| 562 |
+
'HAVE_TEMPNAM': 1,
|
| 563 |
+
'HAVE_TERMIOS_H': 1,
|
| 564 |
+
'HAVE_TERM_H': 1,
|
| 565 |
+
'HAVE_TGAMMA': 1,
|
| 566 |
+
'HAVE_TIMEGM': 1,
|
| 567 |
+
'HAVE_TIMES': 1,
|
| 568 |
+
'HAVE_TMPFILE': 1,
|
| 569 |
+
'HAVE_TMPNAM': 1,
|
| 570 |
+
'HAVE_TMPNAM_R': 1,
|
| 571 |
+
'HAVE_TM_ZONE': 1,
|
| 572 |
+
'HAVE_TRUNCATE': 1,
|
| 573 |
+
'HAVE_TZNAME': 0,
|
| 574 |
+
'HAVE_UCS4_TCL': 0,
|
| 575 |
+
'HAVE_UNAME': 1,
|
| 576 |
+
'HAVE_UNISTD_H': 1,
|
| 577 |
+
'HAVE_UNLINKAT': 1,
|
| 578 |
+
'HAVE_USABLE_WCHAR_T': 0,
|
| 579 |
+
'HAVE_UTIL_H': 0,
|
| 580 |
+
'HAVE_UTIMENSAT': 1,
|
| 581 |
+
'HAVE_UTIMES': 1,
|
| 582 |
+
'HAVE_UTIME_H': 1,
|
| 583 |
+
'HAVE_UUID_CREATE': 0,
|
| 584 |
+
'HAVE_UUID_ENC_BE': 0,
|
| 585 |
+
'HAVE_UUID_GENERATE_TIME_SAFE': 1,
|
| 586 |
+
'HAVE_UUID_H': 1,
|
| 587 |
+
'HAVE_UUID_UUID_H': 1,
|
| 588 |
+
'HAVE_VFORK': 1,
|
| 589 |
+
'HAVE_WAIT3': 1,
|
| 590 |
+
'HAVE_WAIT4': 1,
|
| 591 |
+
'HAVE_WAITID': 1,
|
| 592 |
+
'HAVE_WAITPID': 1,
|
| 593 |
+
'HAVE_WCHAR_H': 1,
|
| 594 |
+
'HAVE_WCSCOLL': 1,
|
| 595 |
+
'HAVE_WCSFTIME': 1,
|
| 596 |
+
'HAVE_WCSXFRM': 1,
|
| 597 |
+
'HAVE_WMEMCMP': 1,
|
| 598 |
+
'HAVE_WORKING_TZSET': 1,
|
| 599 |
+
'HAVE_WRITEV': 1,
|
| 600 |
+
'HAVE_ZLIB_COPY': 1,
|
| 601 |
+
'HAVE__GETPTY': 0,
|
| 602 |
+
'HOST_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 603 |
+
'INCLDIRSTOMAKE': '/root/envs/llava/include '
|
| 604 |
+
'/root/envs/llava/include '
|
| 605 |
+
'/root/envs/llava/include/python3.10 '
|
| 606 |
+
'/root/envs/llava/include/python3.10',
|
| 607 |
+
'INCLUDEDIR': '/root/envs/llava/include',
|
| 608 |
+
'INCLUDEPY': '/root/envs/llava/include/python3.10',
|
| 609 |
+
'INSTALL': '/usr/bin/install -c',
|
| 610 |
+
'INSTALL_DATA': '/usr/bin/install -c -m 644',
|
| 611 |
+
'INSTALL_PROGRAM': '/usr/bin/install -c',
|
| 612 |
+
'INSTALL_SCRIPT': '/usr/bin/install -c',
|
| 613 |
+
'INSTALL_SHARED': '/usr/bin/install -c -m 755',
|
| 614 |
+
'INSTSONAME': 'libpython3.10.a',
|
| 615 |
+
'IO_H': 'Modules/_io/_iomodule.h',
|
| 616 |
+
'IO_OBJS': '\\',
|
| 617 |
+
'LDCXXSHARED': 'x86_64-conda-linux-gnu-c++ -pthread -shared',
|
| 618 |
+
'LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 619 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 620 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 621 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 622 |
+
'-L/root/envs/llava/lib '
|
| 623 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 624 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 625 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 626 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 627 |
+
'-L/root/envs/llava/lib',
|
| 628 |
+
'LDLIBRARY': 'libpython3.10.a',
|
| 629 |
+
'LDLIBRARYDIR': '',
|
| 630 |
+
'LDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 631 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 632 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 633 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 634 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 635 |
+
'-L/root/envs/llava/lib '
|
| 636 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 637 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 638 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 639 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 640 |
+
'-L/root/envs/llava/lib',
|
| 641 |
+
'LDVERSION': '3.10',
|
| 642 |
+
'LIBC': '',
|
| 643 |
+
'LIBDEST': '/root/envs/llava/lib/python3.10',
|
| 644 |
+
'LIBDIR': '/root/envs/llava/lib',
|
| 645 |
+
'LIBFFI_INCLUDEDIR': '/root/envs/llava/include',
|
| 646 |
+
'LIBM': '-lm',
|
| 647 |
+
'LIBOBJDIR': 'Python/',
|
| 648 |
+
'LIBOBJS': '',
|
| 649 |
+
'LIBPC': '/root/envs/llava/lib/pkgconfig',
|
| 650 |
+
'LIBPL': '/root/envs/llava/lib/python3.10/config-3.10-x86_64-linux-gnu',
|
| 651 |
+
'LIBPYTHON': '',
|
| 652 |
+
'LIBRARY': 'libpython3.10.a',
|
| 653 |
+
'LIBRARY_DEPS': 'libpython3.10.a',
|
| 654 |
+
'LIBRARY_OBJS': '\\',
|
| 655 |
+
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
|
| 656 |
+
'LIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 657 |
+
'LIBSUBDIRS': 'asyncio \\',
|
| 658 |
+
'LINKCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 659 |
+
'LINKFORSHARED': '-Xlinker -export-dynamic',
|
| 660 |
+
'LIPO_32BIT_FLAGS': '',
|
| 661 |
+
'LIPO_INTEL64_FLAGS': '',
|
| 662 |
+
'LLVM_PROF_ERR': 'no',
|
| 663 |
+
'LLVM_PROF_FILE': '',
|
| 664 |
+
'LLVM_PROF_MERGER': 'true',
|
| 665 |
+
'LN': 'ln',
|
| 666 |
+
'LOCALMODLIBS': '',
|
| 667 |
+
'MACHDEP': 'linux',
|
| 668 |
+
'MACHDEP_OBJS': '',
|
| 669 |
+
'MACHDESTLIB': '/root/envs/llava/lib/python3.10',
|
| 670 |
+
'MACOSX_DEPLOYMENT_TARGET': '',
|
| 671 |
+
'MAINCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 672 |
+
'MAJOR_IN_MKDEV': 0,
|
| 673 |
+
'MAJOR_IN_SYSMACROS': 0,
|
| 674 |
+
'MAKESETUP': '/croot/python-split_1733933809325/work/Modules/makesetup',
|
| 675 |
+
'MANDIR': '/root/envs/llava/share/man',
|
| 676 |
+
'MKDIR_P': '/usr/bin/mkdir -p',
|
| 677 |
+
'MODBUILT_NAMES': 'posix errno pwd _sre _codecs _weakref _functools '
|
| 678 |
+
'_operator _collections _abc itertools atexit _signal '
|
| 679 |
+
'_stat time _thread _locale _io faulthandler '
|
| 680 |
+
'_tracemalloc _symtable xxsubtype',
|
| 681 |
+
'MODDISABLED_NAMES': '',
|
| 682 |
+
'MODLIBS': '',
|
| 683 |
+
'MODOBJS': 'Modules/posixmodule.o Modules/errnomodule.o '
|
| 684 |
+
'Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o '
|
| 685 |
+
'Modules/_weakref.o Modules/_functoolsmodule.o '
|
| 686 |
+
'Modules/_operator.o Modules/_collectionsmodule.o '
|
| 687 |
+
'Modules/_abc.o Modules/itertoolsmodule.o '
|
| 688 |
+
'Modules/atexitmodule.o Modules/signalmodule.o Modules/_stat.o '
|
| 689 |
+
'Modules/timemodule.o Modules/_threadmodule.o '
|
| 690 |
+
'Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o '
|
| 691 |
+
'Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o '
|
| 692 |
+
'Modules/textio.o Modules/stringio.o Modules/faulthandler.o '
|
| 693 |
+
'Modules/_tracemalloc.o Modules/symtablemodule.o '
|
| 694 |
+
'Modules/xxsubtype.o',
|
| 695 |
+
'MODULE_OBJS': '\\',
|
| 696 |
+
'MULTIARCH': 'x86_64-linux-gnu',
|
| 697 |
+
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
|
| 698 |
+
'MVWDELCH_IS_EXPRESSION': 1,
|
| 699 |
+
'NO_AS_NEEDED': '-Wl,--no-as-needed',
|
| 700 |
+
'OBJECT_OBJS': '\\',
|
| 701 |
+
'OPENSSL_INCLUDES': '-I/root/envs/llava/include',
|
| 702 |
+
'OPENSSL_LDFLAGS': '-L/root/envs/llava/lib',
|
| 703 |
+
'OPENSSL_LIBS': '-lssl -lcrypto',
|
| 704 |
+
'OPENSSL_RPATH': '',
|
| 705 |
+
'OPT': '-DNDEBUG -fwrapv -O2 -Wall',
|
| 706 |
+
'OTHER_LIBTOOL_OPT': '',
|
| 707 |
+
'PACKAGE_BUGREPORT': 0,
|
| 708 |
+
'PACKAGE_NAME': 0,
|
| 709 |
+
'PACKAGE_STRING': 0,
|
| 710 |
+
'PACKAGE_TARNAME': 0,
|
| 711 |
+
'PACKAGE_URL': 0,
|
| 712 |
+
'PACKAGE_VERSION': 0,
|
| 713 |
+
'PARSER_HEADERS': '\\',
|
| 714 |
+
'PARSER_OBJS': '\\ \\ Parser/myreadline.o Parser/tokenizer.o',
|
| 715 |
+
'PEGEN_HEADERS': '\\',
|
| 716 |
+
'PEGEN_OBJS': '\\',
|
| 717 |
+
'PGO_PROF_GEN_FLAG': '-fprofile-generate',
|
| 718 |
+
'PGO_PROF_USE_FLAG': ' ',
|
| 719 |
+
'PLATLIBDIR': 'lib',
|
| 720 |
+
'POBJS': '\\',
|
| 721 |
+
'POSIX_SEMAPHORES_NOT_ENABLED': 0,
|
| 722 |
+
'PROFILE_TASK': '-m test --pgo',
|
| 723 |
+
'PTHREAD_KEY_T_IS_COMPATIBLE_WITH_INT': 1,
|
| 724 |
+
'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1,
|
| 725 |
+
'PURIFY': '',
|
| 726 |
+
'PY3LIBRARY': '',
|
| 727 |
+
'PYLONG_BITS_IN_DIGIT': 0,
|
| 728 |
+
'PYTHON': 'python',
|
| 729 |
+
'PYTHONFRAMEWORK': '',
|
| 730 |
+
'PYTHONFRAMEWORKDIR': 'no-framework',
|
| 731 |
+
'PYTHONFRAMEWORKINSTALLDIR': '',
|
| 732 |
+
'PYTHONFRAMEWORKPREFIX': '',
|
| 733 |
+
'PYTHONPATH': '',
|
| 734 |
+
'PYTHON_FOR_BUILD': './python -E',
|
| 735 |
+
'PYTHON_FOR_REGEN': '',
|
| 736 |
+
'PYTHON_HEADERS': '\\',
|
| 737 |
+
'PYTHON_OBJS': '\\',
|
| 738 |
+
'PY_BUILD_ENVIRON': '',
|
| 739 |
+
'PY_BUILTIN_HASHLIB_HASHES': '"md5,sha1,sha256,sha512,sha3,blake2"',
|
| 740 |
+
'PY_BUILTIN_MODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG '
|
| 741 |
+
'-fwrapv -O2 -Wall -march=nocona -mtune=haswell '
|
| 742 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 743 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 744 |
+
'/root/envs/llava/include '
|
| 745 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 746 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 747 |
+
' '
|
| 748 |
+
' -march=nocona '
|
| 749 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 750 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 751 |
+
'-ffunction-sections -pipe -isystem '
|
| 752 |
+
'/root/envs/llava/include '
|
| 753 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 754 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 755 |
+
' '
|
| 756 |
+
' '
|
| 757 |
+
'-fno-semantic-interposition '
|
| 758 |
+
' '
|
| 759 |
+
' -g -std=c99 -Wextra '
|
| 760 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 761 |
+
'-Wno-missing-field-initializers '
|
| 762 |
+
'-Werror=implicit-function-declaration '
|
| 763 |
+
'-fvisibility=hidden '
|
| 764 |
+
' '
|
| 765 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 766 |
+
'-IObjects -IInclude -IPython -I. '
|
| 767 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 768 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 769 |
+
'/root/envs/llava/include '
|
| 770 |
+
'-I/root/envs/llava/include '
|
| 771 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 772 |
+
'/root/envs/llava/include '
|
| 773 |
+
'-I/root/envs/llava/include '
|
| 774 |
+
'-DPy_BUILD_CORE_BUILTIN',
|
| 775 |
+
'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 776 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 777 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 778 |
+
'-isystem '
|
| 779 |
+
'/root/envs/llava/include '
|
| 780 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 781 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 782 |
+
' '
|
| 783 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 784 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 785 |
+
'-isystem '
|
| 786 |
+
'/root/envs/llava/include '
|
| 787 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 788 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 789 |
+
' '
|
| 790 |
+
'',
|
| 791 |
+
'PY_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 792 |
+
' -g -std=c99 '
|
| 793 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 794 |
+
'-Wno-missing-field-initializers '
|
| 795 |
+
'-Werror=implicit-function-declaration '
|
| 796 |
+
'-fvisibility=hidden '
|
| 797 |
+
'-I/croot/python-split_1733933809325/work/Include/internal',
|
| 798 |
+
'PY_COERCE_C_LOCALE': 1,
|
| 799 |
+
'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 '
|
| 800 |
+
'-Wall -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 801 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 802 |
+
'-pipe -isystem '
|
| 803 |
+
'/root/envs/llava/include '
|
| 804 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 805 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 806 |
+
' '
|
| 807 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 808 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 809 |
+
'-pipe -isystem '
|
| 810 |
+
'/root/envs/llava/include '
|
| 811 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 812 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 813 |
+
' '
|
| 814 |
+
' -fno-semantic-interposition '
|
| 815 |
+
' '
|
| 816 |
+
'-g -std=c99 -Wextra -Wno-unused-result '
|
| 817 |
+
'-Wno-unused-parameter -Wno-missing-field-initializers '
|
| 818 |
+
'-Werror=implicit-function-declaration -fvisibility=hidden '
|
| 819 |
+
' '
|
| 820 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 821 |
+
'-IObjects -IInclude -IPython -I. '
|
| 822 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 823 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 824 |
+
'/root/envs/llava/include '
|
| 825 |
+
'-I/root/envs/llava/include '
|
| 826 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 827 |
+
'/root/envs/llava/include '
|
| 828 |
+
'-I/root/envs/llava/include '
|
| 829 |
+
'-DPy_BUILD_CORE',
|
| 830 |
+
'PY_CORE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 831 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 832 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 833 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 834 |
+
'-L/root/envs/llava/lib '
|
| 835 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 836 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 837 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 838 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 839 |
+
'-L/root/envs/llava/lib '
|
| 840 |
+
'-fno-semantic-interposition '
|
| 841 |
+
' -g',
|
| 842 |
+
'PY_CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 843 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 844 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 845 |
+
'/root/envs/llava/include '
|
| 846 |
+
'-I/root/envs/llava/include '
|
| 847 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 848 |
+
'/root/envs/llava/include '
|
| 849 |
+
'-I/root/envs/llava/include',
|
| 850 |
+
'PY_ENABLE_SHARED': 0,
|
| 851 |
+
'PY_FORMAT_SIZE_T': '"z"',
|
| 852 |
+
'PY_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 853 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 854 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 855 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 856 |
+
'-L/root/envs/llava/lib '
|
| 857 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 858 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 859 |
+
'-Wl,-rpath,/root/envs/llava/lib '
|
| 860 |
+
'-Wl,-rpath-link,/root/envs/llava/lib '
|
| 861 |
+
'-L/root/envs/llava/lib',
|
| 862 |
+
'PY_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 863 |
+
' -g',
|
| 864 |
+
'PY_SSL_DEFAULT_CIPHERS': 1,
|
| 865 |
+
'PY_SSL_DEFAULT_CIPHER_STRING': 0,
|
| 866 |
+
'PY_STDMODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv '
|
| 867 |
+
'-O2 -Wall -march=nocona -mtune=haswell '
|
| 868 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 869 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 870 |
+
'/root/envs/llava/include '
|
| 871 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 872 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 873 |
+
' '
|
| 874 |
+
' -march=nocona '
|
| 875 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 876 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 877 |
+
'-ffunction-sections -pipe -isystem '
|
| 878 |
+
'/root/envs/llava/include '
|
| 879 |
+
'-fdebug-prefix-map=/croot/python-split_1733933809325/work=/usr/local/src/conda/python-3.10.16 '
|
| 880 |
+
'-fdebug-prefix-map=/root/envs/llava=/usr/local/src/conda-prefix '
|
| 881 |
+
' '
|
| 882 |
+
' '
|
| 883 |
+
'-fno-semantic-interposition '
|
| 884 |
+
' -g -std=c99 '
|
| 885 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 886 |
+
'-Wno-missing-field-initializers '
|
| 887 |
+
'-Werror=implicit-function-declaration '
|
| 888 |
+
'-fvisibility=hidden '
|
| 889 |
+
' '
|
| 890 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 891 |
+
'-IObjects -IInclude -IPython -I. '
|
| 892 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 893 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 894 |
+
'/root/envs/llava/include '
|
| 895 |
+
'-I/root/envs/llava/include '
|
| 896 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 897 |
+
'/root/envs/llava/include '
|
| 898 |
+
'-I/root/envs/llava/include',
|
| 899 |
+
'Py_DEBUG': 0,
|
| 900 |
+
'Py_ENABLE_SHARED': 0,
|
| 901 |
+
'Py_HASH_ALGORITHM': 0,
|
| 902 |
+
'Py_TRACE_REFS': 0,
|
| 903 |
+
'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\',
|
| 904 |
+
'READELF': 'x86_64-conda-linux-gnu-readelf',
|
| 905 |
+
'RESSRCDIR': 'Mac/Resources/framework',
|
| 906 |
+
'RETSIGTYPE': 'void',
|
| 907 |
+
'RUNSHARED': '',
|
| 908 |
+
'SCRIPTDIR': '/root/envs/llava/lib',
|
| 909 |
+
'SETPGRP_HAVE_ARG': 0,
|
| 910 |
+
'SHELL': '/bin/sh',
|
| 911 |
+
'SHLIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 912 |
+
'SHLIB_SUFFIX': '.so',
|
| 913 |
+
'SHM_NEEDS_LIBRT': 1,
|
| 914 |
+
'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0,
|
| 915 |
+
'SITEPATH': '',
|
| 916 |
+
'SIZEOF_DOUBLE': 8,
|
| 917 |
+
'SIZEOF_FLOAT': 4,
|
| 918 |
+
'SIZEOF_FPOS_T': 16,
|
| 919 |
+
'SIZEOF_INT': 4,
|
| 920 |
+
'SIZEOF_LONG': 8,
|
| 921 |
+
'SIZEOF_LONG_DOUBLE': 16,
|
| 922 |
+
'SIZEOF_LONG_LONG': 8,
|
| 923 |
+
'SIZEOF_OFF_T': 8,
|
| 924 |
+
'SIZEOF_PID_T': 4,
|
| 925 |
+
'SIZEOF_PTHREAD_KEY_T': 4,
|
| 926 |
+
'SIZEOF_PTHREAD_T': 8,
|
| 927 |
+
'SIZEOF_SHORT': 2,
|
| 928 |
+
'SIZEOF_SIZE_T': 8,
|
| 929 |
+
'SIZEOF_TIME_T': 8,
|
| 930 |
+
'SIZEOF_UINTPTR_T': 8,
|
| 931 |
+
'SIZEOF_VOID_P': 8,
|
| 932 |
+
'SIZEOF_WCHAR_T': 4,
|
| 933 |
+
'SIZEOF__BOOL': 1,
|
| 934 |
+
'SOABI': 'cpython-310-x86_64-linux-gnu',
|
| 935 |
+
'SRCDIRS': 'Parser Objects Python Modules Modules/_io Programs',
|
| 936 |
+
'SRC_GDB_HOOKS': '/croot/python-split_1733933809325/work/Tools/gdb/libpython.py',
|
| 937 |
+
'STATIC_LIBPYTHON': 1,
|
| 938 |
+
'STDC_HEADERS': 1,
|
| 939 |
+
'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */",
|
| 940 |
+
'STRIPFLAG': '-s',
|
| 941 |
+
'SUBDIRS': '',
|
| 942 |
+
'SUBDIRSTOO': 'Include Lib Misc',
|
| 943 |
+
'SYSLIBS': '-lm',
|
| 944 |
+
'SYS_SELECT_WITH_SYS_TIME': 1,
|
| 945 |
+
'TCLTK_INCLUDES': '-I/root/envs/llava/include',
|
| 946 |
+
'TCLTK_LIBS': '-L/root/envs/llava/lib '
|
| 947 |
+
'-ltcl8.6 -ltk8.6',
|
| 948 |
+
'TESTOPTS': '',
|
| 949 |
+
'TESTPATH': '',
|
| 950 |
+
'TESTPYTHON': './python',
|
| 951 |
+
'TESTPYTHONOPTS': '',
|
| 952 |
+
'TESTRUNNER': './python '
|
| 953 |
+
'/croot/python-split_1733933809325/work/Tools/scripts/run_tests.py',
|
| 954 |
+
'TESTSUBDIRS': 'ctypes/test \\',
|
| 955 |
+
'TESTTIMEOUT': 1200,
|
| 956 |
+
'TEST_MODULES': 'yes',
|
| 957 |
+
'THREAD_STACK_SIZE': 0,
|
| 958 |
+
'TIMEMODULE_LIB': 0,
|
| 959 |
+
'TIME_WITH_SYS_TIME': 1,
|
| 960 |
+
'TM_IN_SYS_TIME': 0,
|
| 961 |
+
'TZPATH': '/root/envs/llava/share/zoneinfo',
|
| 962 |
+
'UNICODE_DEPS': '\\',
|
| 963 |
+
'UNIVERSALSDK': '',
|
| 964 |
+
'UPDATE_FILE': '/croot/python-split_1733933809325/work/Tools/scripts/update_file.py',
|
| 965 |
+
'USE_COMPUTED_GOTOS': 1,
|
| 966 |
+
'VERSION': '3.10',
|
| 967 |
+
'VPATH': '/croot/python-split_1733933809325/work',
|
| 968 |
+
'WHEEL_PKG_DIR': '',
|
| 969 |
+
'WINDOW_HAS_FLAGS': 1,
|
| 970 |
+
'WITH_DECIMAL_CONTEXTVAR': 1,
|
| 971 |
+
'WITH_DOC_STRINGS': 1,
|
| 972 |
+
'WITH_DTRACE': 0,
|
| 973 |
+
'WITH_DYLD': 0,
|
| 974 |
+
'WITH_EDITLINE': 0,
|
| 975 |
+
'WITH_LIBINTL': 0,
|
| 976 |
+
'WITH_NEXT_FRAMEWORK': 0,
|
| 977 |
+
'WITH_PYMALLOC': 1,
|
| 978 |
+
'WITH_VALGRIND': 0,
|
| 979 |
+
'X87_DOUBLE_ROUNDING': 0,
|
| 980 |
+
'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax',
|
| 981 |
+
'abs_builddir': '/croot/python-split_1733933809325/work/build-static',
|
| 982 |
+
'abs_srcdir': '/croot/python-split_1733933809325/work',
|
| 983 |
+
'datarootdir': '/root/envs/llava/share',
|
| 984 |
+
'exec_prefix': '/root/envs/llava',
|
| 985 |
+
'prefix': '/root/envs/llava',
|
| 986 |
+
'srcdir': '/croot/python-split_1733933809325/work'}
|
llava/lib/python3.10/_threading_local.py
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Thread-local objects.
|
| 2 |
+
|
| 3 |
+
(Note that this module provides a Python version of the threading.local
|
| 4 |
+
class. Depending on the version of Python you're using, there may be a
|
| 5 |
+
faster one available. You should always import the `local` class from
|
| 6 |
+
`threading`.)
|
| 7 |
+
|
| 8 |
+
Thread-local objects support the management of thread-local data.
|
| 9 |
+
If you have data that you want to be local to a thread, simply create
|
| 10 |
+
a thread-local object and use its attributes:
|
| 11 |
+
|
| 12 |
+
>>> mydata = local()
|
| 13 |
+
>>> mydata.number = 42
|
| 14 |
+
>>> mydata.number
|
| 15 |
+
42
|
| 16 |
+
|
| 17 |
+
You can also access the local-object's dictionary:
|
| 18 |
+
|
| 19 |
+
>>> mydata.__dict__
|
| 20 |
+
{'number': 42}
|
| 21 |
+
>>> mydata.__dict__.setdefault('widgets', [])
|
| 22 |
+
[]
|
| 23 |
+
>>> mydata.widgets
|
| 24 |
+
[]
|
| 25 |
+
|
| 26 |
+
What's important about thread-local objects is that their data are
|
| 27 |
+
local to a thread. If we access the data in a different thread:
|
| 28 |
+
|
| 29 |
+
>>> log = []
|
| 30 |
+
>>> def f():
|
| 31 |
+
... items = sorted(mydata.__dict__.items())
|
| 32 |
+
... log.append(items)
|
| 33 |
+
... mydata.number = 11
|
| 34 |
+
... log.append(mydata.number)
|
| 35 |
+
|
| 36 |
+
>>> import threading
|
| 37 |
+
>>> thread = threading.Thread(target=f)
|
| 38 |
+
>>> thread.start()
|
| 39 |
+
>>> thread.join()
|
| 40 |
+
>>> log
|
| 41 |
+
[[], 11]
|
| 42 |
+
|
| 43 |
+
we get different data. Furthermore, changes made in the other thread
|
| 44 |
+
don't affect data seen in this thread:
|
| 45 |
+
|
| 46 |
+
>>> mydata.number
|
| 47 |
+
42
|
| 48 |
+
|
| 49 |
+
Of course, values you get from a local object, including a __dict__
|
| 50 |
+
attribute, are for whatever thread was current at the time the
|
| 51 |
+
attribute was read. For that reason, you generally don't want to save
|
| 52 |
+
these values across threads, as they apply only to the thread they
|
| 53 |
+
came from.
|
| 54 |
+
|
| 55 |
+
You can create custom local objects by subclassing the local class:
|
| 56 |
+
|
| 57 |
+
>>> class MyLocal(local):
|
| 58 |
+
... number = 2
|
| 59 |
+
... def __init__(self, /, **kw):
|
| 60 |
+
... self.__dict__.update(kw)
|
| 61 |
+
... def squared(self):
|
| 62 |
+
... return self.number ** 2
|
| 63 |
+
|
| 64 |
+
This can be useful to support default values, methods and
|
| 65 |
+
initialization. Note that if you define an __init__ method, it will be
|
| 66 |
+
called each time the local object is used in a separate thread. This
|
| 67 |
+
is necessary to initialize each thread's dictionary.
|
| 68 |
+
|
| 69 |
+
Now if we create a local object:
|
| 70 |
+
|
| 71 |
+
>>> mydata = MyLocal(color='red')
|
| 72 |
+
|
| 73 |
+
Now we have a default number:
|
| 74 |
+
|
| 75 |
+
>>> mydata.number
|
| 76 |
+
2
|
| 77 |
+
|
| 78 |
+
an initial color:
|
| 79 |
+
|
| 80 |
+
>>> mydata.color
|
| 81 |
+
'red'
|
| 82 |
+
>>> del mydata.color
|
| 83 |
+
|
| 84 |
+
And a method that operates on the data:
|
| 85 |
+
|
| 86 |
+
>>> mydata.squared()
|
| 87 |
+
4
|
| 88 |
+
|
| 89 |
+
As before, we can access the data in a separate thread:
|
| 90 |
+
|
| 91 |
+
>>> log = []
|
| 92 |
+
>>> thread = threading.Thread(target=f)
|
| 93 |
+
>>> thread.start()
|
| 94 |
+
>>> thread.join()
|
| 95 |
+
>>> log
|
| 96 |
+
[[('color', 'red')], 11]
|
| 97 |
+
|
| 98 |
+
without affecting this thread's data:
|
| 99 |
+
|
| 100 |
+
>>> mydata.number
|
| 101 |
+
2
|
| 102 |
+
>>> mydata.color
|
| 103 |
+
Traceback (most recent call last):
|
| 104 |
+
...
|
| 105 |
+
AttributeError: 'MyLocal' object has no attribute 'color'
|
| 106 |
+
|
| 107 |
+
Note that subclasses can define slots, but they are not thread
|
| 108 |
+
local. They are shared across threads:
|
| 109 |
+
|
| 110 |
+
>>> class MyLocal(local):
|
| 111 |
+
... __slots__ = 'number'
|
| 112 |
+
|
| 113 |
+
>>> mydata = MyLocal()
|
| 114 |
+
>>> mydata.number = 42
|
| 115 |
+
>>> mydata.color = 'red'
|
| 116 |
+
|
| 117 |
+
So, the separate thread:
|
| 118 |
+
|
| 119 |
+
>>> thread = threading.Thread(target=f)
|
| 120 |
+
>>> thread.start()
|
| 121 |
+
>>> thread.join()
|
| 122 |
+
|
| 123 |
+
affects what we see:
|
| 124 |
+
|
| 125 |
+
>>> mydata.number
|
| 126 |
+
11
|
| 127 |
+
|
| 128 |
+
>>> del mydata
|
| 129 |
+
"""
|
| 130 |
+
|
| 131 |
+
from weakref import ref
|
| 132 |
+
from contextlib import contextmanager
|
| 133 |
+
|
| 134 |
+
__all__ = ["local"]
|
| 135 |
+
|
| 136 |
+
# We need to use objects from the threading module, but the threading
|
| 137 |
+
# module may also want to use our `local` class, if support for locals
|
| 138 |
+
# isn't compiled in to the `thread` module. This creates potential problems
|
| 139 |
+
# with circular imports. For that reason, we don't import `threading`
|
| 140 |
+
# until the bottom of this file (a hack sufficient to worm around the
|
| 141 |
+
# potential problems). Note that all platforms on CPython do have support
|
| 142 |
+
# for locals in the `thread` module, and there is no circular import problem
|
| 143 |
+
# then, so problems introduced by fiddling the order of imports here won't
|
| 144 |
+
# manifest.
|
| 145 |
+
|
| 146 |
+
class _localimpl:
|
| 147 |
+
"""A class managing thread-local dicts"""
|
| 148 |
+
__slots__ = 'key', 'dicts', 'localargs', 'locallock', '__weakref__'
|
| 149 |
+
|
| 150 |
+
def __init__(self):
|
| 151 |
+
# The key used in the Thread objects' attribute dicts.
|
| 152 |
+
# We keep it a string for speed but make it unlikely to clash with
|
| 153 |
+
# a "real" attribute.
|
| 154 |
+
self.key = '_threading_local._localimpl.' + str(id(self))
|
| 155 |
+
# { id(Thread) -> (ref(Thread), thread-local dict) }
|
| 156 |
+
self.dicts = {}
|
| 157 |
+
|
| 158 |
+
def get_dict(self):
|
| 159 |
+
"""Return the dict for the current thread. Raises KeyError if none
|
| 160 |
+
defined."""
|
| 161 |
+
thread = current_thread()
|
| 162 |
+
return self.dicts[id(thread)][1]
|
| 163 |
+
|
| 164 |
+
def create_dict(self):
|
| 165 |
+
"""Create a new dict for the current thread, and return it."""
|
| 166 |
+
localdict = {}
|
| 167 |
+
key = self.key
|
| 168 |
+
thread = current_thread()
|
| 169 |
+
idt = id(thread)
|
| 170 |
+
def local_deleted(_, key=key):
|
| 171 |
+
# When the localimpl is deleted, remove the thread attribute.
|
| 172 |
+
thread = wrthread()
|
| 173 |
+
if thread is not None:
|
| 174 |
+
del thread.__dict__[key]
|
| 175 |
+
def thread_deleted(_, idt=idt):
|
| 176 |
+
# When the thread is deleted, remove the local dict.
|
| 177 |
+
# Note that this is suboptimal if the thread object gets
|
| 178 |
+
# caught in a reference loop. We would like to be called
|
| 179 |
+
# as soon as the OS-level thread ends instead.
|
| 180 |
+
local = wrlocal()
|
| 181 |
+
if local is not None:
|
| 182 |
+
dct = local.dicts.pop(idt)
|
| 183 |
+
wrlocal = ref(self, local_deleted)
|
| 184 |
+
wrthread = ref(thread, thread_deleted)
|
| 185 |
+
thread.__dict__[key] = wrlocal
|
| 186 |
+
self.dicts[idt] = wrthread, localdict
|
| 187 |
+
return localdict
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
@contextmanager
|
| 191 |
+
def _patch(self):
|
| 192 |
+
impl = object.__getattribute__(self, '_local__impl')
|
| 193 |
+
try:
|
| 194 |
+
dct = impl.get_dict()
|
| 195 |
+
except KeyError:
|
| 196 |
+
dct = impl.create_dict()
|
| 197 |
+
args, kw = impl.localargs
|
| 198 |
+
self.__init__(*args, **kw)
|
| 199 |
+
with impl.locallock:
|
| 200 |
+
object.__setattr__(self, '__dict__', dct)
|
| 201 |
+
yield
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
class local:
    """Thread-local data container (pure-Python fallback for threading.local).

    Attributes set on an instance are visible only to the thread that set
    them; each thread gets its own underlying ``__dict__``.
    """
    __slots__ = '_local__impl', '__dict__'

    def __new__(cls, /, *args, **kw):
        # Constructor arguments only make sense when a subclass defines
        # __init__ to consume them.
        if (args or kw) and (cls.__init__ is object.__init__):
            raise TypeError("Initialization arguments are not supported")
        inst = object.__new__(cls)
        impl = _localimpl()
        impl.localargs = (args, kw)
        impl.locallock = RLock()
        object.__setattr__(inst, '_local__impl', impl)
        # Pre-create the current thread's dict: _patch replays __init__
        # on a thread's first use, and without this the creating thread
        # would see its __init__ run twice.
        impl.create_dict()
        return inst

    def __getattribute__(self, name):
        with _patch(self):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        # The per-thread __dict__ is managed by _patch and must not be
        # replaced by callers.
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__delattr__(self, name)
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
from threading import current_thread, RLock
|
llava/lib/python3.10/cgi.py
ADDED
|
@@ -0,0 +1,1004 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/local/bin/python
|
| 2 |
+
|
| 3 |
+
# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
|
| 4 |
+
# intentionally NOT "/usr/bin/env python". On many systems
|
| 5 |
+
# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
|
| 6 |
+
# scripts, and /usr/local/bin is the default directory where Python is
|
| 7 |
+
# installed, so /usr/bin/env would be unable to find python. Granted,
|
| 8 |
+
# binary installations by Linux vendors often install Python in
|
| 9 |
+
# /usr/bin. So let those vendors patch cgi.py to match their choice
|
| 10 |
+
# of installation.
|
| 11 |
+
|
| 12 |
+
"""Support module for CGI (Common Gateway Interface) scripts.
|
| 13 |
+
|
| 14 |
+
This module defines a number of utilities for use by CGI scripts
|
| 15 |
+
written in Python.
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
# History
|
| 19 |
+
# -------
|
| 20 |
+
#
|
| 21 |
+
# Michael McLay started this module. Steve Majewski changed the
|
| 22 |
+
# interface to SvFormContentDict and FormContentDict. The multipart
|
| 23 |
+
# parsing was inspired by code submitted by Andreas Paepcke. Guido van
|
| 24 |
+
# Rossum rewrote, reformatted and documented the module and is currently
|
| 25 |
+
# responsible for its maintenance.
|
| 26 |
+
#
|
| 27 |
+
|
| 28 |
+
__version__ = "2.6"
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# Imports
|
| 32 |
+
# =======
|
| 33 |
+
|
| 34 |
+
from io import StringIO, BytesIO, TextIOWrapper
|
| 35 |
+
from collections.abc import Mapping
|
| 36 |
+
import sys
|
| 37 |
+
import os
|
| 38 |
+
import urllib.parse
|
| 39 |
+
from email.parser import FeedParser
|
| 40 |
+
from email.message import Message
|
| 41 |
+
import html
|
| 42 |
+
import locale
|
| 43 |
+
import tempfile
|
| 44 |
+
import warnings
|
| 45 |
+
|
| 46 |
+
__all__ = ["MiniFieldStorage", "FieldStorage", "parse", "parse_multipart",
|
| 47 |
+
"parse_header", "test", "print_exception", "print_environ",
|
| 48 |
+
"print_form", "print_directory", "print_arguments",
|
| 49 |
+
"print_environ_usage"]
|
| 50 |
+
|
| 51 |
+
# Logging support
|
| 52 |
+
# ===============
|
| 53 |
+
|
| 54 |
+
logfile = "" # Filename to log to, if not empty
|
| 55 |
+
logfp = None # File object to log to, if not None
|
| 56 |
+
|
| 57 |
+
def initlog(*allargs):
    """Write a log message, if there is a log file.

    Even though this function is called initlog(), you should always
    use log(); log is a variable that is set either to initlog
    (initially), to dolog (once the log file has been opened), or to
    nolog (when logging is disabled).

    The first argument is a format string; the remaining arguments (if
    any) are arguments to the % operator, so e.g.
        log("%s: %s", "a", "b")
    will write "a: b" to the log file, followed by a newline.

    If the global logfp is not None, it should be a file object to
    which log data is written.

    If the global logfp is None, the global logfile may be a string
    giving a filename to open, in append mode.  This file should be
    world writable!!!  If the file can't be opened, logging is
    silently disabled (since there is no safe place where we could
    send an error message).

    """
    global log, logfile, logfp
    warnings.warn("cgi.log() is deprecated as of 3.10. Use logging instead",
                  DeprecationWarning, stacklevel=2)
    # Lazily open the configured log file on first use; an OSError
    # silently disables logging (there is nowhere safe to report it).
    if logfile and not logfp:
        try:
            logfp = open(logfile, "a", encoding="locale")
        except OSError:
            pass
    # Rebind the module-level `log` so later calls skip this setup.
    if not logfp:
        log = nolog
    else:
        log = dolog
    # Deliver the message that triggered initialization.
    log(*allargs)
|
| 93 |
+
|
| 94 |
+
def dolog(fmt, *args):
    """Write a log message to the log file. See initlog() for docs."""
    # Format first, then append the trailing newline in a single write.
    message = fmt % args
    logfp.write(message + "\n")
|
| 97 |
+
|
| 98 |
+
def nolog(*allargs):
    """No-op stand-in assigned to log when logging is disabled."""
    return None
|
| 101 |
+
|
| 102 |
+
def closelog():
    """Close the log file and reset logging to its initial state."""
    global log, logfile, logfp
    # Clear the configured filename so a later log() call does not
    # silently reopen the old file.
    logfile = ''
    if logfp:
        logfp.close()
        logfp = None
    # Re-arm lazy initialization for the next log() call.
    log = initlog

log = initlog           # The current logging function
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# Parsing functions
|
| 115 |
+
# =================
|
| 116 |
+
|
| 117 |
+
# Maximum input we will accept when REQUEST_METHOD is POST
|
| 118 |
+
# 0 ==> unlimited input
|
| 119 |
+
maxlen = 0
|
| 120 |
+
|
| 121 |
+
def parse(fp=None, environ=os.environ, keep_blank_values=0,
          strict_parsing=0, separator='&'):
    """Parse a query in the environment or from a file (default stdin)

        Arguments, all optional:

        fp              : file pointer; default: sys.stdin.buffer

        environ         : environment dictionary; default: os.environ

        keep_blank_values: flag indicating whether blank values in
            percent-encoded forms should be treated as blank strings.
            A true value indicates that blanks should be retained as
            blank strings.  The default false value indicates that
            blank values are to be ignored and treated as if they were
            not included.

        strict_parsing: flag indicating what to do with parsing errors.
            If false (the default), errors are silently ignored.
            If true, errors raise a ValueError exception.

        separator: str. The symbol to use for separating the query arguments.
            Defaults to &.
    """
    if fp is None:
        fp = sys.stdin

    # field keys and values (except for files) are returned as strings
    # an encoding is required to decode the bytes read from self.fp
    if hasattr(fp,'encoding'):
        encoding = fp.encoding
    else:
        encoding = 'latin-1'

    # fp.read() must return bytes
    if isinstance(fp, TextIOWrapper):
        fp = fp.buffer

    if not 'REQUEST_METHOD' in environ:
        environ['REQUEST_METHOD'] = 'GET'       # For testing stand-alone
    if environ['REQUEST_METHOD'] == 'POST':
        ctype, pdict = parse_header(environ['CONTENT_TYPE'])
        if ctype == 'multipart/form-data':
            # Multipart bodies get their own parser; query-string
            # handling below does not apply to them.
            return parse_multipart(fp, pdict, separator=separator)
        elif ctype == 'application/x-www-form-urlencoded':
            clength = int(environ['CONTENT_LENGTH'])
            if maxlen and clength > maxlen:
                raise ValueError('Maximum content length exceeded')
            qs = fp.read(clength).decode(encoding)
        else:
            qs = ''                     # Unknown content-type
        # For POST, any query string (or first CLI argument, for
        # stand-alone testing) is appended to the body's fields.
        if 'QUERY_STRING' in environ:
            if qs: qs = qs + '&'
            qs = qs + environ['QUERY_STRING']
        elif sys.argv[1:]:
            if qs: qs = qs + '&'
            qs = qs + sys.argv[1]
        environ['QUERY_STRING'] = qs    # XXX Shouldn't, really
    elif 'QUERY_STRING' in environ:
        qs = environ['QUERY_STRING']
    else:
        # GET with no QUERY_STRING: fall back to the first CLI
        # argument for stand-alone testing.
        if sys.argv[1:]:
            qs = sys.argv[1]
        else:
            qs = ""
        environ['QUERY_STRING'] = qs    # XXX Shouldn't, really
    return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
                                 encoding=encoding, separator=separator)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator='&'):
    """Parse multipart input.

    Arguments:
    fp   : input file
    pdict: dictionary containing other parameters of content-type header
    encoding, errors: request encoding and error handler, passed to
        FieldStorage

    Returns a dictionary just like parse_qs(): keys are the field names, each
    value is a list of values for that field. For non-file fields, the value
    is a list of strings.
    """
    # RFC 2046, Section 5.1 : The "multipart" boundary delimiters are always
    # represented as 7bit US-ASCII.
    boundary = pdict['boundary'].decode('ascii')
    ctype = "multipart/form-data; boundary={}".format(boundary)
    # Build a minimal header set so FieldStorage can drive the parse.
    headers = Message()
    headers.set_type(ctype)
    try:
        headers['Content-Length'] = pdict['CONTENT-LENGTH']
    except KeyError:
        # Content length is optional; FieldStorage reads to EOF then.
        pass
    fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors,
                      environ={'REQUEST_METHOD': 'POST'}, separator=separator)
    # Flatten the FieldStorage into a parse_qs()-style dict of lists.
    return {k: fs.getlist(k) for k in fs}
|
| 217 |
+
|
| 218 |
+
def _parseparam(s):
|
| 219 |
+
while s[:1] == ';':
|
| 220 |
+
s = s[1:]
|
| 221 |
+
end = s.find(';')
|
| 222 |
+
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
|
| 223 |
+
end = s.find(';', end + 1)
|
| 224 |
+
if end < 0:
|
| 225 |
+
end = len(s)
|
| 226 |
+
f = s[:end]
|
| 227 |
+
yield f.strip()
|
| 228 |
+
s = s[end:]
|
| 229 |
+
|
| 230 |
+
def parse_header(line):
    """Parse a Content-type like header.

    Return the main content-type and a dictionary of options.

    """
    segments = _parseparam(';' + line)
    main_value = next(segments)
    options = {}
    for segment in segments:
        eq = segment.find('=')
        if eq < 0:
            # Segments without '=' carry no option; skip them.
            continue
        name = segment[:eq].strip().lower()
        value = segment[eq + 1:].strip()
        if len(value) >= 2 and value[0] == value[-1] == '"':
            # Drop the surrounding quotes and undo backslash escapes.
            value = value[1:-1]
            value = value.replace('\\\\', '\\').replace('\\"', '"')
        options[name] = value
    return main_value, options
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
# Classes for field storage
|
| 252 |
+
# =========================
|
| 253 |
+
|
| 254 |
+
class MiniFieldStorage:

    """Like FieldStorage, for use when no file uploads are possible."""

    # Placeholder attributes so this class mirrors FieldStorage's API.
    filename = None
    list = None
    type = None
    file = None
    type_options = {}
    disposition = None
    disposition_options = {}
    headers = {}

    def __init__(self, name, value):
        """Store the field name and its value."""
        self.name = name
        self.value = value

    def __repr__(self):
        """Return a printable representation."""
        return "MiniFieldStorage(%r, %r)" % (self.name, self.value)
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
class FieldStorage:
|
| 280 |
+
|
| 281 |
+
"""Store a sequence of fields, reading multipart/form-data.
|
| 282 |
+
|
| 283 |
+
This class provides naming, typing, files stored on disk, and
|
| 284 |
+
more. At the top level, it is accessible like a dictionary, whose
|
| 285 |
+
keys are the field names. (Note: None can occur as a field name.)
|
| 286 |
+
The items are either a Python list (if there's multiple values) or
|
| 287 |
+
another FieldStorage or MiniFieldStorage object. If it's a single
|
| 288 |
+
object, it has the following attributes:
|
| 289 |
+
|
| 290 |
+
name: the field name, if specified; otherwise None
|
| 291 |
+
|
| 292 |
+
filename: the filename, if specified; otherwise None; this is the
|
| 293 |
+
client side filename, *not* the file name on which it is
|
| 294 |
+
stored (that's a temporary file you don't deal with)
|
| 295 |
+
|
| 296 |
+
value: the value as a *string*; for file uploads, this
|
| 297 |
+
transparently reads the file every time you request the value
|
| 298 |
+
and returns *bytes*
|
| 299 |
+
|
| 300 |
+
file: the file(-like) object from which you can read the data *as
|
| 301 |
+
bytes* ; None if the data is stored a simple string
|
| 302 |
+
|
| 303 |
+
type: the content-type, or None if not specified
|
| 304 |
+
|
| 305 |
+
type_options: dictionary of options specified on the content-type
|
| 306 |
+
line
|
| 307 |
+
|
| 308 |
+
disposition: content-disposition, or None if not specified
|
| 309 |
+
|
| 310 |
+
disposition_options: dictionary of corresponding options
|
| 311 |
+
|
| 312 |
+
headers: a dictionary(-like) object (sometimes email.message.Message or a
|
| 313 |
+
subclass thereof) containing *all* headers
|
| 314 |
+
|
| 315 |
+
The class is subclassable, mostly for the purpose of overriding
|
| 316 |
+
the make_file() method, which is called internally to come up with
|
| 317 |
+
a file open for reading and writing. This makes it possible to
|
| 318 |
+
override the default choice of storing all files in a temporary
|
| 319 |
+
directory and unlinking them as soon as they have been opened.
|
| 320 |
+
|
| 321 |
+
"""
|
| 322 |
+
def __init__(self, fp=None, headers=None, outerboundary=b'',
|
| 323 |
+
environ=os.environ, keep_blank_values=0, strict_parsing=0,
|
| 324 |
+
limit=None, encoding='utf-8', errors='replace',
|
| 325 |
+
max_num_fields=None, separator='&'):
|
| 326 |
+
"""Constructor. Read multipart/* until last part.
|
| 327 |
+
|
| 328 |
+
Arguments, all optional:
|
| 329 |
+
|
| 330 |
+
fp : file pointer; default: sys.stdin.buffer
|
| 331 |
+
(not used when the request method is GET)
|
| 332 |
+
Can be :
|
| 333 |
+
1. a TextIOWrapper object
|
| 334 |
+
2. an object whose read() and readline() methods return bytes
|
| 335 |
+
|
| 336 |
+
headers : header dictionary-like object; default:
|
| 337 |
+
taken from environ as per CGI spec
|
| 338 |
+
|
| 339 |
+
outerboundary : terminating multipart boundary
|
| 340 |
+
(for internal use only)
|
| 341 |
+
|
| 342 |
+
environ : environment dictionary; default: os.environ
|
| 343 |
+
|
| 344 |
+
keep_blank_values: flag indicating whether blank values in
|
| 345 |
+
percent-encoded forms should be treated as blank strings.
|
| 346 |
+
A true value indicates that blanks should be retained as
|
| 347 |
+
blank strings. The default false value indicates that
|
| 348 |
+
blank values are to be ignored and treated as if they were
|
| 349 |
+
not included.
|
| 350 |
+
|
| 351 |
+
strict_parsing: flag indicating what to do with parsing errors.
|
| 352 |
+
If false (the default), errors are silently ignored.
|
| 353 |
+
If true, errors raise a ValueError exception.
|
| 354 |
+
|
| 355 |
+
limit : used internally to read parts of multipart/form-data forms,
|
| 356 |
+
to exit from the reading loop when reached. It is the difference
|
| 357 |
+
between the form content-length and the number of bytes already
|
| 358 |
+
read
|
| 359 |
+
|
| 360 |
+
encoding, errors : the encoding and error handler used to decode the
|
| 361 |
+
binary stream to strings. Must be the same as the charset defined
|
| 362 |
+
for the page sending the form (content-type : meta http-equiv or
|
| 363 |
+
header)
|
| 364 |
+
|
| 365 |
+
max_num_fields: int. If set, then __init__ throws a ValueError
|
| 366 |
+
if there are more than n fields read by parse_qsl().
|
| 367 |
+
|
| 368 |
+
"""
|
| 369 |
+
method = 'GET'
|
| 370 |
+
self.keep_blank_values = keep_blank_values
|
| 371 |
+
self.strict_parsing = strict_parsing
|
| 372 |
+
self.max_num_fields = max_num_fields
|
| 373 |
+
self.separator = separator
|
| 374 |
+
if 'REQUEST_METHOD' in environ:
|
| 375 |
+
method = environ['REQUEST_METHOD'].upper()
|
| 376 |
+
self.qs_on_post = None
|
| 377 |
+
if method == 'GET' or method == 'HEAD':
|
| 378 |
+
if 'QUERY_STRING' in environ:
|
| 379 |
+
qs = environ['QUERY_STRING']
|
| 380 |
+
elif sys.argv[1:]:
|
| 381 |
+
qs = sys.argv[1]
|
| 382 |
+
else:
|
| 383 |
+
qs = ""
|
| 384 |
+
qs = qs.encode(locale.getpreferredencoding(), 'surrogateescape')
|
| 385 |
+
fp = BytesIO(qs)
|
| 386 |
+
if headers is None:
|
| 387 |
+
headers = {'content-type':
|
| 388 |
+
"application/x-www-form-urlencoded"}
|
| 389 |
+
if headers is None:
|
| 390 |
+
headers = {}
|
| 391 |
+
if method == 'POST':
|
| 392 |
+
# Set default content-type for POST to what's traditional
|
| 393 |
+
headers['content-type'] = "application/x-www-form-urlencoded"
|
| 394 |
+
if 'CONTENT_TYPE' in environ:
|
| 395 |
+
headers['content-type'] = environ['CONTENT_TYPE']
|
| 396 |
+
if 'QUERY_STRING' in environ:
|
| 397 |
+
self.qs_on_post = environ['QUERY_STRING']
|
| 398 |
+
if 'CONTENT_LENGTH' in environ:
|
| 399 |
+
headers['content-length'] = environ['CONTENT_LENGTH']
|
| 400 |
+
else:
|
| 401 |
+
if not (isinstance(headers, (Mapping, Message))):
|
| 402 |
+
raise TypeError("headers must be mapping or an instance of "
|
| 403 |
+
"email.message.Message")
|
| 404 |
+
self.headers = headers
|
| 405 |
+
if fp is None:
|
| 406 |
+
self.fp = sys.stdin.buffer
|
| 407 |
+
# self.fp.read() must return bytes
|
| 408 |
+
elif isinstance(fp, TextIOWrapper):
|
| 409 |
+
self.fp = fp.buffer
|
| 410 |
+
else:
|
| 411 |
+
if not (hasattr(fp, 'read') and hasattr(fp, 'readline')):
|
| 412 |
+
raise TypeError("fp must be file pointer")
|
| 413 |
+
self.fp = fp
|
| 414 |
+
|
| 415 |
+
self.encoding = encoding
|
| 416 |
+
self.errors = errors
|
| 417 |
+
|
| 418 |
+
if not isinstance(outerboundary, bytes):
|
| 419 |
+
raise TypeError('outerboundary must be bytes, not %s'
|
| 420 |
+
% type(outerboundary).__name__)
|
| 421 |
+
self.outerboundary = outerboundary
|
| 422 |
+
|
| 423 |
+
self.bytes_read = 0
|
| 424 |
+
self.limit = limit
|
| 425 |
+
|
| 426 |
+
# Process content-disposition header
|
| 427 |
+
cdisp, pdict = "", {}
|
| 428 |
+
if 'content-disposition' in self.headers:
|
| 429 |
+
cdisp, pdict = parse_header(self.headers['content-disposition'])
|
| 430 |
+
self.disposition = cdisp
|
| 431 |
+
self.disposition_options = pdict
|
| 432 |
+
self.name = None
|
| 433 |
+
if 'name' in pdict:
|
| 434 |
+
self.name = pdict['name']
|
| 435 |
+
self.filename = None
|
| 436 |
+
if 'filename' in pdict:
|
| 437 |
+
self.filename = pdict['filename']
|
| 438 |
+
self._binary_file = self.filename is not None
|
| 439 |
+
|
| 440 |
+
# Process content-type header
|
| 441 |
+
#
|
| 442 |
+
# Honor any existing content-type header. But if there is no
|
| 443 |
+
# content-type header, use some sensible defaults. Assume
|
| 444 |
+
# outerboundary is "" at the outer level, but something non-false
|
| 445 |
+
# inside a multi-part. The default for an inner part is text/plain,
|
| 446 |
+
# but for an outer part it should be urlencoded. This should catch
|
| 447 |
+
# bogus clients which erroneously forget to include a content-type
|
| 448 |
+
# header.
|
| 449 |
+
#
|
| 450 |
+
# See below for what we do if there does exist a content-type header,
|
| 451 |
+
# but it happens to be something we don't understand.
|
| 452 |
+
if 'content-type' in self.headers:
|
| 453 |
+
ctype, pdict = parse_header(self.headers['content-type'])
|
| 454 |
+
elif self.outerboundary or method != 'POST':
|
| 455 |
+
ctype, pdict = "text/plain", {}
|
| 456 |
+
else:
|
| 457 |
+
ctype, pdict = 'application/x-www-form-urlencoded', {}
|
| 458 |
+
self.type = ctype
|
| 459 |
+
self.type_options = pdict
|
| 460 |
+
if 'boundary' in pdict:
|
| 461 |
+
self.innerboundary = pdict['boundary'].encode(self.encoding,
|
| 462 |
+
self.errors)
|
| 463 |
+
else:
|
| 464 |
+
self.innerboundary = b""
|
| 465 |
+
|
| 466 |
+
clen = -1
|
| 467 |
+
if 'content-length' in self.headers:
|
| 468 |
+
try:
|
| 469 |
+
clen = int(self.headers['content-length'])
|
| 470 |
+
except ValueError:
|
| 471 |
+
pass
|
| 472 |
+
if maxlen and clen > maxlen:
|
| 473 |
+
raise ValueError('Maximum content length exceeded')
|
| 474 |
+
self.length = clen
|
| 475 |
+
if self.limit is None and clen >= 0:
|
| 476 |
+
self.limit = clen
|
| 477 |
+
|
| 478 |
+
self.list = self.file = None
|
| 479 |
+
self.done = 0
|
| 480 |
+
if ctype == 'application/x-www-form-urlencoded':
|
| 481 |
+
self.read_urlencoded()
|
| 482 |
+
elif ctype[:10] == 'multipart/':
|
| 483 |
+
self.read_multi(environ, keep_blank_values, strict_parsing)
|
| 484 |
+
else:
|
| 485 |
+
self.read_single()
|
| 486 |
+
|
| 487 |
+
def __del__(self):
|
| 488 |
+
try:
|
| 489 |
+
self.file.close()
|
| 490 |
+
except AttributeError:
|
| 491 |
+
pass
|
| 492 |
+
|
| 493 |
+
def __enter__(self):
|
| 494 |
+
return self
|
| 495 |
+
|
| 496 |
+
def __exit__(self, *args):
|
| 497 |
+
self.file.close()
|
| 498 |
+
|
| 499 |
+
def __repr__(self):
|
| 500 |
+
"""Return a printable representation."""
|
| 501 |
+
return "FieldStorage(%r, %r, %r)" % (
|
| 502 |
+
self.name, self.filename, self.value)
|
| 503 |
+
|
| 504 |
+
def __iter__(self):
|
| 505 |
+
return iter(self.keys())
|
| 506 |
+
|
| 507 |
+
def __getattr__(self, name):
|
| 508 |
+
if name != 'value':
|
| 509 |
+
raise AttributeError(name)
|
| 510 |
+
if self.file:
|
| 511 |
+
self.file.seek(0)
|
| 512 |
+
value = self.file.read()
|
| 513 |
+
self.file.seek(0)
|
| 514 |
+
elif self.list is not None:
|
| 515 |
+
value = self.list
|
| 516 |
+
else:
|
| 517 |
+
value = None
|
| 518 |
+
return value
|
| 519 |
+
|
| 520 |
+
def __getitem__(self, key):
|
| 521 |
+
"""Dictionary style indexing."""
|
| 522 |
+
if self.list is None:
|
| 523 |
+
raise TypeError("not indexable")
|
| 524 |
+
found = []
|
| 525 |
+
for item in self.list:
|
| 526 |
+
if item.name == key: found.append(item)
|
| 527 |
+
if not found:
|
| 528 |
+
raise KeyError(key)
|
| 529 |
+
if len(found) == 1:
|
| 530 |
+
return found[0]
|
| 531 |
+
else:
|
| 532 |
+
return found
|
| 533 |
+
|
| 534 |
+
def getvalue(self, key, default=None):
|
| 535 |
+
"""Dictionary style get() method, including 'value' lookup."""
|
| 536 |
+
if key in self:
|
| 537 |
+
value = self[key]
|
| 538 |
+
if isinstance(value, list):
|
| 539 |
+
return [x.value for x in value]
|
| 540 |
+
else:
|
| 541 |
+
return value.value
|
| 542 |
+
else:
|
| 543 |
+
return default
|
| 544 |
+
|
| 545 |
+
def getfirst(self, key, default=None):
|
| 546 |
+
""" Return the first value received."""
|
| 547 |
+
if key in self:
|
| 548 |
+
value = self[key]
|
| 549 |
+
if isinstance(value, list):
|
| 550 |
+
return value[0].value
|
| 551 |
+
else:
|
| 552 |
+
return value.value
|
| 553 |
+
else:
|
| 554 |
+
return default
|
| 555 |
+
|
| 556 |
+
def getlist(self, key):
|
| 557 |
+
""" Return list of received values."""
|
| 558 |
+
if key in self:
|
| 559 |
+
value = self[key]
|
| 560 |
+
if isinstance(value, list):
|
| 561 |
+
return [x.value for x in value]
|
| 562 |
+
else:
|
| 563 |
+
return [value.value]
|
| 564 |
+
else:
|
| 565 |
+
return []
|
| 566 |
+
|
| 567 |
+
def keys(self):
|
| 568 |
+
"""Dictionary style keys() method."""
|
| 569 |
+
if self.list is None:
|
| 570 |
+
raise TypeError("not indexable")
|
| 571 |
+
return list(set(item.name for item in self.list))
|
| 572 |
+
|
| 573 |
+
def __contains__(self, key):
|
| 574 |
+
"""Dictionary style __contains__ method."""
|
| 575 |
+
if self.list is None:
|
| 576 |
+
raise TypeError("not indexable")
|
| 577 |
+
return any(item.name == key for item in self.list)
|
| 578 |
+
|
| 579 |
+
def __len__(self):
|
| 580 |
+
"""Dictionary style len(x) support."""
|
| 581 |
+
return len(self.keys())
|
| 582 |
+
|
| 583 |
+
def __bool__(self):
|
| 584 |
+
if self.list is None:
|
| 585 |
+
raise TypeError("Cannot be converted to bool.")
|
| 586 |
+
return bool(self.list)
|
| 587 |
+
|
| 588 |
+
def read_urlencoded(self):
|
| 589 |
+
"""Internal: read data in query string format."""
|
| 590 |
+
qs = self.fp.read(self.length)
|
| 591 |
+
if not isinstance(qs, bytes):
|
| 592 |
+
raise ValueError("%s should return bytes, got %s" \
|
| 593 |
+
% (self.fp, type(qs).__name__))
|
| 594 |
+
qs = qs.decode(self.encoding, self.errors)
|
| 595 |
+
if self.qs_on_post:
|
| 596 |
+
qs += '&' + self.qs_on_post
|
| 597 |
+
query = urllib.parse.parse_qsl(
|
| 598 |
+
qs, self.keep_blank_values, self.strict_parsing,
|
| 599 |
+
encoding=self.encoding, errors=self.errors,
|
| 600 |
+
max_num_fields=self.max_num_fields, separator=self.separator)
|
| 601 |
+
self.list = [MiniFieldStorage(key, value) for key, value in query]
|
| 602 |
+
self.skip_lines()
|
| 603 |
+
|
| 604 |
+
FieldStorageClass = None
|
| 605 |
+
|
| 606 |
+
def read_multi(self, environ, keep_blank_values, strict_parsing):
|
| 607 |
+
"""Internal: read a part that is itself multipart."""
|
| 608 |
+
ib = self.innerboundary
|
| 609 |
+
if not valid_boundary(ib):
|
| 610 |
+
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
|
| 611 |
+
self.list = []
|
| 612 |
+
if self.qs_on_post:
|
| 613 |
+
query = urllib.parse.parse_qsl(
|
| 614 |
+
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
|
| 615 |
+
encoding=self.encoding, errors=self.errors,
|
| 616 |
+
max_num_fields=self.max_num_fields, separator=self.separator)
|
| 617 |
+
self.list.extend(MiniFieldStorage(key, value) for key, value in query)
|
| 618 |
+
|
| 619 |
+
klass = self.FieldStorageClass or self.__class__
|
| 620 |
+
first_line = self.fp.readline() # bytes
|
| 621 |
+
if not isinstance(first_line, bytes):
|
| 622 |
+
raise ValueError("%s should return bytes, got %s" \
|
| 623 |
+
% (self.fp, type(first_line).__name__))
|
| 624 |
+
self.bytes_read += len(first_line)
|
| 625 |
+
|
| 626 |
+
# Ensure that we consume the file until we've hit our inner boundary
|
| 627 |
+
while (first_line.strip() != (b"--" + self.innerboundary) and
|
| 628 |
+
first_line):
|
| 629 |
+
first_line = self.fp.readline()
|
| 630 |
+
self.bytes_read += len(first_line)
|
| 631 |
+
|
| 632 |
+
# Propagate max_num_fields into the sub class appropriately
|
| 633 |
+
max_num_fields = self.max_num_fields
|
| 634 |
+
if max_num_fields is not None:
|
| 635 |
+
max_num_fields -= len(self.list)
|
| 636 |
+
|
| 637 |
+
while True:
|
| 638 |
+
parser = FeedParser()
|
| 639 |
+
hdr_text = b""
|
| 640 |
+
while True:
|
| 641 |
+
data = self.fp.readline()
|
| 642 |
+
hdr_text += data
|
| 643 |
+
if not data.strip():
|
| 644 |
+
break
|
| 645 |
+
if not hdr_text:
|
| 646 |
+
break
|
| 647 |
+
# parser takes strings, not bytes
|
| 648 |
+
self.bytes_read += len(hdr_text)
|
| 649 |
+
parser.feed(hdr_text.decode(self.encoding, self.errors))
|
| 650 |
+
headers = parser.close()
|
| 651 |
+
|
| 652 |
+
# Some clients add Content-Length for part headers, ignore them
|
| 653 |
+
if 'content-length' in headers:
|
| 654 |
+
del headers['content-length']
|
| 655 |
+
|
| 656 |
+
limit = None if self.limit is None \
|
| 657 |
+
else self.limit - self.bytes_read
|
| 658 |
+
part = klass(self.fp, headers, ib, environ, keep_blank_values,
|
| 659 |
+
strict_parsing, limit,
|
| 660 |
+
self.encoding, self.errors, max_num_fields, self.separator)
|
| 661 |
+
|
| 662 |
+
if max_num_fields is not None:
|
| 663 |
+
max_num_fields -= 1
|
| 664 |
+
if part.list:
|
| 665 |
+
max_num_fields -= len(part.list)
|
| 666 |
+
if max_num_fields < 0:
|
| 667 |
+
raise ValueError('Max number of fields exceeded')
|
| 668 |
+
|
| 669 |
+
self.bytes_read += part.bytes_read
|
| 670 |
+
self.list.append(part)
|
| 671 |
+
if part.done or self.bytes_read >= self.length > 0:
|
| 672 |
+
break
|
| 673 |
+
self.skip_lines()
|
| 674 |
+
|
| 675 |
+
def read_single(self):
|
| 676 |
+
"""Internal: read an atomic part."""
|
| 677 |
+
if self.length >= 0:
|
| 678 |
+
self.read_binary()
|
| 679 |
+
self.skip_lines()
|
| 680 |
+
else:
|
| 681 |
+
self.read_lines()
|
| 682 |
+
self.file.seek(0)
|
| 683 |
+
|
| 684 |
+
bufsize = 8*1024 # I/O buffering size for copy to file
|
| 685 |
+
|
| 686 |
+
def read_binary(self):
|
| 687 |
+
"""Internal: read binary data."""
|
| 688 |
+
self.file = self.make_file()
|
| 689 |
+
todo = self.length
|
| 690 |
+
if todo >= 0:
|
| 691 |
+
while todo > 0:
|
| 692 |
+
data = self.fp.read(min(todo, self.bufsize)) # bytes
|
| 693 |
+
if not isinstance(data, bytes):
|
| 694 |
+
raise ValueError("%s should return bytes, got %s"
|
| 695 |
+
% (self.fp, type(data).__name__))
|
| 696 |
+
self.bytes_read += len(data)
|
| 697 |
+
if not data:
|
| 698 |
+
self.done = -1
|
| 699 |
+
break
|
| 700 |
+
self.file.write(data)
|
| 701 |
+
todo = todo - len(data)
|
| 702 |
+
|
| 703 |
+
def read_lines(self):
|
| 704 |
+
"""Internal: read lines until EOF or outerboundary."""
|
| 705 |
+
if self._binary_file:
|
| 706 |
+
self.file = self.__file = BytesIO() # store data as bytes for files
|
| 707 |
+
else:
|
| 708 |
+
self.file = self.__file = StringIO() # as strings for other fields
|
| 709 |
+
if self.outerboundary:
|
| 710 |
+
self.read_lines_to_outerboundary()
|
| 711 |
+
else:
|
| 712 |
+
self.read_lines_to_eof()
|
| 713 |
+
|
| 714 |
+
def __write(self, line):
|
| 715 |
+
"""line is always bytes, not string"""
|
| 716 |
+
if self.__file is not None:
|
| 717 |
+
if self.__file.tell() + len(line) > 1000:
|
| 718 |
+
self.file = self.make_file()
|
| 719 |
+
data = self.__file.getvalue()
|
| 720 |
+
self.file.write(data)
|
| 721 |
+
self.__file = None
|
| 722 |
+
if self._binary_file:
|
| 723 |
+
# keep bytes
|
| 724 |
+
self.file.write(line)
|
| 725 |
+
else:
|
| 726 |
+
# decode to string
|
| 727 |
+
self.file.write(line.decode(self.encoding, self.errors))
|
| 728 |
+
|
| 729 |
+
def read_lines_to_eof(self):
|
| 730 |
+
"""Internal: read lines until EOF."""
|
| 731 |
+
while 1:
|
| 732 |
+
line = self.fp.readline(1<<16) # bytes
|
| 733 |
+
self.bytes_read += len(line)
|
| 734 |
+
if not line:
|
| 735 |
+
self.done = -1
|
| 736 |
+
break
|
| 737 |
+
self.__write(line)
|
| 738 |
+
|
| 739 |
+
def read_lines_to_outerboundary(self):
|
| 740 |
+
"""Internal: read lines until outerboundary.
|
| 741 |
+
Data is read as bytes: boundaries and line ends must be converted
|
| 742 |
+
to bytes for comparisons.
|
| 743 |
+
"""
|
| 744 |
+
next_boundary = b"--" + self.outerboundary
|
| 745 |
+
last_boundary = next_boundary + b"--"
|
| 746 |
+
delim = b""
|
| 747 |
+
last_line_lfend = True
|
| 748 |
+
_read = 0
|
| 749 |
+
while 1:
|
| 750 |
+
|
| 751 |
+
if self.limit is not None and 0 <= self.limit <= _read:
|
| 752 |
+
break
|
| 753 |
+
line = self.fp.readline(1<<16) # bytes
|
| 754 |
+
self.bytes_read += len(line)
|
| 755 |
+
_read += len(line)
|
| 756 |
+
if not line:
|
| 757 |
+
self.done = -1
|
| 758 |
+
break
|
| 759 |
+
if delim == b"\r":
|
| 760 |
+
line = delim + line
|
| 761 |
+
delim = b""
|
| 762 |
+
if line.startswith(b"--") and last_line_lfend:
|
| 763 |
+
strippedline = line.rstrip()
|
| 764 |
+
if strippedline == next_boundary:
|
| 765 |
+
break
|
| 766 |
+
if strippedline == last_boundary:
|
| 767 |
+
self.done = 1
|
| 768 |
+
break
|
| 769 |
+
odelim = delim
|
| 770 |
+
if line.endswith(b"\r\n"):
|
| 771 |
+
delim = b"\r\n"
|
| 772 |
+
line = line[:-2]
|
| 773 |
+
last_line_lfend = True
|
| 774 |
+
elif line.endswith(b"\n"):
|
| 775 |
+
delim = b"\n"
|
| 776 |
+
line = line[:-1]
|
| 777 |
+
last_line_lfend = True
|
| 778 |
+
elif line.endswith(b"\r"):
|
| 779 |
+
# We may interrupt \r\n sequences if they span the 2**16
|
| 780 |
+
# byte boundary
|
| 781 |
+
delim = b"\r"
|
| 782 |
+
line = line[:-1]
|
| 783 |
+
last_line_lfend = False
|
| 784 |
+
else:
|
| 785 |
+
delim = b""
|
| 786 |
+
last_line_lfend = False
|
| 787 |
+
self.__write(odelim + line)
|
| 788 |
+
|
| 789 |
+
def skip_lines(self):
|
| 790 |
+
"""Internal: skip lines until outer boundary if defined."""
|
| 791 |
+
if not self.outerboundary or self.done:
|
| 792 |
+
return
|
| 793 |
+
next_boundary = b"--" + self.outerboundary
|
| 794 |
+
last_boundary = next_boundary + b"--"
|
| 795 |
+
last_line_lfend = True
|
| 796 |
+
while True:
|
| 797 |
+
line = self.fp.readline(1<<16)
|
| 798 |
+
self.bytes_read += len(line)
|
| 799 |
+
if not line:
|
| 800 |
+
self.done = -1
|
| 801 |
+
break
|
| 802 |
+
if line.endswith(b"--") and last_line_lfend:
|
| 803 |
+
strippedline = line.strip()
|
| 804 |
+
if strippedline == next_boundary:
|
| 805 |
+
break
|
| 806 |
+
if strippedline == last_boundary:
|
| 807 |
+
self.done = 1
|
| 808 |
+
break
|
| 809 |
+
last_line_lfend = line.endswith(b'\n')
|
| 810 |
+
|
| 811 |
+
def make_file(self):
|
| 812 |
+
"""Overridable: return a readable & writable file.
|
| 813 |
+
|
| 814 |
+
The file will be used as follows:
|
| 815 |
+
- data is written to it
|
| 816 |
+
- seek(0)
|
| 817 |
+
- data is read from it
|
| 818 |
+
|
| 819 |
+
The file is opened in binary mode for files, in text mode
|
| 820 |
+
for other fields
|
| 821 |
+
|
| 822 |
+
This version opens a temporary file for reading and writing,
|
| 823 |
+
and immediately deletes (unlinks) it. The trick (on Unix!) is
|
| 824 |
+
that the file can still be used, but it can't be opened by
|
| 825 |
+
another process, and it will automatically be deleted when it
|
| 826 |
+
is closed or when the current process terminates.
|
| 827 |
+
|
| 828 |
+
If you want a more permanent file, you derive a class which
|
| 829 |
+
overrides this method. If you want a visible temporary file
|
| 830 |
+
that is nevertheless automatically deleted when the script
|
| 831 |
+
terminates, try defining a __del__ method in a derived class
|
| 832 |
+
which unlinks the temporary files you have created.
|
| 833 |
+
|
| 834 |
+
"""
|
| 835 |
+
if self._binary_file:
|
| 836 |
+
return tempfile.TemporaryFile("wb+")
|
| 837 |
+
else:
|
| 838 |
+
return tempfile.TemporaryFile("w+",
|
| 839 |
+
encoding=self.encoding, newline = '\n')
|
| 840 |
+
|
| 841 |
+
|
| 842 |
+
# Test/debug code
|
| 843 |
+
# ===============
|
| 844 |
+
|
| 845 |
+
def test(environ=os.environ):
|
| 846 |
+
"""Robust test CGI script, usable as main program.
|
| 847 |
+
|
| 848 |
+
Write minimal HTTP headers and dump all information provided to
|
| 849 |
+
the script in HTML form.
|
| 850 |
+
|
| 851 |
+
"""
|
| 852 |
+
print("Content-type: text/html")
|
| 853 |
+
print()
|
| 854 |
+
sys.stderr = sys.stdout
|
| 855 |
+
try:
|
| 856 |
+
form = FieldStorage() # Replace with other classes to test those
|
| 857 |
+
print_directory()
|
| 858 |
+
print_arguments()
|
| 859 |
+
print_form(form)
|
| 860 |
+
print_environ(environ)
|
| 861 |
+
print_environ_usage()
|
| 862 |
+
def f():
|
| 863 |
+
exec("testing print_exception() -- <I>italics?</I>")
|
| 864 |
+
def g(f=f):
|
| 865 |
+
f()
|
| 866 |
+
print("<H3>What follows is a test, not an actual exception:</H3>")
|
| 867 |
+
g()
|
| 868 |
+
except:
|
| 869 |
+
print_exception()
|
| 870 |
+
|
| 871 |
+
print("<H1>Second try with a small maxlen...</H1>")
|
| 872 |
+
|
| 873 |
+
global maxlen
|
| 874 |
+
maxlen = 50
|
| 875 |
+
try:
|
| 876 |
+
form = FieldStorage() # Replace with other classes to test those
|
| 877 |
+
print_directory()
|
| 878 |
+
print_arguments()
|
| 879 |
+
print_form(form)
|
| 880 |
+
print_environ(environ)
|
| 881 |
+
except:
|
| 882 |
+
print_exception()
|
| 883 |
+
|
| 884 |
+
def print_exception(type=None, value=None, tb=None, limit=None):
|
| 885 |
+
if type is None:
|
| 886 |
+
type, value, tb = sys.exc_info()
|
| 887 |
+
import traceback
|
| 888 |
+
print()
|
| 889 |
+
print("<H3>Traceback (most recent call last):</H3>")
|
| 890 |
+
list = traceback.format_tb(tb, limit) + \
|
| 891 |
+
traceback.format_exception_only(type, value)
|
| 892 |
+
print("<PRE>%s<B>%s</B></PRE>" % (
|
| 893 |
+
html.escape("".join(list[:-1])),
|
| 894 |
+
html.escape(list[-1]),
|
| 895 |
+
))
|
| 896 |
+
del tb
|
| 897 |
+
|
| 898 |
+
def print_environ(environ=os.environ):
|
| 899 |
+
"""Dump the shell environment as HTML."""
|
| 900 |
+
keys = sorted(environ.keys())
|
| 901 |
+
print()
|
| 902 |
+
print("<H3>Shell Environment:</H3>")
|
| 903 |
+
print("<DL>")
|
| 904 |
+
for key in keys:
|
| 905 |
+
print("<DT>", html.escape(key), "<DD>", html.escape(environ[key]))
|
| 906 |
+
print("</DL>")
|
| 907 |
+
print()
|
| 908 |
+
|
| 909 |
+
def print_form(form):
|
| 910 |
+
"""Dump the contents of a form as HTML."""
|
| 911 |
+
keys = sorted(form.keys())
|
| 912 |
+
print()
|
| 913 |
+
print("<H3>Form Contents:</H3>")
|
| 914 |
+
if not keys:
|
| 915 |
+
print("<P>No form fields.")
|
| 916 |
+
print("<DL>")
|
| 917 |
+
for key in keys:
|
| 918 |
+
print("<DT>" + html.escape(key) + ":", end=' ')
|
| 919 |
+
value = form[key]
|
| 920 |
+
print("<i>" + html.escape(repr(type(value))) + "</i>")
|
| 921 |
+
print("<DD>" + html.escape(repr(value)))
|
| 922 |
+
print("</DL>")
|
| 923 |
+
print()
|
| 924 |
+
|
| 925 |
+
def print_directory():
|
| 926 |
+
"""Dump the current directory as HTML."""
|
| 927 |
+
print()
|
| 928 |
+
print("<H3>Current Working Directory:</H3>")
|
| 929 |
+
try:
|
| 930 |
+
pwd = os.getcwd()
|
| 931 |
+
except OSError as msg:
|
| 932 |
+
print("OSError:", html.escape(str(msg)))
|
| 933 |
+
else:
|
| 934 |
+
print(html.escape(pwd))
|
| 935 |
+
print()
|
| 936 |
+
|
| 937 |
+
def print_arguments():
|
| 938 |
+
print()
|
| 939 |
+
print("<H3>Command Line Arguments:</H3>")
|
| 940 |
+
print()
|
| 941 |
+
print(sys.argv)
|
| 942 |
+
print()
|
| 943 |
+
|
| 944 |
+
def print_environ_usage():
|
| 945 |
+
"""Dump a list of environment variables used by CGI as HTML."""
|
| 946 |
+
print("""
|
| 947 |
+
<H3>These environment variables could have been set:</H3>
|
| 948 |
+
<UL>
|
| 949 |
+
<LI>AUTH_TYPE
|
| 950 |
+
<LI>CONTENT_LENGTH
|
| 951 |
+
<LI>CONTENT_TYPE
|
| 952 |
+
<LI>DATE_GMT
|
| 953 |
+
<LI>DATE_LOCAL
|
| 954 |
+
<LI>DOCUMENT_NAME
|
| 955 |
+
<LI>DOCUMENT_ROOT
|
| 956 |
+
<LI>DOCUMENT_URI
|
| 957 |
+
<LI>GATEWAY_INTERFACE
|
| 958 |
+
<LI>LAST_MODIFIED
|
| 959 |
+
<LI>PATH
|
| 960 |
+
<LI>PATH_INFO
|
| 961 |
+
<LI>PATH_TRANSLATED
|
| 962 |
+
<LI>QUERY_STRING
|
| 963 |
+
<LI>REMOTE_ADDR
|
| 964 |
+
<LI>REMOTE_HOST
|
| 965 |
+
<LI>REMOTE_IDENT
|
| 966 |
+
<LI>REMOTE_USER
|
| 967 |
+
<LI>REQUEST_METHOD
|
| 968 |
+
<LI>SCRIPT_NAME
|
| 969 |
+
<LI>SERVER_NAME
|
| 970 |
+
<LI>SERVER_PORT
|
| 971 |
+
<LI>SERVER_PROTOCOL
|
| 972 |
+
<LI>SERVER_ROOT
|
| 973 |
+
<LI>SERVER_SOFTWARE
|
| 974 |
+
</UL>
|
| 975 |
+
In addition, HTTP headers sent by the server may be passed in the
|
| 976 |
+
environment as well. Here are some common variable names:
|
| 977 |
+
<UL>
|
| 978 |
+
<LI>HTTP_ACCEPT
|
| 979 |
+
<LI>HTTP_CONNECTION
|
| 980 |
+
<LI>HTTP_HOST
|
| 981 |
+
<LI>HTTP_PRAGMA
|
| 982 |
+
<LI>HTTP_REFERER
|
| 983 |
+
<LI>HTTP_USER_AGENT
|
| 984 |
+
</UL>
|
| 985 |
+
""")
|
| 986 |
+
|
| 987 |
+
|
| 988 |
+
# Utilities
|
| 989 |
+
# =========
|
| 990 |
+
|
| 991 |
+
def valid_boundary(s):
|
| 992 |
+
import re
|
| 993 |
+
if isinstance(s, bytes):
|
| 994 |
+
_vb_pattern = b"^[ -~]{0,200}[!-~]$"
|
| 995 |
+
else:
|
| 996 |
+
_vb_pattern = "^[ -~]{0,200}[!-~]$"
|
| 997 |
+
return re.match(_vb_pattern, s)
|
| 998 |
+
|
| 999 |
+
# Invoke mainline
|
| 1000 |
+
# ===============
|
| 1001 |
+
|
| 1002 |
+
# Call test() when this file is run as a script (not imported as a module)
|
| 1003 |
+
if __name__ == '__main__':
|
| 1004 |
+
test()
|
llava/lib/python3.10/cmd.py
ADDED
|
@@ -0,0 +1,401 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A generic class to build line-oriented command interpreters.
|
| 2 |
+
|
| 3 |
+
Interpreters constructed with this class obey the following conventions:
|
| 4 |
+
|
| 5 |
+
1. End of file on input is processed as the command 'EOF'.
|
| 6 |
+
2. A command is parsed out of each line by collecting the prefix composed
|
| 7 |
+
of characters in the identchars member.
|
| 8 |
+
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
|
| 9 |
+
is passed a single argument consisting of the remainder of the line.
|
| 10 |
+
4. Typing an empty line repeats the last command. (Actually, it calls the
|
| 11 |
+
method `emptyline', which may be overridden in a subclass.)
|
| 12 |
+
5. There is a predefined `help' method. Given an argument `topic', it
|
| 13 |
+
calls the command `help_topic'. With no arguments, it lists all topics
|
| 14 |
+
with defined help_ functions, broken into up to three topics; documented
|
| 15 |
+
commands, miscellaneous help topics, and undocumented commands.
|
| 16 |
+
6. The command '?' is a synonym for `help'. The command '!' is a synonym
|
| 17 |
+
for `shell', if a do_shell method exists.
|
| 18 |
+
7. If completion is enabled, completing commands will be done automatically,
|
| 19 |
+
and completing of commands args is done by calling complete_foo() with
|
| 20 |
+
arguments text, line, begidx, endidx. text is string we are matching
|
| 21 |
+
against, all returned matches must begin with it. line is the current
|
| 22 |
+
input line (lstripped), begidx and endidx are the beginning and end
|
| 23 |
+
indexes of the text being matched, which could be used to provide
|
| 24 |
+
different completion depending upon which position the argument is in.
|
| 25 |
+
|
| 26 |
+
The `default' method may be overridden to intercept commands for which there
|
| 27 |
+
is no do_ method.
|
| 28 |
+
|
| 29 |
+
The `completedefault' method may be overridden to intercept completions for
|
| 30 |
+
commands that have no complete_ method.
|
| 31 |
+
|
| 32 |
+
The data member `self.ruler' sets the character used to draw separator lines
|
| 33 |
+
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
|
| 34 |
+
|
| 35 |
+
If the value of `self.intro' is nonempty when the cmdloop method is called,
|
| 36 |
+
it is printed out on interpreter startup. This value may be overridden
|
| 37 |
+
via an optional argument to the cmdloop() method.
|
| 38 |
+
|
| 39 |
+
The data members `self.doc_header', `self.misc_header', and
|
| 40 |
+
`self.undoc_header' set the headers used for the help function's
|
| 41 |
+
listings of documented functions, miscellaneous topics, and undocumented
|
| 42 |
+
functions respectively.
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
import string, sys
|
| 46 |
+
|
| 47 |
+
__all__ = ["Cmd"]
|
| 48 |
+
|
| 49 |
+
PROMPT = '(Cmd) '
|
| 50 |
+
IDENTCHARS = string.ascii_letters + string.digits + '_'
|
| 51 |
+
|
| 52 |
+
class Cmd:
|
| 53 |
+
"""A simple framework for writing line-oriented command interpreters.
|
| 54 |
+
|
| 55 |
+
These are often useful for test harnesses, administrative tools, and
|
| 56 |
+
prototypes that will later be wrapped in a more sophisticated interface.
|
| 57 |
+
|
| 58 |
+
A Cmd instance or subclass instance is a line-oriented interpreter
|
| 59 |
+
framework. There is no good reason to instantiate Cmd itself; rather,
|
| 60 |
+
it's useful as a superclass of an interpreter class you define yourself
|
| 61 |
+
in order to inherit Cmd's methods and encapsulate action methods.
|
| 62 |
+
|
| 63 |
+
"""
|
| 64 |
+
prompt = PROMPT
|
| 65 |
+
identchars = IDENTCHARS
|
| 66 |
+
ruler = '='
|
| 67 |
+
lastcmd = ''
|
| 68 |
+
intro = None
|
| 69 |
+
doc_leader = ""
|
| 70 |
+
doc_header = "Documented commands (type help <topic>):"
|
| 71 |
+
misc_header = "Miscellaneous help topics:"
|
| 72 |
+
undoc_header = "Undocumented commands:"
|
| 73 |
+
nohelp = "*** No help on %s"
|
| 74 |
+
use_rawinput = 1
|
| 75 |
+
|
| 76 |
+
def __init__(self, completekey='tab', stdin=None, stdout=None):
|
| 77 |
+
"""Instantiate a line-oriented interpreter framework.
|
| 78 |
+
|
| 79 |
+
The optional argument 'completekey' is the readline name of a
|
| 80 |
+
completion key; it defaults to the Tab key. If completekey is
|
| 81 |
+
not None and the readline module is available, command completion
|
| 82 |
+
is done automatically. The optional arguments stdin and stdout
|
| 83 |
+
specify alternate input and output file objects; if not specified,
|
| 84 |
+
sys.stdin and sys.stdout are used.
|
| 85 |
+
|
| 86 |
+
"""
|
| 87 |
+
if stdin is not None:
|
| 88 |
+
self.stdin = stdin
|
| 89 |
+
else:
|
| 90 |
+
self.stdin = sys.stdin
|
| 91 |
+
if stdout is not None:
|
| 92 |
+
self.stdout = stdout
|
| 93 |
+
else:
|
| 94 |
+
self.stdout = sys.stdout
|
| 95 |
+
self.cmdqueue = []
|
| 96 |
+
self.completekey = completekey
|
| 97 |
+
|
| 98 |
+
def cmdloop(self, intro=None):
|
| 99 |
+
"""Repeatedly issue a prompt, accept input, parse an initial prefix
|
| 100 |
+
off the received input, and dispatch to action methods, passing them
|
| 101 |
+
the remainder of the line as argument.
|
| 102 |
+
|
| 103 |
+
"""
|
| 104 |
+
|
| 105 |
+
self.preloop()
|
| 106 |
+
if self.use_rawinput and self.completekey:
|
| 107 |
+
try:
|
| 108 |
+
import readline
|
| 109 |
+
self.old_completer = readline.get_completer()
|
| 110 |
+
readline.set_completer(self.complete)
|
| 111 |
+
readline.parse_and_bind(self.completekey+": complete")
|
| 112 |
+
except ImportError:
|
| 113 |
+
pass
|
| 114 |
+
try:
|
| 115 |
+
if intro is not None:
|
| 116 |
+
self.intro = intro
|
| 117 |
+
if self.intro:
|
| 118 |
+
self.stdout.write(str(self.intro)+"\n")
|
| 119 |
+
stop = None
|
| 120 |
+
while not stop:
|
| 121 |
+
if self.cmdqueue:
|
| 122 |
+
line = self.cmdqueue.pop(0)
|
| 123 |
+
else:
|
| 124 |
+
if self.use_rawinput:
|
| 125 |
+
try:
|
| 126 |
+
line = input(self.prompt)
|
| 127 |
+
except EOFError:
|
| 128 |
+
line = 'EOF'
|
| 129 |
+
else:
|
| 130 |
+
self.stdout.write(self.prompt)
|
| 131 |
+
self.stdout.flush()
|
| 132 |
+
line = self.stdin.readline()
|
| 133 |
+
if not len(line):
|
| 134 |
+
line = 'EOF'
|
| 135 |
+
else:
|
| 136 |
+
line = line.rstrip('\r\n')
|
| 137 |
+
line = self.precmd(line)
|
| 138 |
+
stop = self.onecmd(line)
|
| 139 |
+
stop = self.postcmd(stop, line)
|
| 140 |
+
self.postloop()
|
| 141 |
+
finally:
|
| 142 |
+
if self.use_rawinput and self.completekey:
|
| 143 |
+
try:
|
| 144 |
+
import readline
|
| 145 |
+
readline.set_completer(self.old_completer)
|
| 146 |
+
except ImportError:
|
| 147 |
+
pass
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def precmd(self, line):
|
| 151 |
+
"""Hook method executed just before the command line is
|
| 152 |
+
interpreted, but after the input prompt is generated and issued.
|
| 153 |
+
|
| 154 |
+
"""
|
| 155 |
+
return line
|
| 156 |
+
|
| 157 |
+
def postcmd(self, stop, line):
|
| 158 |
+
"""Hook method executed just after a command dispatch is finished."""
|
| 159 |
+
return stop
|
| 160 |
+
|
| 161 |
+
def preloop(self):
|
| 162 |
+
"""Hook method executed once when the cmdloop() method is called."""
|
| 163 |
+
pass
|
| 164 |
+
|
| 165 |
+
def postloop(self):
|
| 166 |
+
"""Hook method executed once when the cmdloop() method is about to
|
| 167 |
+
return.
|
| 168 |
+
|
| 169 |
+
"""
|
| 170 |
+
pass
|
| 171 |
+
|
| 172 |
+
def parseline(self, line):
|
| 173 |
+
"""Parse the line into a command name and a string containing
|
| 174 |
+
the arguments. Returns a tuple containing (command, args, line).
|
| 175 |
+
'command' and 'args' may be None if the line couldn't be parsed.
|
| 176 |
+
"""
|
| 177 |
+
line = line.strip()
|
| 178 |
+
if not line:
|
| 179 |
+
return None, None, line
|
| 180 |
+
elif line[0] == '?':
|
| 181 |
+
line = 'help ' + line[1:]
|
| 182 |
+
elif line[0] == '!':
|
| 183 |
+
if hasattr(self, 'do_shell'):
|
| 184 |
+
line = 'shell ' + line[1:]
|
| 185 |
+
else:
|
| 186 |
+
return None, None, line
|
| 187 |
+
i, n = 0, len(line)
|
| 188 |
+
while i < n and line[i] in self.identchars: i = i+1
|
| 189 |
+
cmd, arg = line[:i], line[i:].strip()
|
| 190 |
+
return cmd, arg, line
|
| 191 |
+
|
| 192 |
+
def onecmd(self, line):
|
| 193 |
+
"""Interpret the argument as though it had been typed in response
|
| 194 |
+
to the prompt.
|
| 195 |
+
|
| 196 |
+
This may be overridden, but should not normally need to be;
|
| 197 |
+
see the precmd() and postcmd() methods for useful execution hooks.
|
| 198 |
+
The return value is a flag indicating whether interpretation of
|
| 199 |
+
commands by the interpreter should stop.
|
| 200 |
+
|
| 201 |
+
"""
|
| 202 |
+
cmd, arg, line = self.parseline(line)
|
| 203 |
+
if not line:
|
| 204 |
+
return self.emptyline()
|
| 205 |
+
if cmd is None:
|
| 206 |
+
return self.default(line)
|
| 207 |
+
self.lastcmd = line
|
| 208 |
+
if line == 'EOF' :
|
| 209 |
+
self.lastcmd = ''
|
| 210 |
+
if cmd == '':
|
| 211 |
+
return self.default(line)
|
| 212 |
+
else:
|
| 213 |
+
try:
|
| 214 |
+
func = getattr(self, 'do_' + cmd)
|
| 215 |
+
except AttributeError:
|
| 216 |
+
return self.default(line)
|
| 217 |
+
return func(arg)
|
| 218 |
+
|
| 219 |
+
def emptyline(self):
|
| 220 |
+
"""Called when an empty line is entered in response to the prompt.
|
| 221 |
+
|
| 222 |
+
If this method is not overridden, it repeats the last nonempty
|
| 223 |
+
command entered.
|
| 224 |
+
|
| 225 |
+
"""
|
| 226 |
+
if self.lastcmd:
|
| 227 |
+
return self.onecmd(self.lastcmd)
|
| 228 |
+
|
| 229 |
+
def default(self, line):
|
| 230 |
+
"""Called on an input line when the command prefix is not recognized.
|
| 231 |
+
|
| 232 |
+
If this method is not overridden, it prints an error message and
|
| 233 |
+
returns.
|
| 234 |
+
|
| 235 |
+
"""
|
| 236 |
+
self.stdout.write('*** Unknown syntax: %s\n'%line)
|
| 237 |
+
|
| 238 |
+
def completedefault(self, *ignored):
|
| 239 |
+
"""Method called to complete an input line when no command-specific
|
| 240 |
+
complete_*() method is available.
|
| 241 |
+
|
| 242 |
+
By default, it returns an empty list.
|
| 243 |
+
|
| 244 |
+
"""
|
| 245 |
+
return []
|
| 246 |
+
|
| 247 |
+
def completenames(self, text, *ignored):
|
| 248 |
+
dotext = 'do_'+text
|
| 249 |
+
return [a[3:] for a in self.get_names() if a.startswith(dotext)]
|
| 250 |
+
|
| 251 |
+
def complete(self, text, state):
|
| 252 |
+
"""Return the next possible completion for 'text'.
|
| 253 |
+
|
| 254 |
+
If a command has not been entered, then complete against command list.
|
| 255 |
+
Otherwise try to call complete_<command> to get list of completions.
|
| 256 |
+
"""
|
| 257 |
+
if state == 0:
|
| 258 |
+
import readline
|
| 259 |
+
origline = readline.get_line_buffer()
|
| 260 |
+
line = origline.lstrip()
|
| 261 |
+
stripped = len(origline) - len(line)
|
| 262 |
+
begidx = readline.get_begidx() - stripped
|
| 263 |
+
endidx = readline.get_endidx() - stripped
|
| 264 |
+
if begidx>0:
|
| 265 |
+
cmd, args, foo = self.parseline(line)
|
| 266 |
+
if cmd == '':
|
| 267 |
+
compfunc = self.completedefault
|
| 268 |
+
else:
|
| 269 |
+
try:
|
| 270 |
+
compfunc = getattr(self, 'complete_' + cmd)
|
| 271 |
+
except AttributeError:
|
| 272 |
+
compfunc = self.completedefault
|
| 273 |
+
else:
|
| 274 |
+
compfunc = self.completenames
|
| 275 |
+
self.completion_matches = compfunc(text, line, begidx, endidx)
|
| 276 |
+
try:
|
| 277 |
+
return self.completion_matches[state]
|
| 278 |
+
except IndexError:
|
| 279 |
+
return None
|
| 280 |
+
|
| 281 |
+
def get_names(self):
|
| 282 |
+
# This method used to pull in base class attributes
|
| 283 |
+
# at a time dir() didn't do it yet.
|
| 284 |
+
return dir(self.__class__)
|
| 285 |
+
|
| 286 |
+
def complete_help(self, *args):
|
| 287 |
+
commands = set(self.completenames(*args))
|
| 288 |
+
topics = set(a[5:] for a in self.get_names()
|
| 289 |
+
if a.startswith('help_' + args[0]))
|
| 290 |
+
return list(commands | topics)
|
| 291 |
+
|
| 292 |
+
def do_help(self, arg):
|
| 293 |
+
'List available commands with "help" or detailed help with "help cmd".'
|
| 294 |
+
if arg:
|
| 295 |
+
# XXX check arg syntax
|
| 296 |
+
try:
|
| 297 |
+
func = getattr(self, 'help_' + arg)
|
| 298 |
+
except AttributeError:
|
| 299 |
+
try:
|
| 300 |
+
doc=getattr(self, 'do_' + arg).__doc__
|
| 301 |
+
if doc:
|
| 302 |
+
self.stdout.write("%s\n"%str(doc))
|
| 303 |
+
return
|
| 304 |
+
except AttributeError:
|
| 305 |
+
pass
|
| 306 |
+
self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
|
| 307 |
+
return
|
| 308 |
+
func()
|
| 309 |
+
else:
|
| 310 |
+
names = self.get_names()
|
| 311 |
+
cmds_doc = []
|
| 312 |
+
cmds_undoc = []
|
| 313 |
+
help = {}
|
| 314 |
+
for name in names:
|
| 315 |
+
if name[:5] == 'help_':
|
| 316 |
+
help[name[5:]]=1
|
| 317 |
+
names.sort()
|
| 318 |
+
# There can be duplicates if routines overridden
|
| 319 |
+
prevname = ''
|
| 320 |
+
for name in names:
|
| 321 |
+
if name[:3] == 'do_':
|
| 322 |
+
if name == prevname:
|
| 323 |
+
continue
|
| 324 |
+
prevname = name
|
| 325 |
+
cmd=name[3:]
|
| 326 |
+
if cmd in help:
|
| 327 |
+
cmds_doc.append(cmd)
|
| 328 |
+
del help[cmd]
|
| 329 |
+
elif getattr(self, name).__doc__:
|
| 330 |
+
cmds_doc.append(cmd)
|
| 331 |
+
else:
|
| 332 |
+
cmds_undoc.append(cmd)
|
| 333 |
+
self.stdout.write("%s\n"%str(self.doc_leader))
|
| 334 |
+
self.print_topics(self.doc_header, cmds_doc, 15,80)
|
| 335 |
+
self.print_topics(self.misc_header, list(help.keys()),15,80)
|
| 336 |
+
self.print_topics(self.undoc_header, cmds_undoc, 15,80)
|
| 337 |
+
|
| 338 |
+
def print_topics(self, header, cmds, cmdlen, maxcol):
|
| 339 |
+
if cmds:
|
| 340 |
+
self.stdout.write("%s\n"%str(header))
|
| 341 |
+
if self.ruler:
|
| 342 |
+
self.stdout.write("%s\n"%str(self.ruler * len(header)))
|
| 343 |
+
self.columnize(cmds, maxcol-1)
|
| 344 |
+
self.stdout.write("\n")
|
| 345 |
+
|
| 346 |
+
def columnize(self, list, displaywidth=80):
|
| 347 |
+
"""Display a list of strings as a compact set of columns.
|
| 348 |
+
|
| 349 |
+
Each column is only as wide as necessary.
|
| 350 |
+
Columns are separated by two spaces (one was not legible enough).
|
| 351 |
+
"""
|
| 352 |
+
if not list:
|
| 353 |
+
self.stdout.write("<empty>\n")
|
| 354 |
+
return
|
| 355 |
+
|
| 356 |
+
nonstrings = [i for i in range(len(list))
|
| 357 |
+
if not isinstance(list[i], str)]
|
| 358 |
+
if nonstrings:
|
| 359 |
+
raise TypeError("list[i] not a string for i in %s"
|
| 360 |
+
% ", ".join(map(str, nonstrings)))
|
| 361 |
+
size = len(list)
|
| 362 |
+
if size == 1:
|
| 363 |
+
self.stdout.write('%s\n'%str(list[0]))
|
| 364 |
+
return
|
| 365 |
+
# Try every row count from 1 upwards
|
| 366 |
+
for nrows in range(1, len(list)):
|
| 367 |
+
ncols = (size+nrows-1) // nrows
|
| 368 |
+
colwidths = []
|
| 369 |
+
totwidth = -2
|
| 370 |
+
for col in range(ncols):
|
| 371 |
+
colwidth = 0
|
| 372 |
+
for row in range(nrows):
|
| 373 |
+
i = row + nrows*col
|
| 374 |
+
if i >= size:
|
| 375 |
+
break
|
| 376 |
+
x = list[i]
|
| 377 |
+
colwidth = max(colwidth, len(x))
|
| 378 |
+
colwidths.append(colwidth)
|
| 379 |
+
totwidth += colwidth + 2
|
| 380 |
+
if totwidth > displaywidth:
|
| 381 |
+
break
|
| 382 |
+
if totwidth <= displaywidth:
|
| 383 |
+
break
|
| 384 |
+
else:
|
| 385 |
+
nrows = len(list)
|
| 386 |
+
ncols = 1
|
| 387 |
+
colwidths = [0]
|
| 388 |
+
for row in range(nrows):
|
| 389 |
+
texts = []
|
| 390 |
+
for col in range(ncols):
|
| 391 |
+
i = row + nrows*col
|
| 392 |
+
if i >= size:
|
| 393 |
+
x = ""
|
| 394 |
+
else:
|
| 395 |
+
x = list[i]
|
| 396 |
+
texts.append(x)
|
| 397 |
+
while texts and not texts[-1]:
|
| 398 |
+
del texts[-1]
|
| 399 |
+
for col in range(len(texts)):
|
| 400 |
+
texts[col] = texts[col].ljust(colwidths[col])
|
| 401 |
+
self.stdout.write("%s\n"%str(" ".join(texts)))
|
llava/lib/python3.10/enum.py
ADDED
|
@@ -0,0 +1,1053 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from types import MappingProxyType, DynamicClassAttribute
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
'EnumMeta',
|
| 7 |
+
'Enum', 'IntEnum', 'Flag', 'IntFlag',
|
| 8 |
+
'auto', 'unique',
|
| 9 |
+
]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _is_descriptor(obj):
|
| 13 |
+
"""
|
| 14 |
+
Returns True if obj is a descriptor, False otherwise.
|
| 15 |
+
"""
|
| 16 |
+
return (
|
| 17 |
+
hasattr(obj, '__get__') or
|
| 18 |
+
hasattr(obj, '__set__') or
|
| 19 |
+
hasattr(obj, '__delete__')
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
def _is_dunder(name):
|
| 23 |
+
"""
|
| 24 |
+
Returns True if a __dunder__ name, False otherwise.
|
| 25 |
+
"""
|
| 26 |
+
return (
|
| 27 |
+
len(name) > 4 and
|
| 28 |
+
name[:2] == name[-2:] == '__' and
|
| 29 |
+
name[2] != '_' and
|
| 30 |
+
name[-3] != '_'
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
def _is_sunder(name):
|
| 34 |
+
"""
|
| 35 |
+
Returns True if a _sunder_ name, False otherwise.
|
| 36 |
+
"""
|
| 37 |
+
return (
|
| 38 |
+
len(name) > 2 and
|
| 39 |
+
name[0] == name[-1] == '_' and
|
| 40 |
+
name[1:2] != '_' and
|
| 41 |
+
name[-2:-1] != '_'
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
def _is_private(cls_name, name):
|
| 45 |
+
# do not use `re` as `re` imports `enum`
|
| 46 |
+
pattern = '_%s__' % (cls_name, )
|
| 47 |
+
pat_len = len(pattern)
|
| 48 |
+
if (
|
| 49 |
+
len(name) > pat_len
|
| 50 |
+
and name.startswith(pattern)
|
| 51 |
+
and name[pat_len:pat_len+1] != ['_']
|
| 52 |
+
and (name[-1] != '_' or name[-2] != '_')
|
| 53 |
+
):
|
| 54 |
+
return True
|
| 55 |
+
else:
|
| 56 |
+
return False
|
| 57 |
+
|
| 58 |
+
def _make_class_unpicklable(cls):
|
| 59 |
+
"""
|
| 60 |
+
Make the given class un-picklable.
|
| 61 |
+
"""
|
| 62 |
+
def _break_on_call_reduce(self, proto):
|
| 63 |
+
raise TypeError('%r cannot be pickled' % self)
|
| 64 |
+
cls.__reduce_ex__ = _break_on_call_reduce
|
| 65 |
+
cls.__module__ = '<unknown>'
|
| 66 |
+
|
| 67 |
+
_auto_null = object()
|
| 68 |
+
class auto:
|
| 69 |
+
"""
|
| 70 |
+
Instances are replaced with an appropriate value in Enum class suites.
|
| 71 |
+
"""
|
| 72 |
+
value = _auto_null
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class _EnumDict(dict):
    """
    Track enum member order and ensure member names are not reused.

    EnumMeta will use the names found in self._member_names as the
    enumeration member names.
    """
    def __init__(self):
        super().__init__()
        # names of future enum members, in definition order
        self._member_names = []
        # values already assigned, fed to _generate_next_value_ for auto()
        self._last_values = []
        # names listed in _ignore_ -- assignments to them never become members
        self._ignore = []
        # True once an auto() value has been generated; after that it is too
        # late to (re)define _generate_next_value_
        self._auto_called = False

    def __setitem__(self, key, value):
        """
        Changes anything not dundered or not a descriptor.

        If an enum member name is used twice, an error is raised; duplicate
        values are not checked for.

        Single underscore (sunder) names are reserved.
        """
        # NOTE: _cls_name is attached externally (by EnumMeta.__prepare__)
        # before any item is set.
        if _is_private(self._cls_name, key):
            import warnings
            warnings.warn(
                    "private variables, such as %r, will be normal attributes in 3.11"
                        % (key, ),
                    DeprecationWarning,
                    stacklevel=2,
                    )
        if _is_sunder(key):
            # only a small whitelist of sunder names is allowed
            if key not in (
                    '_order_', '_create_pseudo_member_',
                    '_generate_next_value_', '_missing_', '_ignore_',
                    ):
                raise ValueError('_names_ are reserved for future Enum use')
            if key == '_generate_next_value_':
                # check if members already defined as auto()
                if self._auto_called:
                    raise TypeError("_generate_next_value_ must be defined before members")
                # stored on the instance (without trailing '_') so auto()
                # handling below can call it directly
                setattr(self, '_generate_next_value', value)
            elif key == '_ignore_':
                # accept either a comma/space separated string or an iterable
                if isinstance(value, str):
                    value = value.replace(',',' ').split()
                else:
                    value = list(value)
                self._ignore = value
                already = set(value) & set(self._member_names)
                if already:
                    raise ValueError(
                            '_ignore_ cannot specify already set names: %r'
                            % (already, )
                            )
        elif _is_dunder(key):
            # legacy alias for the ordering sunder
            if key == '__order__':
                key = '_order_'
        elif key in self._member_names:
            # descriptor overwriting an enum?
            raise TypeError('Attempted to reuse key: %r' % key)
        elif key in self._ignore:
            pass
        elif not _is_descriptor(value):
            if key in self:
                # enum overwriting a descriptor?
                raise TypeError('%r already defined as: %r' % (key, self[key]))
            if isinstance(value, auto):
                # generate the value lazily, only if one was not supplied
                if value.value == _auto_null:
                    value.value = self._generate_next_value(
                            key,
                            1,
                            len(self._member_names),
                            self._last_values[:],
                            )
                    self._auto_called = True
                value = value.value
            self._member_names.append(key)
            self._last_values.append(value)
        super().__setitem__(key, value)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
# until EnumMeta finishes running the first time the Enum class doesn't exist.
# This is also why there are checks in EnumMeta like `if Enum is not None`.
# (The real Enum class definition below rebinds this name.)
Enum = None
|
| 160 |
+
|
| 161 |
+
class EnumMeta(type):
    """
    Metaclass for Enum
    """
    @classmethod
    def __prepare__(metacls, cls, bases, **kwds):
        # check that previous enum members do not exist
        metacls._check_for_existing_members(cls, bases)
        # create the namespace dict
        enum_dict = _EnumDict()
        enum_dict._cls_name = cls
        # inherit previous flags and _generate_next_value_ function
        member_type, first_enum = metacls._get_mixins_(cls, bases)
        if first_enum is not None:
            enum_dict['_generate_next_value_'] = getattr(
                first_enum, '_generate_next_value_', None,
                )
        return enum_dict

    def __new__(metacls, cls, bases, classdict, **kwds):
        """
        Create the new Enum class: turn the names collected in *classdict*
        into enum members, build the lookup maps, and wire up pickle support.
        """
        # an Enum class is final once enumeration items have been defined; it
        # cannot be mixed with other types (int, float, etc.) if it has an
        # inherited __new__ unless a new __new__ is defined (or the resulting
        # class will fail).
        #
        # remove any keys listed in _ignore_
        classdict.setdefault('_ignore_', []).append('_ignore_')
        ignore = classdict['_ignore_']
        for key in ignore:
            classdict.pop(key, None)
        member_type, first_enum = metacls._get_mixins_(cls, bases)
        __new__, save_new, use_args = metacls._find_new_(
                classdict, member_type, first_enum,
                )

        # save enum items into separate mapping so they don't get baked into
        # the new class
        enum_members = {k: classdict[k] for k in classdict._member_names}
        for name in classdict._member_names:
            del classdict[name]

        # adjust the sunders
        _order_ = classdict.pop('_order_', None)

        # check for illegal enum names (any others?)
        invalid_names = set(enum_members) & {'mro', ''}
        if invalid_names:
            raise ValueError('Invalid enum member name: {0}'.format(
                ','.join(invalid_names)))

        # create a default docstring if one has not been provided
        if '__doc__' not in classdict:
            classdict['__doc__'] = 'An enumeration.'

        enum_class = super().__new__(metacls, cls, bases, classdict, **kwds)
        enum_class._member_names_ = []               # names in definition order
        enum_class._member_map_ = {}                 # name->member map
        enum_class._member_type_ = member_type

        # save DynamicClassAttribute attributes from super classes so we know
        # if we can take the shortcut of storing members in the class dict
        dynamic_attributes = {
                k for c in enum_class.mro()
                for k, v in c.__dict__.items()
                if isinstance(v, DynamicClassAttribute)
                }

        # Reverse value->name map for hashable values.
        enum_class._value2member_map_ = {}

        # If a custom type is mixed into the Enum, and it does not know how
        # to pickle itself, pickle.dumps will succeed but pickle.loads will
        # fail.  Rather than have the error show up later and possibly far
        # from the source, sabotage the pickle protocol for this class so
        # that pickle.dumps also fails.
        #
        # However, if the new class implements its own __reduce_ex__, do not
        # sabotage -- it's on them to make sure it works correctly.  We use
        # __reduce_ex__ instead of any of the others as it is preferred by
        # pickle over __reduce__, and it handles all pickle protocols.
        if '__reduce_ex__' not in classdict:
            if member_type is not object:
                methods = ('__getnewargs_ex__', '__getnewargs__',
                        '__reduce_ex__', '__reduce__')
                if not any(m in member_type.__dict__ for m in methods):
                    if '__new__' in classdict:
                        # too late, sabotage
                        _make_class_unpicklable(enum_class)
                    else:
                        # final attempt to verify that pickling would work:
                        # travel mro until __new__ is found, checking for
                        # __reduce__ and friends along the way -- if any of them
                        # are found before/when __new__ is found, pickling should
                        # work
                        sabotage = None
                        for chain in bases:
                            for base in chain.__mro__:
                                if base is object:
                                    continue
                                elif any(m in base.__dict__ for m in methods):
                                    # found one, we're good
                                    sabotage = False
                                    break
                                elif '__new__' in base.__dict__:
                                    # not good
                                    sabotage = True
                                    break
                            if sabotage is not None:
                                break
                        if sabotage:
                            _make_class_unpicklable(enum_class)
        # instantiate them, checking for duplicates as we go
        # we instantiate first instead of checking for duplicates first in case
        # a custom __new__ is doing something funky with the values -- such as
        # auto-numbering ;)
        for member_name in classdict._member_names:
            value = enum_members[member_name]
            if not isinstance(value, tuple):
                args = (value, )
            else:
                args = value
            if member_type is tuple:   # special case for tuple enums
                args = (args, )     # wrap it one more time
            if not use_args:
                enum_member = __new__(enum_class)
                if not hasattr(enum_member, '_value_'):
                    enum_member._value_ = value
            else:
                enum_member = __new__(enum_class, *args)
                if not hasattr(enum_member, '_value_'):
                    if member_type is object:
                        enum_member._value_ = value
                    else:
                        enum_member._value_ = member_type(*args)
            value = enum_member._value_
            enum_member._name_ = member_name
            enum_member.__objclass__ = enum_class
            enum_member.__init__(*args)
            # If another member with the same value was already defined, the
            # new member becomes an alias to the existing one.
            for name, canonical_member in enum_class._member_map_.items():
                if canonical_member._value_ == enum_member._value_:
                    enum_member = canonical_member
                    break
            else:
                # Aliases don't appear in member names (only in __members__).
                enum_class._member_names_.append(member_name)
            # performance boost for any member that would not shadow
            # a DynamicClassAttribute
            if member_name not in dynamic_attributes:
                setattr(enum_class, member_name, enum_member)
            # now add to _member_map_
            enum_class._member_map_[member_name] = enum_member
            try:
                # This may fail if value is not hashable. We can't add the value
                # to the map, and by-value lookups for this value will be
                # linear.
                enum_class._value2member_map_[value] = enum_member
            except TypeError:
                pass

        # double check that repr and friends are not the mixin's or various
        # things break (such as pickle)
        # however, if the method is defined in the Enum itself, don't replace
        # it
        for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
            if name in classdict:
                continue
            class_method = getattr(enum_class, name)
            obj_method = getattr(member_type, name, None)
            enum_method = getattr(first_enum, name, None)
            if obj_method is not None and obj_method is class_method:
                setattr(enum_class, name, enum_method)

        # replace any other __new__ with our own (as long as Enum is not None,
        # anyway) -- again, this is to support pickle
        if Enum is not None:
            # if the user defined their own __new__, save it before it gets
            # clobbered in case they subclass later
            if save_new:
                enum_class.__new_member__ = __new__
            enum_class.__new__ = Enum.__new__

        # py3 support for definition order (helps keep py2/py3 code in sync)
        if _order_ is not None:
            if isinstance(_order_, str):
                _order_ = _order_.replace(',', ' ').split()
            if _order_ != enum_class._member_names_:
                raise TypeError('member order does not match _order_')

        return enum_class

    def __bool__(self):
        """
        classes/types should always be True.
        """
        return True

    def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1):
        """
        Either returns an existing member, or creates a new enum class.

        This method is used both when an enum class is given a value to match
        to an enumeration member (i.e. Color(3)) and for the functional API
        (i.e. Color = Enum('Color', names='RED GREEN BLUE')).

        When used for the functional API:

        `value` will be the name of the new class.

        `names` should be either a string of white-space/comma delimited names
        (values will start at `start`), or an iterator/mapping of name, value pairs.

        `module` should be set to the module this class is being created in;
        if it is not set, an attempt to find that module will be made, but if
        it fails the class will not be picklable.

        `qualname` should be set to the actual location this class can be found
        at in its module; by default it is set to the global scope.  If this is
        not correct, unpickling will fail in some circumstances.

        `type`, if set, will be mixed in as the first base class.
        """
        if names is None:  # simple value lookup
            return cls.__new__(cls, value)
        # otherwise, functional API: we're creating a new Enum type
        return cls._create_(
                value,
                names,
                module=module,
                qualname=qualname,
                type=type,
                start=start,
                )

    def __contains__(cls, obj):
        """
        Return True if `obj` is a member of this enumeration.

        Non-member operands currently raise TypeError (with a deprecation
        warning about the behavior change planned for 3.12).
        """
        if not isinstance(obj, Enum):
            import warnings
            warnings.warn(
                    "in 3.12 __contains__ will no longer raise TypeError, but will return True if\n"
                    "obj is a member or a member's value",
                    DeprecationWarning,
                    stacklevel=2,
                    )
            raise TypeError(
                "unsupported operand type(s) for 'in': '%s' and '%s'" % (
                    type(obj).__qualname__, cls.__class__.__qualname__))
        return isinstance(obj, cls) and obj._name_ in cls._member_map_

    def __delattr__(cls, attr):
        # nicer error message when someone tries to delete an attribute
        # (see issue19025).
        if attr in cls._member_map_:
            raise AttributeError("%s: cannot delete Enum member." % cls.__name__)
        super().__delattr__(attr)

    def __dir__(self):
        """
        Return the standard dunder names plus the member names.
        """
        return (
                ['__class__', '__doc__', '__members__', '__module__']
                + self._member_names_
                )

    def __getattr__(cls, name):
        """
        Return the enum member matching `name`

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        if _is_dunder(name):
            raise AttributeError(name)
        try:
            return cls._member_map_[name]
        except KeyError:
            raise AttributeError(name) from None

    def __getitem__(cls, name):
        """
        Return the member matching `name` (KeyError if there is none).
        """
        return cls._member_map_[name]

    def __iter__(cls):
        """
        Returns members in definition order.
        """
        return (cls._member_map_[name] for name in cls._member_names_)

    def __len__(cls):
        """
        Return the number of (non-alias) members.
        """
        return len(cls._member_names_)

    @property
    def __members__(cls):
        """
        Returns a mapping of member name->value.

        This mapping lists all enum members, including aliases. Note that this
        is a read-only view of the internal mapping.
        """
        return MappingProxyType(cls._member_map_)

    def __repr__(cls):
        """
        Return the representation of the enumeration class itself.
        """
        return "<enum %r>" % cls.__name__

    def __reversed__(cls):
        """
        Returns members in reverse definition order.
        """
        return (cls._member_map_[name] for name in reversed(cls._member_names_))

    def __setattr__(cls, name, value):
        """
        Block attempts to reassign Enum members.

        A simple assignment to the class namespace only changes one of the
        several possible ways to get an Enum member from the Enum class,
        resulting in an inconsistent Enumeration.
        """
        member_map = cls.__dict__.get('_member_map_', {})
        if name in member_map:
            raise AttributeError('Cannot reassign members.')
        super().__setattr__(name, value)

    def _create_(cls, class_name, names, *, module=None, qualname=None, type=None, start=1):
        """
        Convenience method to create a new Enum class.

        `names` can be:

        * A string containing member names, separated either with spaces or
          commas.  Values are incremented by 1 from `start`.
        * An iterable of member names.  Values are incremented by 1 from `start`.
        * An iterable of (member name, value) pairs.
        * A mapping of member name -> value pairs.
        """
        metacls = cls.__class__
        bases = (cls, ) if type is None else (type, cls)
        _, first_enum = cls._get_mixins_(cls, bases)
        classdict = metacls.__prepare__(class_name, bases)

        # special processing needed for names?
        if isinstance(names, str):
            names = names.replace(',', ' ').split()
        if isinstance(names, (tuple, list)) and names and isinstance(names[0], str):
            original_names, names = names, []
            last_values = []
            for count, name in enumerate(original_names):
                value = first_enum._generate_next_value_(name, start, count, last_values[:])
                last_values.append(value)
                names.append((name, value))

        # Here, names is either an iterable of (name, value) or a mapping.
        for item in names:
            if isinstance(item, str):
                member_name, member_value = item, names[item]
            else:
                member_name, member_value = item
            classdict[member_name] = member_value
        enum_class = metacls.__new__(metacls, class_name, bases, classdict)

        # TODO: replace the frame hack if a blessed way to know the calling
        # module is ever developed
        if module is None:
            try:
                module = sys._getframe(2).f_globals['__name__']
            except (AttributeError, ValueError, KeyError):
                pass
        if module is None:
            _make_class_unpicklable(enum_class)
        else:
            enum_class.__module__ = module
        if qualname is not None:
            enum_class.__qualname__ = qualname

        return enum_class

    def _convert_(cls, name, module, filter, source=None):
        """
        Create a new Enum subclass that replaces a collection of global constants
        """
        # convert all constants from source (or module) that pass filter() to
        # a new Enum called name, and export the enum and its members back to
        # module;
        # also, replace the __reduce_ex__ method so unpickling works in
        # previous Python versions
        module_globals = vars(sys.modules[module])
        if source:
            source = vars(source)
        else:
            source = module_globals
        # _value2member_map_ is populated in the same order every time
        # for a consistent reverse mapping of number to name when there
        # are multiple names for the same number.
        # (the comprehension's `name` shadows the parameter only inside it)
        members = [
                (name, value)
                for name, value in source.items()
                if filter(name)]
        try:
            # sort by value
            members.sort(key=lambda t: (t[1], t[0]))
        except TypeError:
            # unless some values aren't comparable, in which case sort by name
            members.sort(key=lambda t: t[0])
        cls = cls(name, members, module=module)
        # NOTE: _reduce_ex_by_name is defined later in this module
        cls.__reduce_ex__ = _reduce_ex_by_name
        module_globals.update(cls.__members__)
        module_globals[name] = cls
        return cls

    @staticmethod
    def _check_for_existing_members(class_name, bases):
        """
        Raise TypeError if any base already has enum members (enumerations
        cannot be extended by subclassing).
        """
        for chain in bases:
            for base in chain.__mro__:
                if issubclass(base, Enum) and base._member_names_:
                    raise TypeError(
                            "%s: cannot extend enumeration %r"
                            % (class_name, base.__name__)
                            )

    @staticmethod
    def _get_mixins_(class_name, bases):
        """
        Returns the type for creating enum members, and the first inherited
        enum class.

        bases: the tuple of bases that was given to __new__
        """
        if not bases:
            return object, Enum

        def _find_data_type(bases):
            # locate the single concrete data type (e.g. int, str) mixed in
            data_types = set()
            for chain in bases:
                candidate = None
                for base in chain.__mro__:
                    if base is object:
                        continue
                    elif issubclass(base, Enum):
                        if base._member_type_ is not object:
                            data_types.add(base._member_type_)
                            break
                    elif '__new__' in base.__dict__:
                        if issubclass(base, Enum):
                            continue
                        data_types.add(candidate or base)
                        break
                    else:
                        candidate = candidate or base
            if len(data_types) > 1:
                raise TypeError('%r: too many data types: %r' % (class_name, data_types))
            elif data_types:
                return data_types.pop()
            else:
                return None

        # ensure final parent class is an Enum derivative, find any concrete
        # data type, and check that Enum has no members
        first_enum = bases[-1]
        if not issubclass(first_enum, Enum):
            raise TypeError("new enumerations should be created as "
                    "`EnumName([mixin_type, ...] [data_type,] enum_type)`")
        member_type = _find_data_type(bases) or object
        if first_enum._member_names_:
            raise TypeError("Cannot extend enumerations")
        return member_type, first_enum

    @staticmethod
    def _find_new_(classdict, member_type, first_enum):
        """
        Returns the __new__ to be used for creating the enum members.

        classdict: the class dictionary given to __new__
        member_type: the data type whose __new__ will be used by default
        first_enum: enumeration to check for an overriding __new__
        """
        # now find the correct __new__, checking to see of one was defined
        # by the user; also check earlier enum classes in case a __new__ was
        # saved as __new_member__
        __new__ = classdict.get('__new__', None)

        # should __new__ be saved as __new_member__ later?
        save_new = __new__ is not None

        if __new__ is None:
            # check all possibles for __new_member__ before falling back to
            # __new__
            for method in ('__new_member__', '__new__'):
                for possible in (member_type, first_enum):
                    target = getattr(possible, method, None)
                    if target not in {
                            None,
                            None.__new__,
                            object.__new__,
                            Enum.__new__,
                            }:
                        __new__ = target
                        break
                if __new__ is not None:
                    break
            else:
                __new__ = object.__new__

        # if a non-object.__new__ is used then whatever value/tuple was
        # assigned to the enum member name will be passed to __new__ and to the
        # new enum member's __init__
        if __new__ is object.__new__:
            use_args = False
        else:
            use_args = True
        return __new__, save_new, use_args
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
class Enum(metaclass=EnumMeta):
    """
    Generic enumeration.

    Derive from this class to define new enumerations.
    """
    def __new__(cls, value):
        # all enum instances are actually created during class construction
        # without calling this method; this method is called by the metaclass'
        # __call__ (i.e. Color(3) ), and by pickle
        if type(value) is cls:
            # For lookups like Color(Color.RED)
            return value
        # by-value search for a matching enum member
        # see if it's in the reverse mapping (for hashable values)
        try:
            return cls._value2member_map_[value]
        except KeyError:
            # Not found, no need to do long O(n) search
            pass
        except TypeError:
            # not there, now do long search -- O(n) behavior
            # (TypeError means the value is unhashable, so it can never be a
            # key of _value2member_map_; compare against each member instead)
            for member in cls._member_map_.values():
                if member._value_ == value:
                    return member
        # still not found -- try _missing_ hook
        try:
            exc = None
            result = cls._missing_(value)
        except Exception as e:
            exc = e
            result = None
        try:
            if isinstance(result, cls):
                return result
            else:
                ve_exc = ValueError("%r is not a valid %s" % (value, cls.__qualname__))
                if result is None and exc is None:
                    raise ve_exc
                elif exc is None:
                    # _missing_ returned something that is neither None nor a
                    # member -- that is a bug in the subclass's hook
                    exc = TypeError(
                            'error in %s._missing_: returned %r instead of None or a valid member'
                            % (cls.__name__, result)
                            )
                if not isinstance(exc, ValueError):
                    # chain the lookup failure onto whatever _missing_ raised
                    exc.__context__ = ve_exc
                raise exc
        finally:
            # ensure all variables that could hold an exception are destroyed
            exc = None
            ve_exc = None

    def _generate_next_value_(name, start, count, last_values):
        """
        Generate the next value when not given.

        name: the name of the member
        start: the initial start value or None
        count: the number of existing members
        last_values: the list of values already assigned, in definition order

        Note: this is called as a plain function during class creation
        (``name`` is the member name, not ``self``).
        """
        # walk backwards to find the most recent value that supports +1
        for last_value in reversed(last_values):
            try:
                return last_value + 1
            except TypeError:
                pass
        else:
            # for/else: no usable previous value -- fall back to start
            return start

    @classmethod
    def _missing_(cls, value):
        # Default hook: no fallback lookup; subclasses may override to map
        # unknown values to members (see Flag._missing_).
        return None

    def __repr__(self):
        return "<%s.%s: %r>" % (
                self.__class__.__name__, self._name_, self._value_)

    def __str__(self):
        return "%s.%s" % (self.__class__.__name__, self._name_)

    def __dir__(self):
        """
        Returns all members and all public methods
        """
        # public names from the whole MRO plus instance attributes,
        # excluding member names (those are reachable via the class)
        added_behavior = [
                m
                for cls in self.__class__.mro()
                for m in cls.__dict__
                if m[0] != '_' and m not in self._member_map_
                ] + [m for m in self.__dict__ if m[0] != '_']
        return (['__class__', '__doc__', '__module__'] + added_behavior)

    def __format__(self, format_spec):
        """
        Returns format using actual value type unless __str__ has been overridden.
        """
        # mixed-in Enums should use the mixed-in type's __format__, otherwise
        # we can get strange results with the Enum name showing up instead of
        # the value

        # pure Enum branch, or branch with __str__ explicitly overridden
        str_overridden = type(self).__str__ not in (Enum.__str__, Flag.__str__)
        if self._member_type_ is object or str_overridden:
            cls = str
            val = str(self)
        # mix-in branch
        else:
            cls = self._member_type_
            val = self._value_
        return cls.__format__(val, format_spec)

    def __hash__(self):
        # hash by name, not value: values may be unhashable or equal across
        # distinct members of different enums
        return hash(self._name_)

    def __reduce_ex__(self, proto):
        # pickle support: re-create the member via by-value lookup
        return self.__class__, (self._value_, )

    # DynamicClassAttribute is used to provide access to the `name` and
    # `value` properties of enum members while keeping some measure of
    # protection from modification, while still allowing for an enumeration
    # to have members named `name` and `value`.  This works because enumeration
    # members are not set directly on the enum class -- __getattr__ is
    # used to look them up.

    @DynamicClassAttribute
    def name(self):
        """The name of the Enum member."""
        return self._name_

    @DynamicClassAttribute
    def value(self):
        """The value of the Enum member."""
        return self._value_
|
| 805 |
+
|
| 806 |
+
|
| 807 |
+
class IntEnum(int, Enum):
    """Enum where members are also (and must be) ints"""
    # No body needed: mixing in ``int`` makes each member an actual int
    # instance, so members work anywhere a plain int does.
|
| 809 |
+
|
| 810 |
+
|
| 811 |
+
def _reduce_ex_by_name(self, proto):
|
| 812 |
+
return self.name
|
| 813 |
+
|
| 814 |
+
class Flag(Enum):
    """
    Support for flags
    """

    def _generate_next_value_(name, start, count, last_values):
        """
        Generate the next value when not given.

        name: the name of the member
        start: the initial start value or None
        count: the number of existing members
        last_values: the list of values already assigned, in definition order
        """
        if not count:
            # first member: use the requested start, default to 1
            return start if start is not None else 1
        # next value is the next free power of two above the newest member
        for last_value in reversed(last_values):
            try:
                high_bit = _high_bit(last_value)
                break
            except Exception:
                raise TypeError('Invalid Flag value: %r' % last_value) from None
        return 2 ** (high_bit+1)

    @classmethod
    def _missing_(cls, value):
        """
        Returns member (possibly creating it) if one can be found for value.
        """
        original_value = value
        if value < 0:
            # negative values are treated as the complement of their bits
            value = ~value
        possible_member = cls._create_pseudo_member_(value)
        if original_value < 0:
            possible_member = ~possible_member
        return possible_member

    @classmethod
    def _create_pseudo_member_(cls, value):
        """
        Create a composite member iff value contains only members.
        """
        pseudo_member = cls._value2member_map_.get(value, None)
        if pseudo_member is None:
            # verify all bits are accounted for
            _, extra_flags = _decompose(cls, value)
            if extra_flags:
                raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
            # construct a singleton enum pseudo-member
            # (object.__new__ bypasses Enum.__new__'s by-value lookup)
            pseudo_member = object.__new__(cls)
            pseudo_member._name_ = None
            pseudo_member._value_ = value
            # use setdefault in case another thread already created a composite
            # with this value
            pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
        return pseudo_member

    def __contains__(self, other):
        """
        Returns True if self has at least the same flags set as other.
        """
        if not isinstance(other, self.__class__):
            raise TypeError(
                "unsupported operand type(s) for 'in': '%s' and '%s'" % (
                    type(other).__qualname__, self.__class__.__qualname__))
        # subset test on the bit patterns
        return other._value_ & self._value_ == other._value_

    def __repr__(self):
        cls = self.__class__
        if self._name_ is not None:
            return '<%s.%s: %r>' % (cls.__name__, self._name_, self._value_)
        # unnamed composite: render as the '|' of its decomposed parts
        members, uncovered = _decompose(cls, self._value_)
        return '<%s.%s: %r>' % (
                cls.__name__,
                '|'.join([str(m._name_ or m._value_) for m in members]),
                self._value_,
                )

    def __str__(self):
        cls = self.__class__
        if self._name_ is not None:
            return '%s.%s' % (cls.__name__, self._name_)
        members, uncovered = _decompose(cls, self._value_)
        if len(members) == 1 and members[0]._name_ is None:
            # single unnamed pseudo-member: show its raw value
            return '%s.%r' % (cls.__name__, members[0]._value_)
        else:
            return '%s.%s' % (
                    cls.__name__,
                    '|'.join([str(m._name_ or m._value_) for m in members]),
                    )

    def __bool__(self):
        # a flag is falsy iff no bits are set
        return bool(self._value_)

    def __or__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__class__(self._value_ | other._value_)

    def __and__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__class__(self._value_ & other._value_)

    def __xor__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__class__(self._value_ ^ other._value_)

    def __invert__(self):
        # complement within the set of defined flags, not raw bitwise ~
        members, uncovered = _decompose(self.__class__, self._value_)
        inverted = self.__class__(0)
        for m in self.__class__:
            if m not in members and not (m._value_ & self._value_):
                inverted = inverted | m
        return self.__class__(inverted)
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
class IntFlag(int, Flag):
    """
    Support for integer-based Flags
    """

    @classmethod
    def _missing_(cls, value):
        """
        Returns member (possibly creating it) if one can be found for value.
        """
        if not isinstance(value, int):
            raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
        new_member = cls._create_pseudo_member_(value)
        return new_member

    @classmethod
    def _create_pseudo_member_(cls, value):
        """
        Create a composite member iff value contains only members.

        Unlike Flag, unknown bits are allowed: any bit not covered by a
        defined member gets its own unnamed single-bit pseudo-member.
        """
        pseudo_member = cls._value2member_map_.get(value, None)
        if pseudo_member is None:
            need_to_create = [value]
            # get unaccounted for bits
            _, extra_flags = _decompose(cls, value)
            # timer = 10
            while extra_flags:
                # timer -= 1
                bit = _high_bit(extra_flags)
                flag_value = 2 ** bit
                if (flag_value not in cls._value2member_map_ and
                        flag_value not in need_to_create
                        ):
                    need_to_create.append(flag_value)
                if extra_flags == -flag_value:
                    # clearing this bit would zero the (negative) remainder
                    extra_flags = 0
                else:
                    extra_flags ^= flag_value
            for value in reversed(need_to_create):
                # construct singleton pseudo-members
                # (int.__new__ so the member really is the int ``value``)
                pseudo_member = int.__new__(cls, value)
                pseudo_member._name_ = None
                pseudo_member._value_ = value
                # use setdefault in case another thread already created a composite
                # with this value
                pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
        return pseudo_member

    def __or__(self, other):
        # also accept plain ints, converting through the flag class
        if not isinstance(other, (self.__class__, int)):
            return NotImplemented
        result = self.__class__(self._value_ | self.__class__(other)._value_)
        return result

    def __and__(self, other):
        if not isinstance(other, (self.__class__, int)):
            return NotImplemented
        return self.__class__(self._value_ & self.__class__(other)._value_)

    def __xor__(self, other):
        if not isinstance(other, (self.__class__, int)):
            return NotImplemented
        return self.__class__(self._value_ ^ self.__class__(other)._value_)

    # reflected operators behave identically (the ops are symmetric here)
    __ror__ = __or__
    __rand__ = __and__
    __rxor__ = __xor__

    def __invert__(self):
        # raw bitwise complement; _missing_ materializes any new bits
        result = self.__class__(~self._value_)
        return result
|
| 1003 |
+
|
| 1004 |
+
|
| 1005 |
+
def _high_bit(value):
|
| 1006 |
+
"""
|
| 1007 |
+
returns index of highest bit, or -1 if value is zero or negative
|
| 1008 |
+
"""
|
| 1009 |
+
return value.bit_length() - 1
|
| 1010 |
+
|
| 1011 |
+
def unique(enumeration):
    """
    Class decorator for enumerations ensuring unique member values.
    """
    # An alias is a name whose bound member carries a different canonical
    # name -- i.e. its value duplicated an earlier member's value.
    aliases = [
        (alias, member.name)
        for alias, member in enumeration.__members__.items()
        if alias != member.name
    ]
    if aliases:
        alias_details = ', '.join(
            "%s -> %s" % (alias, name) for (alias, name) in aliases)
        raise ValueError('duplicate values found in %r: %s' %
                         (enumeration, alias_details))
    return enumeration
|
| 1025 |
+
|
| 1026 |
+
def _decompose(flag, value):
    """
    Extract all members from the value.

    Returns ``(members, not_covered)`` where ``members`` is the list of
    flag members whose bits are present in ``value`` (largest value
    first) and ``not_covered`` is the remaining bits that no member
    accounts for.
    """
    # _decompose is only called if the value is not named
    not_covered = value
    negative = value < 0
    members = []
    # first pass: every canonical member fully contained in value
    for member in flag:
        member_value = member.value
        if member_value and member_value & value == member_value:
            members.append(member)
            not_covered &= ~member_value
    if not negative:
        # second pass: pick up single-bit pseudo-members (which are not
        # yielded by iterating the flag class) for any leftover bits
        tmp = not_covered
        while tmp:
            flag_value = 2 ** _high_bit(tmp)
            if flag_value in flag._value2member_map_:
                members.append(flag._value2member_map_[flag_value])
                not_covered &= ~flag_value
            tmp &= ~flag_value
    if not members and value in flag._value2member_map_:
        # exact-value pseudo-member (e.g. a stored composite)
        members.append(flag._value2member_map_[value])
    members.sort(key=lambda m: m._value_, reverse=True)
    if len(members) > 1 and members[0].value == value:
        # we have the breakdown, don't need the value member itself
        members.pop(0)
    return members, not_covered
|
llava/lib/python3.10/fileinput.py
ADDED
|
@@ -0,0 +1,462 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper class to quickly write a loop over all standard input files.
|
| 2 |
+
|
| 3 |
+
Typical use is:
|
| 4 |
+
|
| 5 |
+
import fileinput
|
| 6 |
+
for line in fileinput.input(encoding="utf-8"):
|
| 7 |
+
process(line)
|
| 8 |
+
|
| 9 |
+
This iterates over the lines of all files listed in sys.argv[1:],
|
| 10 |
+
defaulting to sys.stdin if the list is empty. If a filename is '-' it
|
| 11 |
+
is also replaced by sys.stdin and the optional arguments mode and
|
| 12 |
+
openhook are ignored. To specify an alternative list of filenames,
|
| 13 |
+
pass it as the argument to input(). A single file name is also allowed.
|
| 14 |
+
|
| 15 |
+
Functions filename(), lineno() return the filename and cumulative line
|
| 16 |
+
number of the line that has just been read; filelineno() returns its
|
| 17 |
+
line number in the current file; isfirstline() returns true iff the
|
| 18 |
+
line just read is the first line of its file; isstdin() returns true
|
| 19 |
+
iff the line was read from sys.stdin. Function nextfile() closes the
|
| 20 |
+
current file so that the next iteration will read the first line from
|
| 21 |
+
the next file (if any); lines not read from the file will not count
|
| 22 |
+
towards the cumulative line count; the filename is not changed until
|
| 23 |
+
after the first line of the next file has been read. Function close()
|
| 24 |
+
closes the sequence.
|
| 25 |
+
|
| 26 |
+
Before any lines have been read, filename() returns None and both line
|
| 27 |
+
numbers are zero; nextfile() has no effect. After all lines have been
|
| 28 |
+
read, filename() and the line number functions return the values
|
| 29 |
+
pertaining to the last line read; nextfile() has no effect.
|
| 30 |
+
|
| 31 |
+
All files are opened in text mode by default, you can override this by
|
| 32 |
+
setting the mode parameter to input() or FileInput.__init__().
|
| 33 |
+
If an I/O error occurs during opening or reading a file, the OSError
|
| 34 |
+
exception is raised.
|
| 35 |
+
|
| 36 |
+
If sys.stdin is used more than once, the second and further use will
|
| 37 |
+
return no lines, except perhaps for interactive use, or if it has been
|
| 38 |
+
explicitly reset (e.g. using sys.stdin.seek(0)).
|
| 39 |
+
|
| 40 |
+
Empty files are opened and immediately closed; the only time their
|
| 41 |
+
presence in the list of filenames is noticeable at all is when the
|
| 42 |
+
last file opened is empty.
|
| 43 |
+
|
| 44 |
+
It is possible that the last line of a file doesn't end in a newline
|
| 45 |
+
character; otherwise lines are returned including the trailing
|
| 46 |
+
newline.
|
| 47 |
+
|
| 48 |
+
Class FileInput is the implementation; its methods filename(),
|
| 49 |
+
lineno(), fileline(), isfirstline(), isstdin(), nextfile() and close()
|
| 50 |
+
correspond to the functions in the module. In addition it has a
|
| 51 |
+
readline() method which returns the next input line, and a
|
| 52 |
+
__getitem__() method which implements the sequence behavior. The
|
| 53 |
+
sequence must be accessed in strictly sequential order; sequence
|
| 54 |
+
access and readline() cannot be mixed.
|
| 55 |
+
|
| 56 |
+
Optional in-place filtering: if the keyword argument inplace=1 is
|
| 57 |
+
passed to input() or to the FileInput constructor, the file is moved
|
| 58 |
+
to a backup file and standard output is directed to the input file.
|
| 59 |
+
This makes it possible to write a filter that rewrites its input file
|
| 60 |
+
in place. If the keyword argument backup=".<some extension>" is also
|
| 61 |
+
given, it specifies the extension for the backup file, and the backup
|
| 62 |
+
file remains around; by default, the extension is ".bak" and it is
|
| 63 |
+
deleted when the output file is closed. In-place filtering is
|
| 64 |
+
disabled when standard input is read. XXX The current implementation
|
| 65 |
+
does not work for MS-DOS 8+3 filesystems.
|
| 66 |
+
"""
|
| 67 |
+
|
| 68 |
+
import io
|
| 69 |
+
import sys, os
|
| 70 |
+
from types import GenericAlias
|
| 71 |
+
|
| 72 |
+
__all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
|
| 73 |
+
"fileno", "isfirstline", "isstdin", "FileInput", "hook_compressed",
|
| 74 |
+
"hook_encoded"]
|
| 75 |
+
|
| 76 |
+
_state = None
|
| 77 |
+
|
| 78 |
+
def input(files=None, inplace=False, backup="", *, mode="r", openhook=None,
          encoding=None, errors=None):
    """Return an instance of the FileInput class, which can be iterated.

    The parameters are passed to the constructor of the FileInput class.
    The returned instance, in addition to being an iterator,
    keeps global state for the functions of this module.
    """
    global _state
    # refuse to clobber a FileInput that still has an open file
    if _state is not None and _state._file:
        raise RuntimeError("input() already active")
    _state = FileInput(files, inplace, backup, mode=mode,
                       openhook=openhook, encoding=encoding, errors=errors)
    return _state
|
| 92 |
+
|
| 93 |
+
def close():
    """Close the sequence."""
    global _state
    # detach the module-level state before closing so a failing close()
    # still leaves the module deactivated
    state, _state = _state, None
    if state:
        state.close()
|
| 100 |
+
|
| 101 |
+
def nextfile():
    """
    Close the current file so that the next iteration will read the first
    line from the next file (if any); lines not read from the file will
    not count towards the cumulative line count. The filename is not
    changed until after the first line of the next file has been read.
    Before the first line has been read, this function has no effect;
    it cannot be used to skip the first file. After the last line of the
    last file has been read, this function has no effect.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.nextfile()
|
| 114 |
+
|
| 115 |
+
def filename():
    """
    Return the name of the file currently being read.
    Before the first line has been read, returns None.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.filename()
|
| 123 |
+
|
| 124 |
+
def lineno():
    """
    Return the cumulative line number of the line that has just been read.
    Before the first line has been read, returns 0. After the last line
    of the last file has been read, returns the line number of that line.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.lineno()
|
| 133 |
+
|
| 134 |
+
def filelineno():
    """
    Return the line number in the current file. Before the first line
    has been read, returns 0. After the last line of the last file has
    been read, returns the line number of that line within the file.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.filelineno()
|
| 143 |
+
|
| 144 |
+
def fileno():
    """
    Return the file number of the current file. When no file is currently
    opened, returns -1.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.fileno()
|
| 152 |
+
|
| 153 |
+
def isfirstline():
    """
    Return True if the line just read is the first line of its file,
    otherwise return False.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.isfirstline()
|
| 161 |
+
|
| 162 |
+
def isstdin():
    """
    Return True if the last line was read from sys.stdin,
    otherwise return False.
    """
    state = _state
    if state is None:
        raise RuntimeError("no active input()")
    return state.isstdin()
|
| 170 |
+
|
| 171 |
+
class FileInput:
|
| 172 |
+
"""FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None)
|
| 173 |
+
|
| 174 |
+
Class FileInput is the implementation of the module; its methods
|
| 175 |
+
filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(),
|
| 176 |
+
nextfile() and close() correspond to the functions of the same name
|
| 177 |
+
in the module.
|
| 178 |
+
In addition it has a readline() method which returns the next
|
| 179 |
+
input line, and a __getitem__() method which implements the
|
| 180 |
+
sequence behavior. The sequence must be accessed in strictly
|
| 181 |
+
sequential order; random access and readline() cannot be mixed.
|
| 182 |
+
"""
|
| 183 |
+
|
| 184 |
+
def __init__(self, files=None, inplace=False, backup="", *,
|
| 185 |
+
mode="r", openhook=None, encoding=None, errors=None):
|
| 186 |
+
if isinstance(files, str):
|
| 187 |
+
files = (files,)
|
| 188 |
+
elif isinstance(files, os.PathLike):
|
| 189 |
+
files = (os.fspath(files), )
|
| 190 |
+
else:
|
| 191 |
+
if files is None:
|
| 192 |
+
files = sys.argv[1:]
|
| 193 |
+
if not files:
|
| 194 |
+
files = ('-',)
|
| 195 |
+
else:
|
| 196 |
+
files = tuple(files)
|
| 197 |
+
self._files = files
|
| 198 |
+
self._inplace = inplace
|
| 199 |
+
self._backup = backup
|
| 200 |
+
self._savestdout = None
|
| 201 |
+
self._output = None
|
| 202 |
+
self._filename = None
|
| 203 |
+
self._startlineno = 0
|
| 204 |
+
self._filelineno = 0
|
| 205 |
+
self._file = None
|
| 206 |
+
self._isstdin = False
|
| 207 |
+
self._backupfilename = None
|
| 208 |
+
self._encoding = encoding
|
| 209 |
+
self._errors = errors
|
| 210 |
+
|
| 211 |
+
# We can not use io.text_encoding() here because old openhook doesn't
|
| 212 |
+
# take encoding parameter.
|
| 213 |
+
if (sys.flags.warn_default_encoding and
|
| 214 |
+
"b" not in mode and encoding is None and openhook is None):
|
| 215 |
+
import warnings
|
| 216 |
+
warnings.warn("'encoding' argument not specified.",
|
| 217 |
+
EncodingWarning, 2)
|
| 218 |
+
|
| 219 |
+
# restrict mode argument to reading modes
|
| 220 |
+
if mode not in ('r', 'rU', 'U', 'rb'):
|
| 221 |
+
raise ValueError("FileInput opening mode must be one of "
|
| 222 |
+
"'r', 'rU', 'U' and 'rb'")
|
| 223 |
+
if 'U' in mode:
|
| 224 |
+
import warnings
|
| 225 |
+
warnings.warn("'U' mode is deprecated",
|
| 226 |
+
DeprecationWarning, 2)
|
| 227 |
+
self._mode = mode
|
| 228 |
+
self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
|
| 229 |
+
if openhook:
|
| 230 |
+
if inplace:
|
| 231 |
+
raise ValueError("FileInput cannot use an opening hook in inplace mode")
|
| 232 |
+
if not callable(openhook):
|
| 233 |
+
raise ValueError("FileInput openhook must be callable")
|
| 234 |
+
self._openhook = openhook
|
| 235 |
+
|
| 236 |
+
def __del__(self):
|
| 237 |
+
self.close()
|
| 238 |
+
|
| 239 |
+
def close(self):
|
| 240 |
+
try:
|
| 241 |
+
self.nextfile()
|
| 242 |
+
finally:
|
| 243 |
+
self._files = ()
|
| 244 |
+
|
| 245 |
+
def __enter__(self):
|
| 246 |
+
return self
|
| 247 |
+
|
| 248 |
+
def __exit__(self, type, value, traceback):
|
| 249 |
+
self.close()
|
| 250 |
+
|
| 251 |
+
def __iter__(self):
|
| 252 |
+
return self
|
| 253 |
+
|
| 254 |
+
def __next__(self):
|
| 255 |
+
while True:
|
| 256 |
+
line = self._readline()
|
| 257 |
+
if line:
|
| 258 |
+
self._filelineno += 1
|
| 259 |
+
return line
|
| 260 |
+
if not self._file:
|
| 261 |
+
raise StopIteration
|
| 262 |
+
self.nextfile()
|
| 263 |
+
# repeat with next file
|
| 264 |
+
|
| 265 |
+
def __getitem__(self, i):
|
| 266 |
+
import warnings
|
| 267 |
+
warnings.warn(
|
| 268 |
+
"Support for indexing FileInput objects is deprecated. "
|
| 269 |
+
"Use iterator protocol instead.",
|
| 270 |
+
DeprecationWarning,
|
| 271 |
+
stacklevel=2
|
| 272 |
+
)
|
| 273 |
+
if i != self.lineno():
|
| 274 |
+
raise RuntimeError("accessing lines out of order")
|
| 275 |
+
try:
|
| 276 |
+
return self.__next__()
|
| 277 |
+
except StopIteration:
|
| 278 |
+
raise IndexError("end of input reached")
|
| 279 |
+
|
| 280 |
+
def nextfile(self):
|
| 281 |
+
savestdout = self._savestdout
|
| 282 |
+
self._savestdout = None
|
| 283 |
+
if savestdout:
|
| 284 |
+
sys.stdout = savestdout
|
| 285 |
+
|
| 286 |
+
output = self._output
|
| 287 |
+
self._output = None
|
| 288 |
+
try:
|
| 289 |
+
if output:
|
| 290 |
+
output.close()
|
| 291 |
+
finally:
|
| 292 |
+
file = self._file
|
| 293 |
+
self._file = None
|
| 294 |
+
try:
|
| 295 |
+
del self._readline # restore FileInput._readline
|
| 296 |
+
except AttributeError:
|
| 297 |
+
pass
|
| 298 |
+
try:
|
| 299 |
+
if file and not self._isstdin:
|
| 300 |
+
file.close()
|
| 301 |
+
finally:
|
| 302 |
+
backupfilename = self._backupfilename
|
| 303 |
+
self._backupfilename = None
|
| 304 |
+
if backupfilename and not self._backup:
|
| 305 |
+
try: os.unlink(backupfilename)
|
| 306 |
+
except OSError: pass
|
| 307 |
+
|
| 308 |
+
self._isstdin = False
|
| 309 |
+
|
| 310 |
+
def readline(self):
|
| 311 |
+
while True:
|
| 312 |
+
line = self._readline()
|
| 313 |
+
if line:
|
| 314 |
+
self._filelineno += 1
|
| 315 |
+
return line
|
| 316 |
+
if not self._file:
|
| 317 |
+
return line
|
| 318 |
+
self.nextfile()
|
| 319 |
+
# repeat with next file
|
| 320 |
+
|
| 321 |
+
def _readline(self):
    """Advance to the next input file, open it, and read its first line.

    This is only reachable while no per-file readline is bound on the
    instance: the last statement rebinds self._readline to the opened
    file's readline, so subsequent reads bypass this method entirely
    (nextfile() deletes that binding to re-enable it).
    """
    if not self._files:
        # All inputs exhausted: return an EOF sentinel of the right type.
        if 'b' in self._mode:
            return b''
        else:
            return ''
    # Pop the next filename and reset per-file bookkeeping.
    self._filename = self._files[0]
    self._files = self._files[1:]
    self._startlineno = self.lineno()
    self._filelineno = 0
    self._file = None
    self._isstdin = False
    self._backupfilename = 0

    # EncodingWarning is emitted in __init__() already
    if "b" not in self._mode:
        encoding = self._encoding or "locale"
    else:
        encoding = None

    if self._filename == '-':
        # Conventional "-" argument means read from standard input.
        self._filename = '<stdin>'
        if 'b' in self._mode:
            self._file = getattr(sys.stdin, 'buffer', sys.stdin)
        else:
            self._file = sys.stdin
        self._isstdin = True
    else:
        if self._inplace:
            # Inplace editing: rename the original to a backup, read from
            # the backup, and write a fresh file under the original name.
            self._backupfilename = (
                os.fspath(self._filename) + (self._backup or ".bak"))
            try:
                os.unlink(self._backupfilename)
            except OSError:
                pass
            # The next few lines may raise OSError
            os.rename(self._filename, self._backupfilename)
            self._file = open(self._backupfilename, self._mode,
                              encoding=encoding, errors=self._errors)
            try:
                perm = os.fstat(self._file.fileno()).st_mode
            except OSError:
                # Could not stat (e.g. no real fd): fall back to a plain open.
                self._output = open(self._filename, self._write_mode,
                                    encoding=encoding, errors=self._errors)
            else:
                # Recreate the output file with the original's permissions.
                mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
                if hasattr(os, 'O_BINARY'):
                    mode |= os.O_BINARY

                fd = os.open(self._filename, mode, perm)
                self._output = os.fdopen(fd, self._write_mode,
                                         encoding=encoding, errors=self._errors)
                try:
                    os.chmod(self._filename, perm)
                except OSError:
                    pass
            # Redirect stdout so plain print() writes into the edited file.
            self._savestdout = sys.stdout
            sys.stdout = self._output
        else:
            # This may raise OSError
            if self._openhook:
                # Custom hooks made previous to Python 3.10 didn't have
                # encoding argument
                if self._encoding is None:
                    self._file = self._openhook(self._filename, self._mode)
                else:
                    self._file = self._openhook(
                        self._filename, self._mode, encoding=self._encoding, errors=self._errors)
            else:
                self._file = open(self._filename, self._mode, encoding=encoding, errors=self._errors)
    self._readline = self._file.readline  # hide FileInput._readline
    return self._readline()
|
| 393 |
+
|
| 394 |
+
def filename(self):
    """Return the name of the file currently being read."""
    return self._filename
|
| 396 |
+
|
| 397 |
+
def lineno(self):
    """Return the cumulative line number across all files read so far."""
    return self._startlineno + self._filelineno
|
| 399 |
+
|
| 400 |
+
def filelineno(self):
    """Return the line number within the current file (0 before any read)."""
    return self._filelineno
|
| 402 |
+
|
| 403 |
+
def fileno(self):
    """Return the current file's descriptor, or -1 if unavailable.

    -1 is returned both when no file is open and when the underlying
    stream has no real descriptor (fileno() raising ValueError).
    """
    if not self._file:
        return -1
    try:
        return self._file.fileno()
    except ValueError:
        return -1
|
| 411 |
+
|
| 412 |
+
def isfirstline(self):
    """Return True if the line just read was the first of its file."""
    return self._filelineno == 1
|
| 414 |
+
|
| 415 |
+
def isstdin(self):
    """Return True if the current file is being read from sys.stdin."""
    return self._isstdin
|
| 417 |
+
|
| 418 |
+
# Support parameterized generics, e.g. FileInput[str] (PEP 585 style).
# GenericAlias is presumably types.GenericAlias imported at module top
# (import not visible in this chunk) -- confirm against the full file.
__class_getitem__ = classmethod(GenericAlias)
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def hook_compressed(filename, mode, *, encoding=None, errors=None):
    """Open hook that transparently handles gzip/bzip2-compressed files.

    Files ending in .gz or .bz2 are opened through gzip/bz2; in text
    mode their binary stream is wrapped in an io.TextIOWrapper.  Any
    other file is opened with the plain built-in open().
    """
    if encoding is None and "b" not in mode:  # EncodingWarning is emitted in FileInput() already.
        encoding = "locale"
    ext = os.path.splitext(filename)[1]
    if ext not in ('.gz', '.bz2'):
        return open(filename, mode, encoding=encoding, errors=errors)
    if ext == '.gz':
        import gzip
        stream = gzip.open(filename, mode)
    else:
        import bz2
        stream = bz2.BZ2File(filename, mode)
    # gzip and bz2 streams are binary by default; add a text layer if needed.
    if "b" not in mode:
        stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors)
    return stream
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def hook_encoded(encoding, errors=None):
    """Return an open hook that opens files with the given encoding.

    The returned callable matches the FileInput openhook signature
    (filename, mode) and forwards encoding/errors to open().
    """
    def openhook(filename, mode):
        return open(filename, mode, encoding=encoding, errors=errors)
    return openhook
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def _test():
    """Command-line driver: echo numbered lines from the files given in argv.

    Options: -i edits files in place; -b SUFFIX keeps backups with SUFFIX.
    Uses the module-level input()/lineno()/filename()/filelineno()
    convenience functions (defined earlier in this module).
    """
    import getopt
    inplace = False
    backup = False
    opts, args = getopt.getopt(sys.argv[1:], "ib:")
    for o, a in opts:
        if o == '-i': inplace = True
        if o == '-b': backup = a
    for line in input(args, inplace=inplace, backup=backup):
        # Strip a trailing newline and an optional carriage return.
        if line[-1:] == '\n': line = line[:-1]
        if line[-1:] == '\r': line = line[:-1]
        print("%d: %s[%d]%s %s" % (lineno(), filename(), filelineno(),
                                   isfirstline() and "*" or "", line))
    print("%d: %s[%d]" % (lineno(), filename(), filelineno()))
|
| 460 |
+
|
| 461 |
+
if __name__ == '__main__':
    # Allow running this module directly as a simple line-numbering cat.
    _test()
|
llava/lib/python3.10/heapq.py
ADDED
|
@@ -0,0 +1,601 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Heap queue algorithm (a.k.a. priority queue).
|
| 2 |
+
|
| 3 |
+
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
|
| 4 |
+
all k, counting elements from 0. For the sake of comparison,
|
| 5 |
+
non-existing elements are considered to be infinite. The interesting
|
| 6 |
+
property of a heap is that a[0] is always its smallest element.
|
| 7 |
+
|
| 8 |
+
Usage:
|
| 9 |
+
|
| 10 |
+
heap = [] # creates an empty heap
|
| 11 |
+
heappush(heap, item) # pushes a new item on the heap
|
| 12 |
+
item = heappop(heap) # pops the smallest item from the heap
|
| 13 |
+
item = heap[0] # smallest item on the heap without popping it
|
| 14 |
+
heapify(x) # transforms list into a heap, in-place, in linear time
|
| 15 |
+
item = heapreplace(heap, item) # pops and returns smallest item, and adds
|
| 16 |
+
# new item; the heap size is unchanged
|
| 17 |
+
|
| 18 |
+
Our API differs from textbook heap algorithms as follows:
|
| 19 |
+
|
| 20 |
+
- We use 0-based indexing. This makes the relationship between the
|
| 21 |
+
index for a node and the indexes for its children slightly less
|
| 22 |
+
obvious, but is more suitable since Python uses 0-based indexing.
|
| 23 |
+
|
| 24 |
+
- Our heappop() method returns the smallest item, not the largest.
|
| 25 |
+
|
| 26 |
+
These two make it possible to view the heap as a regular Python list
|
| 27 |
+
without surprises: heap[0] is the smallest item, and heap.sort()
|
| 28 |
+
maintains the heap invariant!
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
|
| 32 |
+
|
| 33 |
+
__about__ = """Heap queues
|
| 34 |
+
|
| 35 |
+
[explanation by François Pinard]
|
| 36 |
+
|
| 37 |
+
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
|
| 38 |
+
all k, counting elements from 0. For the sake of comparison,
|
| 39 |
+
non-existing elements are considered to be infinite. The interesting
|
| 40 |
+
property of a heap is that a[0] is always its smallest element.
|
| 41 |
+
|
| 42 |
+
The strange invariant above is meant to be an efficient memory
|
| 43 |
+
representation for a tournament. The numbers below are `k', not a[k]:
|
| 44 |
+
|
| 45 |
+
0
|
| 46 |
+
|
| 47 |
+
1 2
|
| 48 |
+
|
| 49 |
+
3 4 5 6
|
| 50 |
+
|
| 51 |
+
7 8 9 10 11 12 13 14
|
| 52 |
+
|
| 53 |
+
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
|
| 57 |
+
a usual binary tournament we see in sports, each cell is the winner
|
| 58 |
+
over the two cells it tops, and we can trace the winner down the tree
|
| 59 |
+
to see all opponents s/he had. However, in many computer applications
|
| 60 |
+
of such tournaments, we do not need to trace the history of a winner.
|
| 61 |
+
To be more memory efficient, when a winner is promoted, we try to
|
| 62 |
+
replace it by something else at a lower level, and the rule becomes
|
| 63 |
+
that a cell and the two cells it tops contain three different items,
|
| 64 |
+
but the top cell "wins" over the two topped cells.
|
| 65 |
+
|
| 66 |
+
If this heap invariant is protected at all time, index 0 is clearly
|
| 67 |
+
the overall winner. The simplest algorithmic way to remove it and
|
| 68 |
+
find the "next" winner is to move some loser (let's say cell 30 in the
|
| 69 |
+
diagram above) into the 0 position, and then percolate this new 0 down
|
| 70 |
+
the tree, exchanging values, until the invariant is re-established.
|
| 71 |
+
This is clearly logarithmic on the total number of items in the tree.
|
| 72 |
+
By iterating over all items, you get an O(n ln n) sort.
|
| 73 |
+
|
| 74 |
+
A nice feature of this sort is that you can efficiently insert new
|
| 75 |
+
items while the sort is going on, provided that the inserted items are
|
| 76 |
+
not "better" than the last 0'th element you extracted. This is
|
| 77 |
+
especially useful in simulation contexts, where the tree holds all
|
| 78 |
+
incoming events, and the "win" condition means the smallest scheduled
|
| 79 |
+
time. When an event schedule other events for execution, they are
|
| 80 |
+
scheduled into the future, so they can easily go into the heap. So, a
|
| 81 |
+
heap is a good structure for implementing schedulers (this is what I
|
| 82 |
+
used for my MIDI sequencer :-).
|
| 83 |
+
|
| 84 |
+
Various structures for implementing schedulers have been extensively
|
| 85 |
+
studied, and heaps are good for this, as they are reasonably speedy,
|
| 86 |
+
the speed is almost constant, and the worst case is not much different
|
| 87 |
+
than the average case. However, there are other representations which
|
| 88 |
+
are more efficient overall, yet the worst cases might be terrible.
|
| 89 |
+
|
| 90 |
+
Heaps are also very useful in big disk sorts. You most probably all
|
| 91 |
+
know that a big sort implies producing "runs" (which are pre-sorted
|
| 92 |
+
sequences, which size is usually related to the amount of CPU memory),
|
| 93 |
+
followed by a merging passes for these runs, which merging is often
|
| 94 |
+
very cleverly organised[1]. It is very important that the initial
|
| 95 |
+
sort produces the longest runs possible. Tournaments are a good way
|
| 96 |
+
to that. If, using all the memory available to hold a tournament, you
|
| 97 |
+
replace and percolate items that happen to fit the current run, you'll
|
| 98 |
+
produce runs which are twice the size of the memory for random input,
|
| 99 |
+
and much better for input fuzzily ordered.
|
| 100 |
+
|
| 101 |
+
Moreover, if you output the 0'th item on disk and get an input which
|
| 102 |
+
may not fit in the current tournament (because the value "wins" over
|
| 103 |
+
the last output value), it cannot fit in the heap, so the size of the
|
| 104 |
+
heap decreases. The freed memory could be cleverly reused immediately
|
| 105 |
+
for progressively building a second heap, which grows at exactly the
|
| 106 |
+
same rate the first heap is melting. When the first heap completely
|
| 107 |
+
vanishes, you switch heaps and start a new run. Clever and quite
|
| 108 |
+
effective!
|
| 109 |
+
|
| 110 |
+
In a word, heaps are useful memory structures to know. I use them in
|
| 111 |
+
a few applications, and I think it is good to keep a `heap' module
|
| 112 |
+
around. :-)
|
| 113 |
+
|
| 114 |
+
--------------------
|
| 115 |
+
[1] The disk balancing algorithms which are current, nowadays, are
|
| 116 |
+
more annoying than clever, and this is a consequence of the seeking
|
| 117 |
+
capabilities of the disks. On devices which cannot seek, like big
|
| 118 |
+
tape drives, the story was quite different, and one had to be very
|
| 119 |
+
clever to ensure (far in advance) that each tape movement will be the
|
| 120 |
+
most effective possible (that is, will best participate at
|
| 121 |
+
"progressing" the merge). Some tapes were even able to read
|
| 122 |
+
backwards, and this was also used to avoid the rewinding time.
|
| 123 |
+
Believe me, real good tape sorts were quite spectacular to watch!
|
| 124 |
+
From all times, sorting has always been a Great Art! :-)
|
| 125 |
+
"""
|
| 126 |
+
|
| 127 |
+
# Public API.  The max-heap helpers (_heapify_max etc.) are intentionally
# private; nlargest/nsmallest are exported but defined further down.
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
           'nlargest', 'nsmallest', 'heappushpop']
|
| 129 |
+
|
| 130 |
+
def heappush(heap, item):
    """Add *item* to *heap*, keeping the min-heap invariant intact.

    The item is appended at the end and sifted toward the root until
    its parent is no larger.
    """
    heap.append(item)
    _siftdown(heap, 0, len(heap) - 1)
|
| 134 |
+
|
| 135 |
+
def heappop(heap):
    """Remove and return the smallest item, maintaining the heap invariant.

    Raises IndexError when the heap is empty.
    """
    tail = heap.pop()  # IndexError on an empty heap, as documented
    if not heap:
        return tail
    smallest = heap[0]
    heap[0] = tail
    _siftup(heap, 0)
    return smallest
|
| 144 |
+
|
| 145 |
+
def heapreplace(heap, item):
    """Pop and return the current smallest value, and add the new item.

    This is more efficient than heappop() followed by heappush(), and can be
    more appropriate when using a fixed-size heap.  Note that the value
    returned may be larger than item!  That constrains reasonable uses of
    this routine unless written as part of a conditional replacement:

        if item > heap[0]:
            item = heapreplace(heap, item)
    """
    smallest = heap[0]  # IndexError on an empty heap, as documented
    heap[0] = item
    _siftup(heap, 0)
    return smallest
|
| 160 |
+
|
| 161 |
+
def heappushpop(heap, item):
    """Equivalent to a heappush() followed by a heappop(), but faster."""
    # Only __lt__ may be defined on the elements, so the negation is
    # written with 'not ... <' rather than '<='.
    if not heap or not heap[0] < item:
        return item
    item, heap[0] = heap[0], item
    _siftup(heap, 0)
    return item
|
| 167 |
+
|
| 168 |
+
def heapify(x):
    """Transform list into a heap, in-place, in O(len(x)) time."""
    # Only indices below len(x)//2 have children, so sift those down,
    # deepest first -- leaves are already trivially valid heaps.
    for i in range(len(x) // 2 - 1, -1, -1):
        _siftup(x, i)
|
| 178 |
+
|
| 179 |
+
def _heappop_max(heap):
    """Maxheap version of a heappop."""
    tail = heap.pop()  # IndexError on an empty heap, as documented
    if not heap:
        return tail
    largest = heap[0]
    heap[0] = tail
    _siftup_max(heap, 0)
    return largest
|
| 188 |
+
|
| 189 |
+
def _heapreplace_max(heap, item):
    """Maxheap version of a heappop followed by a heappush."""
    largest = heap[0]  # IndexError on an empty heap, as documented
    heap[0] = item
    _siftup_max(heap, 0)
    return largest
|
| 195 |
+
|
| 196 |
+
def _heapify_max(x):
    """Transform list into a maxheap, in-place, in O(len(x)) time."""
    for i in range(len(x) // 2 - 1, -1, -1):
        _siftup_max(x, i)
|
| 201 |
+
|
| 202 |
+
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
|
| 203 |
+
# is the index of a leaf with a possibly out-of-order value. Restore the
|
| 204 |
+
# heap invariant.
|
| 205 |
+
def _siftdown(heap, startpos, pos):
|
| 206 |
+
newitem = heap[pos]
|
| 207 |
+
# Follow the path to the root, moving parents down until finding a place
|
| 208 |
+
# newitem fits.
|
| 209 |
+
while pos > startpos:
|
| 210 |
+
parentpos = (pos - 1) >> 1
|
| 211 |
+
parent = heap[parentpos]
|
| 212 |
+
if newitem < parent:
|
| 213 |
+
heap[pos] = parent
|
| 214 |
+
pos = parentpos
|
| 215 |
+
continue
|
| 216 |
+
break
|
| 217 |
+
heap[pos] = newitem
|
| 218 |
+
|
| 219 |
+
# The child indices of heap index pos are already heaps, and we want to make
|
| 220 |
+
# a heap at index pos too. We do this by bubbling the smaller child of
|
| 221 |
+
# pos up (and so on with that child's children, etc) until hitting a leaf,
|
| 222 |
+
# then using _siftdown to move the oddball originally at index pos into place.
|
| 223 |
+
#
|
| 224 |
+
# We *could* break out of the loop as soon as we find a pos where newitem <=
|
| 225 |
+
# both its children, but turns out that's not a good idea, and despite that
|
| 226 |
+
# many books write the algorithm that way. During a heap pop, the last array
|
| 227 |
+
# element is sifted in, and that tends to be large, so that comparing it
|
| 228 |
+
# against values starting from the root usually doesn't pay (= usually doesn't
|
| 229 |
+
# get us out of the loop early). See Knuth, Volume 3, where this is
|
| 230 |
+
# explained and quantified in an exercise.
|
| 231 |
+
#
|
| 232 |
+
# Cutting the # of comparisons is important, since these routines have no
|
| 233 |
+
# way to extract "the priority" from an array element, so that intelligence
|
| 234 |
+
# is likely to be hiding in custom comparison methods, or in array elements
|
| 235 |
+
# storing (priority, record) tuples. Comparisons are thus potentially
|
| 236 |
+
# expensive.
|
| 237 |
+
#
|
| 238 |
+
# On random arrays of length 1000, making this change cut the number of
|
| 239 |
+
# comparisons made by heapify() a little, and those made by exhaustive
|
| 240 |
+
# heappop() a lot, in accord with theory. Here are typical results from 3
|
| 241 |
+
# runs (3 just to demonstrate how small the variance is):
|
| 242 |
+
#
|
| 243 |
+
# Compares needed by heapify Compares needed by 1000 heappops
|
| 244 |
+
# -------------------------- --------------------------------
|
| 245 |
+
# 1837 cut to 1663 14996 cut to 8680
|
| 246 |
+
# 1855 cut to 1659 14966 cut to 8678
|
| 247 |
+
# 1847 cut to 1660 15024 cut to 8703
|
| 248 |
+
#
|
| 249 |
+
# Building the heap by using heappush() 1000 times instead required
|
| 250 |
+
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
|
| 251 |
+
# you can use it.
|
| 252 |
+
#
|
| 253 |
+
# The total compares needed by list.sort() on the same lists were 8627,
|
| 254 |
+
# 8627, and 8632 (this should be compared to the sum of heapify() and
|
| 255 |
+
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
|
| 256 |
+
# for sorting.
|
| 257 |
+
|
| 258 |
+
def _siftup(heap, pos):
    """Make a valid heap rooted at *pos*, assuming both subtrees are heaps.

    Bubbles the smaller-child chain up unconditionally until reaching a
    leaf, then sifts the displaced item back up with _siftdown.  The
    deliberate absence of an early exit is explained in the long block
    comment above: it minimizes comparisons for the common heappop case.
    """
    endpos = len(heap)
    startpos = pos
    newitem = heap[pos]
    # Bubble up the smaller child until hitting a leaf.
    childpos = 2*pos + 1    # leftmost child position
    while childpos < endpos:
        # Set childpos to index of smaller child.
        rightpos = childpos + 1
        # 'not a < b' rather than 'b <= a': elements need only __lt__.
        if rightpos < endpos and not heap[childpos] < heap[rightpos]:
            childpos = rightpos
        # Move the smaller child up.
        heap[pos] = heap[childpos]
        pos = childpos
        childpos = 2*pos + 1
    # The leaf at pos is empty now.  Put newitem there, and bubble it up
    # to its final resting place (by sifting its parents down).
    heap[pos] = newitem
    _siftdown(heap, startpos, pos)
|
| 277 |
+
|
| 278 |
+
def _siftdown_max(heap, startpos, pos):
|
| 279 |
+
'Maxheap variant of _siftdown'
|
| 280 |
+
newitem = heap[pos]
|
| 281 |
+
# Follow the path to the root, moving parents down until finding a place
|
| 282 |
+
# newitem fits.
|
| 283 |
+
while pos > startpos:
|
| 284 |
+
parentpos = (pos - 1) >> 1
|
| 285 |
+
parent = heap[parentpos]
|
| 286 |
+
if parent < newitem:
|
| 287 |
+
heap[pos] = parent
|
| 288 |
+
pos = parentpos
|
| 289 |
+
continue
|
| 290 |
+
break
|
| 291 |
+
heap[pos] = newitem
|
| 292 |
+
|
| 293 |
+
def _siftup_max(heap, pos):
    """Maxheap variant of _siftup: see _siftup for the no-early-exit rationale."""
    endpos = len(heap)
    startpos = pos
    newitem = heap[pos]
    # Bubble up the larger child until hitting a leaf.
    childpos = 2*pos + 1    # leftmost child position
    while childpos < endpos:
        # Set childpos to index of larger child.
        rightpos = childpos + 1
        # 'not a < b' rather than 'b <= a': elements need only __lt__.
        if rightpos < endpos and not heap[rightpos] < heap[childpos]:
            childpos = rightpos
        # Move the larger child up.
        heap[pos] = heap[childpos]
        pos = childpos
        childpos = 2*pos + 1
    # The leaf at pos is empty now.  Put newitem there, and bubble it up
    # to its final resting place (by sifting its parents down).
    heap[pos] = newitem
    _siftdown_max(heap, startpos, pos)
|
| 313 |
+
|
| 314 |
+
def merge(*iterables, key=None, reverse=False):
    '''Merge multiple sorted inputs into a single sorted output.

    Similar to sorted(itertools.chain(*iterables)) but returns a generator,
    does not pull the data into memory all at once, and assumes that each of
    the input streams is already sorted (smallest to largest).

    >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
    [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]

    If *key* is not None, applies a key function to each element to determine
    its sort order.

    >>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len))
    ['dog', 'cat', 'fish', 'horse', 'kangaroo']

    '''

    h = []
    h_append = h.append

    # reverse=True merges largest-to-smallest via the private max-heap
    # helpers; 'direction' flips the tiebreak index so equal elements
    # keep a stable order in both directions.
    if reverse:
        _heapify = _heapify_max
        _heappop = _heappop_max
        _heapreplace = _heapreplace_max
        direction = -1
    else:
        _heapify = heapify
        _heappop = heappop
        _heapreplace = heapreplace
        direction = 1

    if key is None:
        # Heap entries are mutable lists [value, order, next-bound-method];
        # 'order' breaks ties so the merge is stable across inputs.
        for order, it in enumerate(map(iter, iterables)):
            try:
                next = it.__next__
                h_append([next(), order * direction, next])
            except StopIteration:
                pass
        _heapify(h)
        while len(h) > 1:
            try:
                while True:
                    value, order, next = s = h[0]
                    yield value
                    s[0] = next()           # raises StopIteration when exhausted
                    _heapreplace(h, s)      # restore heap condition
            except StopIteration:
                _heappop(h)                 # remove empty iterator
        if h:
            # fast case when only a single iterator remains
            value, order, next = h[0]
            yield value
            yield from next.__self__
        return

    # Keyed variant: entries also carry the raw value so key() is called
    # exactly once per element.
    for order, it in enumerate(map(iter, iterables)):
        try:
            next = it.__next__
            value = next()
            h_append([key(value), order * direction, value, next])
        except StopIteration:
            pass
    _heapify(h)
    while len(h) > 1:
        try:
            while True:
                key_value, order, value, next = s = h[0]
                yield value
                value = next()
                s[0] = key(value)
                s[2] = value
                _heapreplace(h, s)
        except StopIteration:
            _heappop(h)
    if h:
        key_value, order, value, next = h[0]
        yield value
        yield from next.__self__
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
# Algorithm notes for nlargest() and nsmallest()
|
| 396 |
+
# ==============================================
|
| 397 |
+
#
|
| 398 |
+
# Make a single pass over the data while keeping the k most extreme values
|
| 399 |
+
# in a heap. Memory consumption is limited to keeping k values in a list.
|
| 400 |
+
#
|
| 401 |
+
# Measured performance for random inputs:
|
| 402 |
+
#
|
| 403 |
+
# number of comparisons
|
| 404 |
+
# n inputs k-extreme values (average of 5 trials) % more than min()
|
| 405 |
+
# ------------- ---------------- --------------------- -----------------
|
| 406 |
+
# 1,000 100 3,317 231.7%
|
| 407 |
+
# 10,000 100 14,046 40.5%
|
| 408 |
+
# 100,000 100 105,749 5.7%
|
| 409 |
+
# 1,000,000 100 1,007,751 0.8%
|
| 410 |
+
# 10,000,000 100 10,009,401 0.1%
|
| 411 |
+
#
|
| 412 |
+
# Theoretical number of comparisons for k smallest of n random inputs:
|
| 413 |
+
#
|
| 414 |
+
# Step Comparisons Action
|
| 415 |
+
# ---- -------------------------- ---------------------------
|
| 416 |
+
# 1 1.66 * k heapify the first k-inputs
|
| 417 |
+
# 2 n - k compare remaining elements to top of heap
|
| 418 |
+
# 3 k * (1 + lg2(k)) * ln(n/k) replace the topmost value on the heap
|
| 419 |
+
# 4 k * lg2(k) - (k/2) final sort of the k most extreme values
|
| 420 |
+
#
|
| 421 |
+
# Combining and simplifying for a rough estimate gives:
|
| 422 |
+
#
|
| 423 |
+
# comparisons = n + k * (log(k, 2) * log(n/k) + log(k, 2) + log(n/k))
|
| 424 |
+
#
|
| 425 |
+
# Computing the number of comparisons for step 3:
|
| 426 |
+
# -----------------------------------------------
|
| 427 |
+
# * For the i-th new value from the iterable, the probability of being in the
|
| 428 |
+
# k most extreme values is k/i. For example, the probability of the 101st
|
| 429 |
+
# value seen being in the 100 most extreme values is 100/101.
|
| 430 |
+
# * If the value is a new extreme value, the cost of inserting it into the
|
| 431 |
+
# heap is 1 + log(k, 2).
|
| 432 |
+
# * The probability times the cost gives:
|
| 433 |
+
# (k/i) * (1 + log(k, 2))
|
| 434 |
+
# * Summing across the remaining n-k elements gives:
|
| 435 |
+
# sum((k/i) * (1 + log(k, 2)) for i in range(k+1, n+1))
|
| 436 |
+
# * This reduces to:
|
| 437 |
+
# (H(n) - H(k)) * k * (1 + log(k, 2))
|
| 438 |
+
# * Where H(n) is the n-th harmonic number estimated by:
|
| 439 |
+
# gamma = 0.5772156649
|
| 440 |
+
# H(n) = log(n, e) + gamma + 1 / (2 * n)
|
| 441 |
+
# http://en.wikipedia.org/wiki/Harmonic_series_(mathematics)#Rate_of_divergence
|
| 442 |
+
# * Substituting the H(n) formula:
|
| 443 |
+
# comparisons = k * (1 + log(k, 2)) * (log(n/k, e) + (1/n - 1/k) / 2)
|
| 444 |
+
#
|
| 445 |
+
# Worst-case for step 3:
|
| 446 |
+
# ----------------------
|
| 447 |
+
# In the worst case, the input data is reversed sorted so that every new element
|
| 448 |
+
# must be inserted in the heap:
|
| 449 |
+
#
|
| 450 |
+
# comparisons = 1.66 * k + log(k, 2) * (n - k)
|
| 451 |
+
#
|
| 452 |
+
# Alternative Algorithms
|
| 453 |
+
# ----------------------
|
| 454 |
+
# Other algorithms were not used because they:
|
| 455 |
+
# 1) Took much more auxiliary memory,
|
| 456 |
+
# 2) Made multiple passes over the data.
|
| 457 |
+
# 3) Made more comparisons in common cases (small k, large n, semi-random input).
|
| 458 |
+
# See the more detailed comparison of approach at:
|
| 459 |
+
# http://code.activestate.com/recipes/577573-compare-algorithms-for-heapqsmallest
|
| 460 |
+
|
| 461 |
+
def nsmallest(n, iterable, key=None):
    """Find the n smallest elements in a dataset.

    Equivalent to: sorted(iterable, key=key)[:n]
    """

    # A single smallest element is exactly what min() computes.
    if n == 1:
        stream = iter(iterable)
        missing = object()
        smallest = min(stream, default=missing, key=key)
        return [smallest] if smallest is not missing else []

    # For sized inputs that n covers entirely, plain sorting is faster.
    try:
        length = len(iterable)
    except (TypeError, AttributeError):
        pass
    else:
        if n >= length:
            return sorted(iterable, key=key)[:n]

    stream = iter(iterable)
    if key is None:
        # Decorate each value with a serial number so ties compare by
        # arrival order (keeping the sort stable).  range(n) is placed
        # first so zip() never pulls one extra item off the iterator.
        heap = [(value, serial) for serial, value in zip(range(n), stream)]
        if not heap:
            return heap
        _heapify_max(heap)
        ceiling = heap[0][0]
        serial = n
        replace = _heapreplace_max
        for value in stream:
            if value < ceiling:
                replace(heap, (value, serial))
                ceiling = heap[0][0]
                serial += 1
        heap.sort()
        return [value for (value, serial) in heap]

    # General case: decorate as (key, serial, value) triples so the heap
    # orders by key, then arrival order, without ever comparing values.
    heap = [(key(value), serial, value)
            for serial, value in zip(range(n), stream)]
    if not heap:
        return heap
    _heapify_max(heap)
    ceiling = heap[0][0]
    serial = n
    replace = _heapreplace_max
    for value in stream:
        rank = key(value)
        if rank < ceiling:
            replace(heap, (rank, serial, value))
            ceiling = heap[0][0]
            serial += 1
    heap.sort()
    return [value for (rank, serial, value) in heap]
|
| 520 |
+
|
| 521 |
+
def nlargest(n, iterable, key=None):
    """Find the n largest elements in a dataset.

    Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
    """

    # A single largest element is exactly what max() computes.
    if n == 1:
        stream = iter(iterable)
        missing = object()
        largest = max(stream, default=missing, key=key)
        return [largest] if largest is not missing else []

    # For sized inputs that n covers entirely, plain sorting is faster.
    try:
        length = len(iterable)
    except (TypeError, AttributeError):
        pass
    else:
        if n >= length:
            return sorted(iterable, key=key, reverse=True)[:n]

    stream = iter(iterable)
    if key is None:
        # Serial numbers count downward (0, -1, -2, ...) so that equal
        # values keep first-seen order after the final reverse sort.
        heap = [(value, serial)
                for serial, value in zip(range(0, -n, -1), stream)]
        if not heap:
            return heap
        heapify(heap)
        floor = heap[0][0]
        serial = -n
        replace = heapreplace
        for value in stream:
            if floor < value:
                replace(heap, (value, serial))
                floor = heap[0][0]
                serial -= 1
        heap.sort(reverse=True)
        return [value for (value, serial) in heap]

    # General case: decorate as (key, serial, value) triples so the heap
    # orders by key, then arrival order, without ever comparing values.
    heap = [(key(value), serial, value)
            for serial, value in zip(range(0, -n, -1), stream)]
    if not heap:
        return heap
    heapify(heap)
    floor = heap[0][0]
    serial = -n
    replace = heapreplace
    for value in stream:
        rank = key(value)
        if floor < rank:
            replace(heap, (rank, serial, value))
            floor = heap[0][0]
            serial -= 1
    heap.sort(reverse=True)
    return [value for (rank, serial, value) in heap]
|
| 578 |
+
|
| 579 |
+
# If available, use C implementation
# The C versions, when importable, replace the pure-Python definitions
# above.  Each underscore-prefixed name needs its own explicit import
# because a leading underscore keeps it out of "from _heapq import *";
# separate try blocks let any subset that exists still be picked up.
try:
    from _heapq import *
except ImportError:
    pass
try:
    from _heapq import _heapreplace_max
except ImportError:
    pass
try:
    from _heapq import _heapify_max
except ImportError:
    pass
try:
    from _heapq import _heappop_max
except ImportError:
    pass
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
if __name__ == "__main__":

    # Running the module directly executes its docstring examples as a
    # self-test and prints the doctest summary.
    import doctest # pragma: no cover
    print(doctest.testmod()) # pragma: no cover
|
llava/lib/python3.10/imp.py
ADDED
|
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module provides the components needed to build your own __import__
|
| 2 |
+
function. Undocumented functions are obsolete.
|
| 3 |
+
|
| 4 |
+
In most cases it is preferred you consider using the importlib module's
|
| 5 |
+
functionality over this module.
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
# (Probably) need to stay in _imp
|
| 9 |
+
from _imp import (lock_held, acquire_lock, release_lock,
|
| 10 |
+
get_frozen_object, is_frozen_package,
|
| 11 |
+
init_frozen, is_builtin, is_frozen,
|
| 12 |
+
_fix_co_filename)
|
| 13 |
+
try:
|
| 14 |
+
from _imp import create_dynamic
|
| 15 |
+
except ImportError:
|
| 16 |
+
# Platform doesn't support dynamic loading.
|
| 17 |
+
create_dynamic = None
|
| 18 |
+
|
| 19 |
+
from importlib._bootstrap import _ERR_MSG, _exec, _load, _builtin_from_name
|
| 20 |
+
from importlib._bootstrap_external import SourcelessFileLoader
|
| 21 |
+
|
| 22 |
+
from importlib import machinery
|
| 23 |
+
from importlib import util
|
| 24 |
+
import importlib
|
| 25 |
+
import os
|
| 26 |
+
import sys
|
| 27 |
+
import tokenize
|
| 28 |
+
import types
|
| 29 |
+
import warnings
|
| 30 |
+
|
| 31 |
+
warnings.warn("the imp module is deprecated in favour of importlib and slated "
|
| 32 |
+
"for removal in Python 3.12; "
|
| 33 |
+
"see the module's documentation for alternative uses",
|
| 34 |
+
DeprecationWarning, stacklevel=2)
|
| 35 |
+
|
| 36 |
+
# DEPRECATED
# Legacy module-type codes.  find_module() returns one of these in the
# third element of its result tuple, and load_module() below dispatches
# on them.  Codes not handled by load_module() in this file (e.g.
# SEARCH_ERROR, PY_RESOURCE, PY_CODERESOURCE, IMP_HOOK) are presumably
# kept only for backward compatibility -- they raise ImportError there.
SEARCH_ERROR = 0
PY_SOURCE = 1
PY_COMPILED = 2
C_EXTENSION = 3
PY_RESOURCE = 4
PKG_DIRECTORY = 5
C_BUILTIN = 6
PY_FROZEN = 7
PY_CODERESOURCE = 8
IMP_HOOK = 9
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def new_module(name):
    """**DEPRECATED**

    Create a new module.

    The module is not entered into sys.modules.

    """
    # A bare ModuleType instance; the caller is responsible for
    # registering it in sys.modules if it should be importable.
    module = types.ModuleType(name)
    return module
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def get_magic():
    """**DEPRECATED**

    Return the magic number for .pyc files.
    """
    # importlib.util owns the canonical value; just hand it through.
    magic = util.MAGIC_NUMBER
    return magic
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def get_tag():
    """Return the magic tag for .pyc files."""
    # Taken straight from the running interpreter (e.g. 'cpython-310').
    tag = sys.implementation.cache_tag
    return tag
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def cache_from_source(path, debug_override=None):
    """**DEPRECATED**

    Given the path to a .py file, return the path to its .pyc file.

    The .py file does not need to exist; the .pyc path is computed as if
    the .py file were imported.

    debug_override, when not None, must be a boolean and is used instead
    of sys.flags.optimize.

    Raises NotImplementedError when sys.implementation.cache_tag is None.
    """
    # importlib.util.cache_from_source() warns about the deprecated
    # debug_override argument; suppress that so callers of this legacy
    # wrapper are not warned twice.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        return util.cache_from_source(path, debug_override)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def source_from_cache(path):
    """**DEPRECATED**

    Given the path to a .pyc. file, return the path to its .py file.

    The .pyc file does not need to exist; the .py path is simply the one
    that would correspond to it.  Raises ValueError when path does not
    follow PEP 3147 layout, and NotImplementedError when
    sys.implementation.cache_tag is None.
    """
    # Pure delegation to importlib.util.
    return util.source_from_cache(path)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def get_suffixes():
    """**DEPRECATED**"""
    # Rebuild the legacy (suffix, open-mode, type-code) triples from the
    # suffix lists importlib.machinery maintains, preserving the
    # historical order: extensions, then source, then bytecode.
    triples = []
    for suffix in machinery.EXTENSION_SUFFIXES:
        triples.append((suffix, 'rb', C_EXTENSION))
    for suffix in machinery.SOURCE_SUFFIXES:
        triples.append((suffix, 'r', PY_SOURCE))
    for suffix in machinery.BYTECODE_SUFFIXES:
        triples.append((suffix, 'rb', PY_COMPILED))
    return triples
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class NullImporter:

    """**DEPRECATED**

    Null import object.

    """

    def __init__(self, path):
        # Reject the two path values the legacy protocol disallowed:
        # the empty string and any existing directory.
        if path == '':
            raise ImportError('empty pathname', path='')
        if os.path.isdir(path):
            raise ImportError('existing directory', path=path)

    def find_module(self, fullname):
        """Always returns None."""
        return None
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
class _HackedGetData:

    """Compatibility support for 'file' arguments of various load_*()
    functions."""

    # NOTE(review): this is a mixin -- super().__init__(fullname, path)
    # and the self.path reference below assume it is listed before a
    # loader class (see _LoadSourceCompatibility / _LoadCompiledCompatibility)
    # that provides those.

    def __init__(self, fullname, path, file=None):
        super().__init__(fullname, path)
        # The caller-supplied (possibly already-open) file object, or None.
        self.file = file

    def get_data(self, path):
        """Gross hack to contort loader to deal w/ load_*()'s bad API."""
        if self.file and path == self.path:
            # The contract of get_data() requires us to return bytes. Reopen the
            # file in binary mode if needed.
            if not self.file.closed:
                file = self.file
                if 'b' not in file.mode:
                    # Text-mode handle can't satisfy the bytes contract;
                    # close it so the reopen below kicks in.
                    file.close()
            if self.file.closed:
                self.file = file = open(self.path, 'rb')

            # Consume and close the handle; a later call falls through to
            # the normal loader path below.
            with file:
                return file.read()
        else:
            # Not our cached file (or no file given): defer to the real
            # loader's get_data().
            return super().get_data(path)
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
class _LoadSourceCompatibility(_HackedGetData, machinery.SourceFileLoader):

    """Compatibility support for implementing load_source()."""
    # _HackedGetData is listed first so its get_data() override takes
    # precedence over SourceFileLoader's in the MRO.
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def load_source(name, pathname, file=None):
    """**DEPRECATED**

    Load and return a Python source module, executing it from *pathname*
    (optionally reading through an already-open *file* object).  An
    existing entry in sys.modules for *name* is re-executed in place;
    otherwise a fresh module is created and loaded.
    """
    loader = _LoadSourceCompatibility(name, pathname, file)
    spec = util.spec_from_file_location(name, pathname, loader=loader)
    if name in sys.modules:
        # Re-execute into the existing module object (reload semantics).
        module = _exec(spec, sys.modules[name])
    else:
        module = _load(spec)
    # To allow reloading to potentially work, use a non-hacked loader which
    # won't rely on a now-closed file object.
    module.__loader__ = machinery.SourceFileLoader(name, pathname)
    module.__spec__.loader = module.__loader__
    return module
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class _LoadCompiledCompatibility(_HackedGetData, SourcelessFileLoader):

    """Compatibility support for implementing load_compiled()."""
    # _HackedGetData is listed first so its get_data() override takes
    # precedence over SourcelessFileLoader's in the MRO.
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def load_compiled(name, pathname, file=None):
    """**DEPRECATED**"""
    # Mirrors load_source() but loads a bytecode (.pyc) file through
    # SourcelessFileLoader instead of SourceFileLoader.
    loader = _LoadCompiledCompatibility(name, pathname, file)
    spec = util.spec_from_file_location(name, pathname, loader=loader)
    if name in sys.modules:
        # Re-execute into the existing module object (reload semantics).
        module = _exec(spec, sys.modules[name])
    else:
        module = _load(spec)
    # To allow reloading to potentially work, use a non-hacked loader which
    # won't rely on a now-closed file object.
    module.__loader__ = SourcelessFileLoader(name, pathname)
    module.__spec__.loader = module.__loader__
    return module
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def load_package(name, path):
    """**DEPRECATED**"""
    # *path* may be the package directory or directly its __init__ file.
    if os.path.isdir(path):
        # Locate the package's __init__ file, trying source suffixes
        # before bytecode suffixes.
        extensions = (machinery.SOURCE_SUFFIXES[:] +
                      machinery.BYTECODE_SUFFIXES[:])
        for extension in extensions:
            init_path = os.path.join(path, '__init__' + extension)
            if os.path.exists(init_path):
                path = init_path
                break
        else:
            # Directory without any __init__ file: not a (legacy) package.
            raise ValueError('{!r} is not a package'.format(path))
    spec = util.spec_from_file_location(name, path,
                                        submodule_search_locations=[])
    if name in sys.modules:
        # Re-execute into the existing module object (reload semantics).
        return _exec(spec, sys.modules[name])
    else:
        return _load(spec)
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def load_module(name, file, filename, details):
    """**DEPRECATED**

    Load a module, given information returned by find_module().

    The module name must include the full package name, if any.

    """
    # *details* is the (suffix, mode, type_) triple from find_module();
    # dispatch on the legacy type code.
    suffix, mode, type_ = details
    if mode and (not mode.startswith(('r', 'U')) or '+' in mode):
        # Only read-only open modes ever made sense here.
        raise ValueError('invalid file open mode {!r}'.format(mode))
    elif file is None and type_ in {PY_SOURCE, PY_COMPILED}:
        msg = 'file object required for import (type code {})'.format(type_)
        raise ValueError(msg)
    elif type_ == PY_SOURCE:
        return load_source(name, filename, file)
    elif type_ == PY_COMPILED:
        return load_compiled(name, filename, file)
    elif type_ == C_EXTENSION and load_dynamic is not None:
        # load_dynamic is None when the platform lacks dynamic loading
        # (see the create_dynamic import at the top of the file).
        if file is None:
            with open(filename, 'rb') as opened_file:
                return load_dynamic(name, filename, opened_file)
        else:
            return load_dynamic(name, filename, file)
    elif type_ == PKG_DIRECTORY:
        return load_package(name, filename)
    elif type_ == C_BUILTIN:
        return init_builtin(name)
    elif type_ == PY_FROZEN:
        return init_frozen(name)
    else:
        # Unknown/unsupported type code (e.g. SEARCH_ERROR, PY_RESOURCE).
        msg = "Don't know how to import {} (type code {})".format(name, type_)
        raise ImportError(msg, name=name)
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def find_module(name, path=None):
    """**DEPRECATED**

    Search for a module.

    If path is omitted or None, search for a built-in, frozen or special
    module and continue search in sys.path. The module name cannot
    contain '.'; to search for a submodule of a package, pass the
    submodule name and the package's __path__.

    Returns a (file, file_path, (suffix, mode, type_)) triple suitable
    for passing to load_module().  file is None for built-in, frozen and
    package results.  Raises ImportError when nothing is found.
    """
    if not isinstance(name, str):
        raise TypeError("'name' must be a str, not {}".format(type(name)))
    elif not isinstance(path, (type(None), list)):
        # Backwards-compatibility
        raise RuntimeError("'path' must be None or a list, "
                           "not {}".format(type(path)))

    if path is None:
        # Built-ins and frozen modules need no file; otherwise fall back
        # to scanning sys.path.
        if is_builtin(name):
            return None, None, ('', '', C_BUILTIN)
        elif is_frozen(name):
            return None, None, ('', '', PY_FROZEN)
        else:
            path = sys.path

    for entry in path:
        # A directory containing an __init__ source/bytecode file is a
        # package and wins over a plain module of the same name.
        package_directory = os.path.join(entry, name)
        for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]:
            package_file_name = '__init__' + suffix
            file_path = os.path.join(package_directory, package_file_name)
            if os.path.isfile(file_path):
                return None, package_directory, ('', '', PKG_DIRECTORY)
        # Otherwise look for a file with any recognized suffix.
        for suffix, mode, type_ in get_suffixes():
            file_name = name + suffix
            file_path = os.path.join(entry, file_name)
            if os.path.isfile(file_path):
                break
        else:
            continue
        break  # Break out of outer loop when breaking out of inner loop.
    else:
        # Exhausted every path entry without a hit.
        raise ImportError(_ERR_MSG.format(name), name=name)

    encoding = None
    if 'b' not in mode:
        # Text-mode open: honor any PEP 263 coding declaration in the file.
        with open(file_path, 'rb') as file:
            encoding = tokenize.detect_encoding(file.readline)[0]
    # The caller takes ownership of this open file object.
    file = open(file_path, mode, encoding=encoding)
    return file, file_path, (suffix, mode, type_)
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def reload(module):
    """**DEPRECATED**

    Reload the module and return it.

    The module must have been successfully imported before.

    """
    # Thin shim: importlib.reload() implements the real logic.
    reloaded = importlib.reload(module)
    return reloaded
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def init_builtin(name):
    """**DEPRECATED**

    Load and return a built-in module by name, or None if no such
    built-in module exists.
    """
    # _builtin_from_name() raises ImportError for non-builtins; the
    # legacy contract maps that to a None return.
    try:
        module = _builtin_from_name(name)
    except ImportError:
        return None
    return module
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
if create_dynamic:
    def load_dynamic(name, path, file=None):
        """**DEPRECATED**

        Load an extension module.
        """
        # *file* is accepted for API compatibility but never used.
        import importlib.machinery
        loader = importlib.machinery.ExtensionFileLoader(name, path)

        # Issue #24748: Skip the sys.modules check in _load_module_shim;
        # always load new extension
        spec = importlib.machinery.ModuleSpec(
            name=name, loader=loader, origin=path)
        return _load(spec)

else:
    # Platform without dynamic loading (create_dynamic is None above);
    # load_module() checks for this None before dispatching C_EXTENSION.
    load_dynamic = None
|
llava/lib/python3.10/inspect.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
llava/lib/python3.10/lzma.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Interface to the liblzma compression library.
|
| 2 |
+
|
| 3 |
+
This module provides a class for reading and writing compressed files,
|
| 4 |
+
classes for incremental (de)compression, and convenience functions for
|
| 5 |
+
one-shot (de)compression.
|
| 6 |
+
|
| 7 |
+
These classes and functions support both the XZ and legacy LZMA
|
| 8 |
+
container formats, as well as raw compressed data streams.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__all__ = [
|
| 12 |
+
"CHECK_NONE", "CHECK_CRC32", "CHECK_CRC64", "CHECK_SHA256",
|
| 13 |
+
"CHECK_ID_MAX", "CHECK_UNKNOWN",
|
| 14 |
+
"FILTER_LZMA1", "FILTER_LZMA2", "FILTER_DELTA", "FILTER_X86", "FILTER_IA64",
|
| 15 |
+
"FILTER_ARM", "FILTER_ARMTHUMB", "FILTER_POWERPC", "FILTER_SPARC",
|
| 16 |
+
"FORMAT_AUTO", "FORMAT_XZ", "FORMAT_ALONE", "FORMAT_RAW",
|
| 17 |
+
"MF_HC3", "MF_HC4", "MF_BT2", "MF_BT3", "MF_BT4",
|
| 18 |
+
"MODE_FAST", "MODE_NORMAL", "PRESET_DEFAULT", "PRESET_EXTREME",
|
| 19 |
+
|
| 20 |
+
"LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError",
|
| 21 |
+
"open", "compress", "decompress", "is_check_supported",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
import builtins
|
| 25 |
+
import io
|
| 26 |
+
import os
|
| 27 |
+
from _lzma import *
|
| 28 |
+
from _lzma import _encode_filter_properties, _decode_filter_properties
|
| 29 |
+
import _compression
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# Internal LZMAFile states (see LZMAFile._mode).
_MODE_CLOSED = 0  # closed, or never successfully opened
_MODE_READ = 1  # open for decompressing reads
# Value 2 no longer used
_MODE_WRITE = 3  # open for compressing writes
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class LZMAFile(_compression.BaseStream):
|
| 39 |
+
|
| 40 |
+
"""A file object providing transparent LZMA (de)compression.
|
| 41 |
+
|
| 42 |
+
An LZMAFile can act as a wrapper for an existing file object, or
|
| 43 |
+
refer directly to a named file on disk.
|
| 44 |
+
|
| 45 |
+
Note that LZMAFile provides a *binary* file interface - data read
|
| 46 |
+
is returned as bytes, and data to be written must be given as bytes.
|
| 47 |
+
"""
|
| 48 |
+
|
| 49 |
+
def __init__(self, filename=None, mode="r", *,
|
| 50 |
+
format=None, check=-1, preset=None, filters=None):
|
| 51 |
+
"""Open an LZMA-compressed file in binary mode.
|
| 52 |
+
|
| 53 |
+
filename can be either an actual file name (given as a str,
|
| 54 |
+
bytes, or PathLike object), in which case the named file is
|
| 55 |
+
opened, or it can be an existing file object to read from or
|
| 56 |
+
write to.
|
| 57 |
+
|
| 58 |
+
mode can be "r" for reading (default), "w" for (over)writing,
|
| 59 |
+
"x" for creating exclusively, or "a" for appending. These can
|
| 60 |
+
equivalently be given as "rb", "wb", "xb" and "ab" respectively.
|
| 61 |
+
|
| 62 |
+
format specifies the container format to use for the file.
|
| 63 |
+
If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the
|
| 64 |
+
default is FORMAT_XZ.
|
| 65 |
+
|
| 66 |
+
check specifies the integrity check to use. This argument can
|
| 67 |
+
only be used when opening a file for writing. For FORMAT_XZ,
|
| 68 |
+
the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not
|
| 69 |
+
support integrity checks - for these formats, check must be
|
| 70 |
+
omitted, or be CHECK_NONE.
|
| 71 |
+
|
| 72 |
+
When opening a file for reading, the *preset* argument is not
|
| 73 |
+
meaningful, and should be omitted. The *filters* argument should
|
| 74 |
+
also be omitted, except when format is FORMAT_RAW (in which case
|
| 75 |
+
it is required).
|
| 76 |
+
|
| 77 |
+
When opening a file for writing, the settings used by the
|
| 78 |
+
compressor can be specified either as a preset compression
|
| 79 |
+
level (with the *preset* argument), or in detail as a custom
|
| 80 |
+
filter chain (with the *filters* argument). For FORMAT_XZ and
|
| 81 |
+
FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset
|
| 82 |
+
level. For FORMAT_RAW, the caller must always specify a filter
|
| 83 |
+
chain; the raw compressor does not support preset compression
|
| 84 |
+
levels.
|
| 85 |
+
|
| 86 |
+
preset (if provided) should be an integer in the range 0-9,
|
| 87 |
+
optionally OR-ed with the constant PRESET_EXTREME.
|
| 88 |
+
|
| 89 |
+
filters (if provided) should be a sequence of dicts. Each dict
|
| 90 |
+
should have an entry for "id" indicating ID of the filter, plus
|
| 91 |
+
additional entries for options to the filter.
|
| 92 |
+
"""
|
| 93 |
+
self._fp = None
|
| 94 |
+
self._closefp = False
|
| 95 |
+
self._mode = _MODE_CLOSED
|
| 96 |
+
|
| 97 |
+
if mode in ("r", "rb"):
|
| 98 |
+
if check != -1:
|
| 99 |
+
raise ValueError("Cannot specify an integrity check "
|
| 100 |
+
"when opening a file for reading")
|
| 101 |
+
if preset is not None:
|
| 102 |
+
raise ValueError("Cannot specify a preset compression "
|
| 103 |
+
"level when opening a file for reading")
|
| 104 |
+
if format is None:
|
| 105 |
+
format = FORMAT_AUTO
|
| 106 |
+
mode_code = _MODE_READ
|
| 107 |
+
elif mode in ("w", "wb", "a", "ab", "x", "xb"):
|
| 108 |
+
if format is None:
|
| 109 |
+
format = FORMAT_XZ
|
| 110 |
+
mode_code = _MODE_WRITE
|
| 111 |
+
self._compressor = LZMACompressor(format=format, check=check,
|
| 112 |
+
preset=preset, filters=filters)
|
| 113 |
+
self._pos = 0
|
| 114 |
+
else:
|
| 115 |
+
raise ValueError("Invalid mode: {!r}".format(mode))
|
| 116 |
+
|
| 117 |
+
if isinstance(filename, (str, bytes, os.PathLike)):
|
| 118 |
+
if "b" not in mode:
|
| 119 |
+
mode += "b"
|
| 120 |
+
self._fp = builtins.open(filename, mode)
|
| 121 |
+
self._closefp = True
|
| 122 |
+
self._mode = mode_code
|
| 123 |
+
elif hasattr(filename, "read") or hasattr(filename, "write"):
|
| 124 |
+
self._fp = filename
|
| 125 |
+
self._mode = mode_code
|
| 126 |
+
else:
|
| 127 |
+
raise TypeError("filename must be a str, bytes, file or PathLike object")
|
| 128 |
+
|
| 129 |
+
if self._mode == _MODE_READ:
|
| 130 |
+
raw = _compression.DecompressReader(self._fp, LZMADecompressor,
|
| 131 |
+
trailing_error=LZMAError, format=format, filters=filters)
|
| 132 |
+
self._buffer = io.BufferedReader(raw)
|
| 133 |
+
|
| 134 |
+
def close(self):
|
| 135 |
+
"""Flush and close the file.
|
| 136 |
+
|
| 137 |
+
May be called more than once without error. Once the file is
|
| 138 |
+
closed, any other operation on it will raise a ValueError.
|
| 139 |
+
"""
|
| 140 |
+
if self._mode == _MODE_CLOSED:
|
| 141 |
+
return
|
| 142 |
+
try:
|
| 143 |
+
if self._mode == _MODE_READ:
|
| 144 |
+
self._buffer.close()
|
| 145 |
+
self._buffer = None
|
| 146 |
+
elif self._mode == _MODE_WRITE:
|
| 147 |
+
self._fp.write(self._compressor.flush())
|
| 148 |
+
self._compressor = None
|
| 149 |
+
finally:
|
| 150 |
+
try:
|
| 151 |
+
if self._closefp:
|
| 152 |
+
self._fp.close()
|
| 153 |
+
finally:
|
| 154 |
+
self._fp = None
|
| 155 |
+
self._closefp = False
|
| 156 |
+
self._mode = _MODE_CLOSED
|
| 157 |
+
|
| 158 |
+
@property
|
| 159 |
+
def closed(self):
|
| 160 |
+
"""True if this file is closed."""
|
| 161 |
+
return self._mode == _MODE_CLOSED
|
| 162 |
+
|
| 163 |
+
def fileno(self):
|
| 164 |
+
"""Return the file descriptor for the underlying file."""
|
| 165 |
+
self._check_not_closed()
|
| 166 |
+
return self._fp.fileno()
|
| 167 |
+
|
| 168 |
+
def seekable(self):
|
| 169 |
+
"""Return whether the file supports seeking."""
|
| 170 |
+
return self.readable() and self._buffer.seekable()
|
| 171 |
+
|
| 172 |
+
def readable(self):
|
| 173 |
+
"""Return whether the file was opened for reading."""
|
| 174 |
+
self._check_not_closed()
|
| 175 |
+
return self._mode == _MODE_READ
|
| 176 |
+
|
| 177 |
+
def writable(self):
|
| 178 |
+
"""Return whether the file was opened for writing."""
|
| 179 |
+
self._check_not_closed()
|
| 180 |
+
return self._mode == _MODE_WRITE
|
| 181 |
+
|
| 182 |
+
def peek(self, size=-1):
|
| 183 |
+
"""Return buffered data without advancing the file position.
|
| 184 |
+
|
| 185 |
+
Always returns at least one byte of data, unless at EOF.
|
| 186 |
+
The exact number of bytes returned is unspecified.
|
| 187 |
+
"""
|
| 188 |
+
self._check_can_read()
|
| 189 |
+
# Relies on the undocumented fact that BufferedReader.peek() always
|
| 190 |
+
# returns at least one byte (except at EOF)
|
| 191 |
+
return self._buffer.peek(size)
|
| 192 |
+
|
| 193 |
+
def read(self, size=-1):
|
| 194 |
+
"""Read up to size uncompressed bytes from the file.
|
| 195 |
+
|
| 196 |
+
If size is negative or omitted, read until EOF is reached.
|
| 197 |
+
Returns b"" if the file is already at EOF.
|
| 198 |
+
"""
|
| 199 |
+
self._check_can_read()
|
| 200 |
+
return self._buffer.read(size)
|
| 201 |
+
|
| 202 |
+
def read1(self, size=-1):
|
| 203 |
+
"""Read up to size uncompressed bytes, while trying to avoid
|
| 204 |
+
making multiple reads from the underlying stream. Reads up to a
|
| 205 |
+
buffer's worth of data if size is negative.
|
| 206 |
+
|
| 207 |
+
Returns b"" if the file is at EOF.
|
| 208 |
+
"""
|
| 209 |
+
self._check_can_read()
|
| 210 |
+
if size < 0:
|
| 211 |
+
size = io.DEFAULT_BUFFER_SIZE
|
| 212 |
+
return self._buffer.read1(size)
|
| 213 |
+
|
| 214 |
+
def readline(self, size=-1):
|
| 215 |
+
"""Read a line of uncompressed bytes from the file.
|
| 216 |
+
|
| 217 |
+
The terminating newline (if present) is retained. If size is
|
| 218 |
+
non-negative, no more than size bytes will be read (in which
|
| 219 |
+
case the line may be incomplete). Returns b'' if already at EOF.
|
| 220 |
+
"""
|
| 221 |
+
self._check_can_read()
|
| 222 |
+
return self._buffer.readline(size)
|
| 223 |
+
|
| 224 |
+
def write(self, data):
    """Write a bytes object to the file.

    Returns the number of uncompressed bytes written, which is
    always the length of data in bytes. Note that due to buffering,
    the file on disk may not reflect the data written until close()
    is called.
    """
    self._check_can_write()
    if not isinstance(data, (bytes, bytearray)):
        # Accept any object supporting the buffer protocol.
        data = memoryview(data)
        nbytes = data.nbytes
    else:
        nbytes = len(data)

    self._fp.write(self._compressor.compress(data))
    self._pos += nbytes
    return nbytes
def seek(self, offset, whence=io.SEEK_SET):
    """Change the file position.

    The new position is specified by offset, relative to the
    position indicated by whence. Possible values for whence are:

        0: start of stream (default): offset must not be negative
        1: current stream position
        2: end of stream; offset must not be positive

    Returns the new file position.

    Note that seeking is emulated, so depending on the parameters,
    this operation may be extremely slow.
    """
    self._check_can_seek()
    new_position = self._buffer.seek(offset, whence)
    return new_position
def tell(self):
    """Return the current file position."""
    self._check_not_closed()
    # In read mode the decompressing buffer tracks the logical position;
    # in write mode we track the count of uncompressed bytes ourselves.
    return self._buffer.tell() if self._mode == _MODE_READ else self._pos
+
def open(filename, mode="rb", *,
|
| 272 |
+
format=None, check=-1, preset=None, filters=None,
|
| 273 |
+
encoding=None, errors=None, newline=None):
|
| 274 |
+
"""Open an LZMA-compressed file in binary or text mode.
|
| 275 |
+
|
| 276 |
+
filename can be either an actual file name (given as a str, bytes,
|
| 277 |
+
or PathLike object), in which case the named file is opened, or it
|
| 278 |
+
can be an existing file object to read from or write to.
|
| 279 |
+
|
| 280 |
+
The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb",
|
| 281 |
+
"a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text
|
| 282 |
+
mode.
|
| 283 |
+
|
| 284 |
+
The format, check, preset and filters arguments specify the
|
| 285 |
+
compression settings, as for LZMACompressor, LZMADecompressor and
|
| 286 |
+
LZMAFile.
|
| 287 |
+
|
| 288 |
+
For binary mode, this function is equivalent to the LZMAFile
|
| 289 |
+
constructor: LZMAFile(filename, mode, ...). In this case, the
|
| 290 |
+
encoding, errors and newline arguments must not be provided.
|
| 291 |
+
|
| 292 |
+
For text mode, an LZMAFile object is created, and wrapped in an
|
| 293 |
+
io.TextIOWrapper instance with the specified encoding, error
|
| 294 |
+
handling behavior, and line ending(s).
|
| 295 |
+
|
| 296 |
+
"""
|
| 297 |
+
if "t" in mode:
|
| 298 |
+
if "b" in mode:
|
| 299 |
+
raise ValueError("Invalid mode: %r" % (mode,))
|
| 300 |
+
else:
|
| 301 |
+
if encoding is not None:
|
| 302 |
+
raise ValueError("Argument 'encoding' not supported in binary mode")
|
| 303 |
+
if errors is not None:
|
| 304 |
+
raise ValueError("Argument 'errors' not supported in binary mode")
|
| 305 |
+
if newline is not None:
|
| 306 |
+
raise ValueError("Argument 'newline' not supported in binary mode")
|
| 307 |
+
|
| 308 |
+
lz_mode = mode.replace("t", "")
|
| 309 |
+
binary_file = LZMAFile(filename, lz_mode, format=format, check=check,
|
| 310 |
+
preset=preset, filters=filters)
|
| 311 |
+
|
| 312 |
+
if "t" in mode:
|
| 313 |
+
encoding = io.text_encoding(encoding)
|
| 314 |
+
return io.TextIOWrapper(binary_file, encoding, errors, newline)
|
| 315 |
+
else:
|
| 316 |
+
return binary_file
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
    """Compress a block of data.

    Refer to LZMACompressor's docstring for a description of the
    optional arguments *format*, *check*, *preset* and *filters*.

    For incremental compression, use an LZMACompressor instead.
    """
    compressor = LZMACompressor(format, check, preset, filters)
    # A one-shot compression is just "feed everything, then flush".
    return compressor.compress(data) + compressor.flush()
def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
    """Decompress a block of data.

    Refer to LZMADecompressor's docstring for a description of the
    optional arguments *format*, *check* and *filters*.

    For incremental decompression, use an LZMADecompressor instead.
    """
    chunks = []
    while True:
        # Input may contain several concatenated streams; each needs a
        # fresh decompressor object.
        decomp = LZMADecompressor(format, memlimit, filters)
        try:
            chunk = decomp.decompress(data)
        except LZMAError:
            if not chunks:
                raise   # Error on the first stream; bail out.
            break       # Leftover data is not a valid LZMA/XZ stream; ignore it.
        chunks.append(chunk)
        if not decomp.eof:
            raise LZMAError("Compressed data ended before the "
                            "end-of-stream marker was reached")
        data = decomp.unused_data
        if not data:
            break
    return b"".join(chunks)
llava/lib/python3.10/pprint.py
ADDED
|
@@ -0,0 +1,670 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Author: Fred L. Drake, Jr.
|
| 2 |
+
# fdrake@acm.org
|
| 3 |
+
#
|
| 4 |
+
# This is a simple little module I wrote to make life easier. I didn't
|
| 5 |
+
# see anything quite like it in the library, though I may have overlooked
|
| 6 |
+
# something. I wrote this when I was trying to read some heavily nested
|
| 7 |
+
# tuples with fairly non-descriptive content. This is modeled very much
|
| 8 |
+
# after Lisp/Scheme - style pretty-printing of lists. If you find it
|
| 9 |
+
# useful, thank small children who sleep at night.
|
| 10 |
+
|
| 11 |
+
"""Support to pretty-print lists, tuples, & dictionaries recursively.
|
| 12 |
+
|
| 13 |
+
Very simple, but useful, especially in debugging data structures.
|
| 14 |
+
|
| 15 |
+
Classes
|
| 16 |
+
-------
|
| 17 |
+
|
| 18 |
+
PrettyPrinter()
|
| 19 |
+
Handle pretty-printing operations onto a stream using a configured
|
| 20 |
+
set of formatting parameters.
|
| 21 |
+
|
| 22 |
+
Functions
|
| 23 |
+
---------
|
| 24 |
+
|
| 25 |
+
pformat()
|
| 26 |
+
Format a Python object into a pretty-printed representation.
|
| 27 |
+
|
| 28 |
+
pprint()
|
| 29 |
+
Pretty-print a Python object to a stream [default is sys.stdout].
|
| 30 |
+
|
| 31 |
+
saferepr()
|
| 32 |
+
Generate a 'standard' repr()-like value, but protect against recursive
|
| 33 |
+
data structures.
|
| 34 |
+
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
import collections as _collections
|
| 38 |
+
import dataclasses as _dataclasses
|
| 39 |
+
import re
|
| 40 |
+
import sys as _sys
|
| 41 |
+
import types as _types
|
| 42 |
+
from io import StringIO as _StringIO
|
| 43 |
+
|
| 44 |
+
__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
|
| 45 |
+
"PrettyPrinter", "pp"]
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def pprint(object, stream=None, indent=1, width=80, depth=None, *,
|
| 49 |
+
compact=False, sort_dicts=True, underscore_numbers=False):
|
| 50 |
+
"""Pretty-print a Python object to a stream [default is sys.stdout]."""
|
| 51 |
+
printer = PrettyPrinter(
|
| 52 |
+
stream=stream, indent=indent, width=width, depth=depth,
|
| 53 |
+
compact=compact, sort_dicts=sort_dicts,
|
| 54 |
+
underscore_numbers=underscore_numbers)
|
| 55 |
+
printer.pprint(object)
|
| 56 |
+
|
| 57 |
+
def pformat(object, indent=1, width=80, depth=None, *,
|
| 58 |
+
compact=False, sort_dicts=True, underscore_numbers=False):
|
| 59 |
+
"""Format a Python object into a pretty-printed representation."""
|
| 60 |
+
return PrettyPrinter(indent=indent, width=width, depth=depth,
|
| 61 |
+
compact=compact, sort_dicts=sort_dicts,
|
| 62 |
+
underscore_numbers=underscore_numbers).pformat(object)
|
| 63 |
+
|
| 64 |
+
def pp(object, *args, sort_dicts=False, **kwargs):
    """Pretty-print a Python object (dict keys stay in insertion order
    unless sort_dicts=True is passed)."""
    pprint(object, *args, sort_dicts=sort_dicts, **kwargs)
def saferepr(object):
|
| 69 |
+
"""Version of repr() which can handle recursive data structures."""
|
| 70 |
+
return PrettyPrinter()._safe_repr(object, {}, None, 0)[0]
|
| 71 |
+
|
| 72 |
+
def isreadable(object):
|
| 73 |
+
"""Determine if saferepr(object) is readable by eval()."""
|
| 74 |
+
return PrettyPrinter()._safe_repr(object, {}, None, 0)[1]
|
| 75 |
+
|
| 76 |
+
def isrecursive(object):
|
| 77 |
+
"""Determine if object requires a recursive representation."""
|
| 78 |
+
return PrettyPrinter()._safe_repr(object, {}, None, 0)[2]
|
| 79 |
+
|
| 80 |
+
class _safe_key:
|
| 81 |
+
"""Helper function for key functions when sorting unorderable objects.
|
| 82 |
+
|
| 83 |
+
The wrapped-object will fallback to a Py2.x style comparison for
|
| 84 |
+
unorderable types (sorting first comparing the type name and then by
|
| 85 |
+
the obj ids). Does not work recursively, so dict.items() must have
|
| 86 |
+
_safe_key applied to both the key and the value.
|
| 87 |
+
|
| 88 |
+
"""
|
| 89 |
+
|
| 90 |
+
__slots__ = ['obj']
|
| 91 |
+
|
| 92 |
+
def __init__(self, obj):
|
| 93 |
+
self.obj = obj
|
| 94 |
+
|
| 95 |
+
def __lt__(self, other):
|
| 96 |
+
try:
|
| 97 |
+
return self.obj < other.obj
|
| 98 |
+
except TypeError:
|
| 99 |
+
return ((str(type(self.obj)), id(self.obj)) < \
|
| 100 |
+
(str(type(other.obj)), id(other.obj)))
|
| 101 |
+
|
| 102 |
+
def _safe_tuple(t):
|
| 103 |
+
"Helper function for comparing 2-tuples"
|
| 104 |
+
return _safe_key(t[0]), _safe_key(t[1])
|
| 105 |
+
|
| 106 |
+
class PrettyPrinter:
|
| 107 |
+
def __init__(self, indent=1, width=80, depth=None, stream=None, *,
             compact=False, sort_dicts=True, underscore_numbers=False):
    """Handle pretty printing operations onto a stream using a set of
    configured parameters.

    indent
        Number of spaces to indent for each level of nesting.

    width
        Attempted maximum number of columns in the output.

    depth
        The maximum depth to print out nested structures.

    stream
        The desired output stream.  If omitted (or false), the standard
        output stream available at construction will be used.

    compact
        If true, several items will be combined in one line.

    sort_dicts
        If true, dict keys are sorted.

    """
    indent = int(indent)
    width = int(width)
    # Validate before storing anything, so a bad call leaves no
    # half-initialized printer behind.
    if indent < 0:
        raise ValueError('indent must be >= 0')
    if depth is not None and depth <= 0:
        raise ValueError('depth must be > 0')
    if not width:
        raise ValueError('width must be != 0')
    self._depth = depth
    self._indent_per_level = indent
    self._width = width
    self._stream = stream if stream is not None else _sys.stdout
    self._compact = bool(compact)
    self._sort_dicts = sort_dicts
    self._underscore_numbers = underscore_numbers
def pprint(self, object):
    """Pretty-print *object* to the configured stream, ending with a newline."""
    out = self._stream
    self._format(object, out, 0, 0, {}, 0)
    out.write("\n")
def pformat(self, object):
    """Return the pretty-printed representation of *object* as a string."""
    buffer = _StringIO()
    self._format(object, buffer, 0, 0, {}, 0)
    return buffer.getvalue()
def isrecursive(self, object):
    """Return the 'recursive' flag reported by format() for *object*."""
    _text, _readable, recursive = self.format(object, {}, 0, 0)
    return recursive
def isreadable(self, object):
    """Return True if format()'s output for *object* is eval()-readable
    and the object is not recursive."""
    _text, readable, recursive = self.format(object, {}, 0, 0)
    return readable and not recursive
def _format(self, object, stream, indent, allowance, context, level):
    """Core dispatcher: write *object* to *stream*, recursing through a
    type-specific pretty-printer when the one-line repr is too wide."""
    obj_id = id(object)
    if obj_id in context:
        # Already being printed further up the stack: emit a recursion
        # marker instead of looping forever.
        stream.write(_recursion(object))
        self._recursive = True
        self._readable = False
        return
    text = self._repr(object, context, level)
    if len(text) > self._width - indent - allowance:
        printer = self._dispatch.get(type(object).__repr__, None)
        if printer is not None:
            context[obj_id] = 1
            printer(self, object, stream, indent, allowance, context, level + 1)
            del context[obj_id]
            return
        elif (_dataclasses.is_dataclass(object) and
              not isinstance(object, type) and
              object.__dataclass_params__.repr and
              # Only handle dataclasses whose repr is the generated one.
              hasattr(object.__repr__, "__wrapped__") and
              "__create_fn__" in object.__repr__.__wrapped__.__qualname__):
            context[obj_id] = 1
            self._pprint_dataclass(object, stream, indent, allowance, context, level + 1)
            del context[obj_id]
            return
    stream.write(text)
def _pprint_dataclass(self, object, stream, indent, allowance, context, level):
    """Render a dataclass as ClassName(field=..., ...), one field per line
    when necessary."""
    cls_name = object.__class__.__name__
    shown = [(f.name, getattr(object, f.name))
             for f in _dataclasses.fields(object) if f.repr]
    stream.write(cls_name + '(')
    self._format_namespace_items(shown, stream, indent + len(cls_name) + 1,
                                 allowance, context, level)
    stream.write(')')

# Maps a type's __repr__ to the method that pretty-prints that type.
_dispatch = {}
def _pprint_dict(self, object, stream, indent, allowance, context, level):
    """Render a dict across multiple lines, one key/value pair per line."""
    write = stream.write
    write('{')
    if self._indent_per_level > 1:
        write((self._indent_per_level - 1) * ' ')
    if len(object):
        if self._sort_dicts:
            entries = sorted(object.items(), key=_safe_tuple)
        else:
            entries = object.items()
        self._format_dict_items(entries, stream, indent, allowance + 1,
                                context, level)
    write('}')

_dispatch[dict.__repr__] = _pprint_dict
def _pprint_ordered_dict(self, object, stream, indent, allowance, context, level):
    """Render an OrderedDict as ClassName([(k, v), ...])."""
    if not len(object):
        stream.write(repr(object))
        return
    cls_name = object.__class__.__name__
    stream.write(cls_name + '(')
    self._format(list(object.items()), stream,
                 indent + len(cls_name) + 1, allowance + 1,
                 context, level)
    stream.write(')')

_dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict
def _pprint_list(self, object, stream, indent, allowance, context, level):
    """Render a list, wrapping elements across lines as needed."""
    stream.write('[')
    # +1 allowance reserves room for the closing bracket.
    self._format_items(object, stream, indent, allowance + 1,
                       context, level)
    stream.write(']')

_dispatch[list.__repr__] = _pprint_list
def _pprint_tuple(self, object, stream, indent, allowance, context, level):
    """Render a tuple; a 1-tuple keeps its trailing comma."""
    stream.write('(')
    closing = ',)' if len(object) == 1 else ')'
    self._format_items(object, stream, indent, allowance + len(closing),
                       context, level)
    stream.write(closing)

_dispatch[tuple.__repr__] = _pprint_tuple
def _pprint_set(self, object, stream, indent, allowance, context, level):
    """Render a set/frozenset with elements in _safe_key sorted order."""
    if not len(object):
        stream.write(repr(object))
        return
    typ = object.__class__
    if typ is set:
        opener, closer = '{', '}'
    else:
        # Subclasses and frozenset repr as ClassName({...}).
        opener, closer = typ.__name__ + '({', '})'
        indent += len(typ.__name__) + 1
    stream.write(opener)
    elements = sorted(object, key=_safe_key)
    self._format_items(elements, stream, indent, allowance + len(closer),
                       context, level)
    stream.write(closer)

_dispatch[set.__repr__] = _pprint_set
_dispatch[frozenset.__repr__] = _pprint_set
def _pprint_str(self, object, stream, indent, allowance, context, level):
    """Render a long string as adjacent string literals, wrapped at word
    boundaries; at the top level the pieces are wrapped in parentheses."""
    write = stream.write
    if not len(object):
        write(repr(object))
        return
    pieces = []
    lines = object.splitlines(True)
    if level == 1:
        indent += 1
        allowance += 1
    max_width1 = max_width = self._width - indent
    for i, line in enumerate(lines):
        line_repr = repr(line)
        if i == len(lines) - 1:
            # The final piece must also leave room for the allowance.
            max_width1 -= allowance
        if len(line_repr) <= max_width1:
            pieces.append(line_repr)
        else:
            # A list of alternating (non-space, space) strings so that
            # wrapping happens at word boundaries.
            parts = re.findall(r'\S*\s*', line)
            assert parts
            assert not parts[-1]
            parts.pop()  # drop empty last part
            max_width2 = max_width
            acc = ''
            for j, part in enumerate(parts):
                candidate = acc + part
                if j == len(parts) - 1 and i == len(lines) - 1:
                    max_width2 -= allowance
                if len(repr(candidate)) > max_width2:
                    if acc:
                        pieces.append(repr(acc))
                    acc = part
                else:
                    acc = candidate
            if acc:
                pieces.append(repr(acc))
    if len(pieces) == 1:
        write(line_repr)
        return
    if level == 1:
        write('(')
    for i, piece in enumerate(pieces):
        if i > 0:
            write('\n' + ' '*indent)
        write(piece)
    if level == 1:
        write(')')

_dispatch[str.__repr__] = _pprint_str
def _pprint_bytes(self, object, stream, indent, allowance, context, level):
    """Render a bytes object, splitting long values across lines; at the
    top level the pieces are wrapped in parentheses."""
    write = stream.write
    if len(object) <= 4:
        write(repr(object))
        return
    parens = level == 1
    if parens:
        indent += 1
        allowance += 1
        write('(')
    sep = ''
    for piece in _wrap_bytes_repr(object, self._width - indent, allowance):
        write(sep)
        write(piece)
        if not sep:
            # After the first piece, continuation lines are indented.
            sep = '\n' + ' '*indent
    if parens:
        write(')')

_dispatch[bytes.__repr__] = _pprint_bytes
def _pprint_bytearray(self, object, stream, indent, allowance, context, level):
    """Render a bytearray as bytearray(<bytes literal>)."""
    stream.write('bytearray(')
    # +10 accounts for the width of the "bytearray(" prefix.
    self._pprint_bytes(bytes(object), stream, indent + 10,
                       allowance + 1, context, level + 1)
    stream.write(')')

_dispatch[bytearray.__repr__] = _pprint_bytearray
def _pprint_mappingproxy(self, object, stream, indent, allowance, context, level):
    """Render a mappingproxy as mappingproxy(<underlying mapping>)."""
    stream.write('mappingproxy(')
    # +13 accounts for the width of the "mappingproxy(" prefix.
    self._format(object.copy(), stream, indent + 13, allowance + 1,
                 context, level)
    stream.write(')')

_dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy
def _pprint_simplenamespace(self, object, stream, indent, allowance, context, level):
    """Render a SimpleNamespace as namespace(attr=..., ...)."""
    if type(object) is _types.SimpleNamespace:
        # The SimpleNamespace repr is "namespace" instead of the class
        # name, so we do the same here. For subclasses, use the class name.
        cls_name = 'namespace'
    else:
        cls_name = object.__class__.__name__
    stream.write(cls_name + '(')
    self._format_namespace_items(object.__dict__.items(), stream,
                                 indent + len(cls_name) + 1,
                                 allowance, context, level)
    stream.write(')')

_dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace
def _format_dict_items(self, items, stream, indent, allowance, context,
                       level):
    """Write "key: value" pairs, one per line, comma-separated."""
    write = stream.write
    indent += self._indent_per_level
    separator = ',\n' + ' ' * indent
    last_index = len(items) - 1
    for i, (key, value) in enumerate(items):
        is_last = i == last_index
        key_repr = self._repr(key, context, level)
        write(key_repr)
        write(': ')
        # Only the final value must respect the caller's allowance; the
        # others just need room for a trailing comma.
        self._format(value, stream, indent + len(key_repr) + 2,
                     allowance if is_last else 1,
                     context, level)
        if not is_last:
            write(separator)
def _format_namespace_items(self, items, stream, indent, allowance, context, level):
    """Write "name=value" pairs, one per line, comma-separated."""
    write = stream.write
    separator = ',\n' + ' ' * indent
    last_index = len(items) - 1
    for i, (name, value) in enumerate(items):
        is_last = i == last_index
        write(name)
        write('=')
        if id(value) in context:
            # Special-case representation of recursion to match standard
            # recursive dataclass repr.
            write("...")
        else:
            self._format(value, stream, indent + len(name) + 1,
                         allowance if is_last else 1,
                         context, level)
        if not is_last:
            write(separator)
def _format_items(self, items, stream, indent, allowance, context, level):
    """Write sequence elements, either packed several to a line (compact
    mode) or one per line."""
    write = stream.write
    indent += self._indent_per_level
    if self._indent_per_level > 1:
        write((self._indent_per_level - 1) * ' ')
    newline_sep = ',\n' + ' ' * indent
    sep = ''
    width = max_width = self._width - indent + 1
    iterator = iter(items)
    try:
        pending = next(iterator)
    except StopIteration:
        return
    at_end = False
    while not at_end:
        entry = pending
        try:
            pending = next(iterator)
        except StopIteration:
            at_end = True
            # The final element must also leave room for the allowance.
            max_width -= allowance
            width -= allowance
        if self._compact:
            entry_repr = self._repr(entry, context, level)
            needed = len(entry_repr) + 2
            if width < needed:
                # Start a new line and reset the budget.
                width = max_width
                if sep:
                    sep = newline_sep
            if width >= needed:
                width -= needed
                write(sep)
                sep = ', '
                write(entry_repr)
                continue
        write(sep)
        sep = newline_sep
        self._format(entry, stream, indent,
                     allowance if at_end else 1,
                     context, level)
def _repr(self, object, context, level):
    """Return format()'s string for *object*, folding its readable and
    recursive flags into the printer's running state."""
    text, readable, recursive = self.format(object, context.copy(),
                                            self._depth, level)
    if not readable:
        self._readable = False
    if recursive:
        self._recursive = True
    return text
def format(self, object, context, maxlevels, level):
    """Format object for a specific context, returning a string
    and flags indicating whether the representation is 'readable'
    and whether the object represents a recursive construct.
    """
    # Subclasses may override this to customize formatting.
    return self._safe_repr(object, context, maxlevels, level)
def _pprint_default_dict(self, object, stream, indent, allowance, context, level):
    """Render a defaultdict as ClassName(default_factory, {...})."""
    if not len(object):
        stream.write(repr(object))
        return
    factory_repr = self._repr(object.default_factory, context, level)
    cls = object.__class__
    indent += len(cls.__name__) + 1
    stream.write('%s(%s,\n%s' % (cls.__name__, factory_repr, ' ' * indent))
    self._pprint_dict(object, stream, indent, allowance + 1, context, level)
    stream.write(')')

_dispatch[_collections.defaultdict.__repr__] = _pprint_default_dict
def _pprint_counter(self, object, stream, indent, allowance, context, level):
    """Render a Counter as ClassName({...}), most common entries first."""
    if not len(object):
        stream.write(repr(object))
        return
    cls = object.__class__
    stream.write(cls.__name__ + '({')
    if self._indent_per_level > 1:
        stream.write((self._indent_per_level - 1) * ' ')
    self._format_dict_items(object.most_common(), stream,
                            indent + len(cls.__name__) + 1, allowance + 2,
                            context, level)
    stream.write('})')

_dispatch[_collections.Counter.__repr__] = _pprint_counter
def _pprint_chain_map(self, object, stream, indent, allowance, context, level):
    """Render a ChainMap as ClassName(map1, map2, ...), one map per line."""
    if not len(object.maps):
        stream.write(repr(object))
        return
    cls = object.__class__
    stream.write(cls.__name__ + '(')
    indent += len(cls.__name__) + 1
    final = len(object.maps) - 1
    for i, mapping in enumerate(object.maps):
        if i == final:
            self._format(mapping, stream, indent, allowance + 1, context, level)
            stream.write(')')
        else:
            self._format(mapping, stream, indent, 1, context, level)
            stream.write(',\n' + ' ' * indent)

_dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map
def _pprint_deque(self, object, stream, indent, allowance, context, level):
    """Render a deque as ClassName([...]) or ClassName([...], maxlen=N)."""
    if not len(object):
        stream.write(repr(object))
        return
    cls = object.__class__
    stream.write(cls.__name__ + '(')
    indent += len(cls.__name__) + 1
    stream.write('[')
    if object.maxlen is None:
        self._format_items(object, stream, indent, allowance + 2,
                           context, level)
        stream.write('])')
    else:
        # Reserve room for "]," here; maxlen goes on its own line.
        self._format_items(object, stream, indent, 2,
                           context, level)
        maxlen_repr = self._repr(object.maxlen, context, level)
        stream.write('],\n%smaxlen=%s)' % (' ' * indent, maxlen_repr))

_dispatch[_collections.deque.__repr__] = _pprint_deque
def _pprint_user_dict(self, object, stream, indent, allowance, context, level):
    """Delegate to the wrapped dict; level - 1 undoes the level + 1 the
    dispatcher applied, since UserDict adds no visible nesting."""
    self._format(object.data, stream, indent, allowance, context, level - 1)

_dispatch[_collections.UserDict.__repr__] = _pprint_user_dict
def _pprint_user_list(self, object, stream, indent, allowance, context, level):
    """Delegate to the wrapped list; level - 1 undoes the level + 1 the
    dispatcher applied, since UserList adds no visible nesting."""
    self._format(object.data, stream, indent, allowance, context, level - 1)

_dispatch[_collections.UserList.__repr__] = _pprint_user_list
def _pprint_user_string(self, object, stream, indent, allowance, context, level):
    """Delegate to the wrapped string; level - 1 undoes the level + 1 the
    dispatcher applied, since UserString adds no visible nesting."""
    self._format(object.data, stream, indent, allowance, context, level - 1)

_dispatch[_collections.UserString.__repr__] = _pprint_user_string
    def _safe_repr(self, object, context, maxlevels, level):
        # Return triple (repr_string, isreadable, isrecursive).
        # "readable" means the repr could round-trip through eval();
        # "recursive" means the object was found to contain itself.
        typ = type(object)
        if typ in _builtin_scalars:
            # Builtin scalars are always readable and can never recurse.
            return repr(object), True, False

        r = getattr(typ, "__repr__", None)

        # ints (and subclasses that keep the default repr): optionally
        # group the digits with underscores.
        if issubclass(typ, int) and r is int.__repr__:
            if self._underscore_numbers:
                return f"{object:_d}", True, False
            else:
                return repr(object), True, False

        if issubclass(typ, dict) and r is dict.__repr__:
            if not object:
                return "{}", True, False
            objid = id(object)
            if maxlevels and level >= maxlevels:
                # Depth limit reached: elide the contents, and flag
                # recursion if this dict is already on the current path.
                return "{...}", False, objid in context
            if objid in context:
                return _recursion(object), False, True
            # Mark this object as being on the current formatting path.
            context[objid] = 1
            readable = True
            recursive = False
            components = []
            append = components.append
            level += 1
            if self._sort_dicts:
                # _safe_tuple gives a total order even for unorderable keys.
                items = sorted(object.items(), key=_safe_tuple)
            else:
                items = object.items()
            for k, v in items:
                krepr, kreadable, krecur = self.format(
                    k, context, maxlevels, level)
                vrepr, vreadable, vrecur = self.format(
                    v, context, maxlevels, level)
                append("%s: %s" % (krepr, vrepr))
                readable = readable and kreadable and vreadable
                if krecur or vrecur:
                    recursive = True
            del context[objid]
            return "{%s}" % ", ".join(components), readable, recursive

        if (issubclass(typ, list) and r is list.__repr__) or \
           (issubclass(typ, tuple) and r is tuple.__repr__):
            if issubclass(typ, list):
                if not object:
                    return "[]", True, False
                format = "[%s]"
            elif len(object) == 1:
                # One-element tuple needs the trailing comma.
                format = "(%s,)"
            else:
                if not object:
                    return "()", True, False
                format = "(%s)"
            objid = id(object)
            if maxlevels and level >= maxlevels:
                return format % "...", False, objid in context
            if objid in context:
                return _recursion(object), False, True
            context[objid] = 1
            readable = True
            recursive = False
            components = []
            append = components.append
            level += 1
            for o in object:
                orepr, oreadable, orecur = self.format(
                    o, context, maxlevels, level)
                append(orepr)
                if not oreadable:
                    readable = False
                if orecur:
                    recursive = True
            del context[objid]
            return format % ", ".join(components), readable, recursive

        # Fallback: a repr that starts with '<' is not eval()-able.
        rep = repr(object)
        return rep, (rep and not rep.startswith('<')), False
|
| 630 |
+
|
| 631 |
+
# Types whose builtin repr() is always readable and can never recurse.
_builtin_scalars = frozenset({str, bytes, bytearray, float, complex,
                              bool, type(None)})
|
| 633 |
+
|
| 634 |
+
def _recursion(object):
|
| 635 |
+
return ("<Recursion on %s with id=%s>"
|
| 636 |
+
% (type(object).__name__, id(object)))
|
| 637 |
+
|
| 638 |
+
|
| 639 |
+
def _perfcheck(object=None):
    """Micro-benchmark comparing _safe_repr() against full pformat().

    Builds a large nested structure by default, times both code paths,
    and prints the elapsed seconds for each.
    """
    import time
    if object is None:
        object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
    p = PrettyPrinter()
    t1 = time.perf_counter()
    # _safe_repr takes (object, context, maxlevels, level); the previous
    # code passed a stray fifth argument, raising TypeError before any
    # timing could happen.
    p._safe_repr(object, {}, None, 0)
    t2 = time.perf_counter()
    p.pformat(object)
    t3 = time.perf_counter()
    print("_safe_repr:", t2 - t1)
    print("pformat:", t3 - t2)
|
| 651 |
+
|
| 652 |
+
def _wrap_bytes_repr(object, width, allowance):
|
| 653 |
+
current = b''
|
| 654 |
+
last = len(object) // 4 * 4
|
| 655 |
+
for i in range(0, len(object), 4):
|
| 656 |
+
part = object[i: i+4]
|
| 657 |
+
candidate = current + part
|
| 658 |
+
if i == last:
|
| 659 |
+
width -= allowance
|
| 660 |
+
if len(repr(candidate)) > width:
|
| 661 |
+
if current:
|
| 662 |
+
yield repr(current)
|
| 663 |
+
current = part
|
| 664 |
+
else:
|
| 665 |
+
current = candidate
|
| 666 |
+
if current:
|
| 667 |
+
yield repr(current)
|
| 668 |
+
|
| 669 |
+
if __name__ == "__main__":
    # When run as a script, time _safe_repr() vs. pformat() on a large input.
    _perfcheck()
|
llava/lib/python3.10/profile.py
ADDED
|
@@ -0,0 +1,611 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/bin/env python3
|
| 2 |
+
#
|
| 3 |
+
# Class for profiling python code. rev 1.0 6/2/94
|
| 4 |
+
#
|
| 5 |
+
# Written by James Roskind
|
| 6 |
+
# Based on prior profile module by Sjoerd Mullender...
|
| 7 |
+
# which was hacked somewhat by: Guido van Rossum
|
| 8 |
+
|
| 9 |
+
"""Class for profiling Python code."""
|
| 10 |
+
|
| 11 |
+
# Copyright Disney Enterprises, Inc. All Rights Reserved.
|
| 12 |
+
# Licensed to PSF under a Contributor Agreement
|
| 13 |
+
#
|
| 14 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 15 |
+
# you may not use this file except in compliance with the License.
|
| 16 |
+
# You may obtain a copy of the License at
|
| 17 |
+
#
|
| 18 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 19 |
+
#
|
| 20 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 21 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 22 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
| 23 |
+
# either express or implied. See the License for the specific language
|
| 24 |
+
# governing permissions and limitations under the License.
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
import io
|
| 28 |
+
import sys
|
| 29 |
+
import time
|
| 30 |
+
import marshal
|
| 31 |
+
|
| 32 |
+
__all__ = ["run", "runctx", "Profile"]
|
| 33 |
+
|
| 34 |
+
# Sample timer for use with
|
| 35 |
+
#i_count = 0
|
| 36 |
+
#def integer_timer():
|
| 37 |
+
# global i_count
|
| 38 |
+
# i_count = i_count + 1
|
| 39 |
+
# return i_count
|
| 40 |
+
#itimes = integer_timer # replace with C coded timer returning integers
|
| 41 |
+
|
| 42 |
+
class _Utils:
|
| 43 |
+
"""Support class for utility functions which are shared by
|
| 44 |
+
profile.py and cProfile.py modules.
|
| 45 |
+
Not supposed to be used directly.
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
def __init__(self, profiler):
|
| 49 |
+
self.profiler = profiler
|
| 50 |
+
|
| 51 |
+
def run(self, statement, filename, sort):
|
| 52 |
+
prof = self.profiler()
|
| 53 |
+
try:
|
| 54 |
+
prof.run(statement)
|
| 55 |
+
except SystemExit:
|
| 56 |
+
pass
|
| 57 |
+
finally:
|
| 58 |
+
self._show(prof, filename, sort)
|
| 59 |
+
|
| 60 |
+
def runctx(self, statement, globals, locals, filename, sort):
|
| 61 |
+
prof = self.profiler()
|
| 62 |
+
try:
|
| 63 |
+
prof.runctx(statement, globals, locals)
|
| 64 |
+
except SystemExit:
|
| 65 |
+
pass
|
| 66 |
+
finally:
|
| 67 |
+
self._show(prof, filename, sort)
|
| 68 |
+
|
| 69 |
+
def _show(self, prof, filename, sort):
|
| 70 |
+
if filename is not None:
|
| 71 |
+
prof.dump_stats(filename)
|
| 72 |
+
else:
|
| 73 |
+
prof.print_stats(sort)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
#**************************************************************************
|
| 77 |
+
# The following are the static member functions for the profiler class
|
| 78 |
+
# Note that an instance of Profile() is *not* needed to call them.
|
| 79 |
+
#**************************************************************************
|
| 80 |
+
|
| 81 |
+
def run(statement, filename=None, sort=-1):
    """Profile *statement*, optionally saving the results in *filename*.

    The statement is any string acceptable to exec().  It is executed
    under a fresh profiler and statistics are gathered from the run.
    When no file name is given, a simple report sorted by the standard
    name string (file/line/function-name) is printed instead.
    """
    runner = _Utils(Profile)
    return runner.run(statement, filename, sort)
|
| 93 |
+
|
| 94 |
+
def runctx(statement, globals, locals, filename=None, sort=-1):
    """Like run(), but the caller supplies the globals and locals
    dictionaries used while executing *statement*.

    statement and filename have the same semantics as profile.run.
    """
    runner = _Utils(Profile)
    return runner.runctx(statement, globals, locals, filename, sort)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class Profile:
    """Profiler class.

    self.cur is always a tuple.  Each such tuple corresponds to a stack
    frame that is currently active (self.cur[-2]).  The following are the
    definitions of its members.  We use this external "parallel stack" to
    avoid contaminating the program that we are profiling. (old profiler
    used to write into the frames local dictionary!!) Derived classes
    can change the definition of some entries, as long as they leave
    [-2:] intact (frame and previous tuple).  In case an internal error is
    detected, the -3 element is used as the function name.

    [ 0] = Time that needs to be charged to the parent frame's function.
           It is used so that a function call will not have to access the
           timing data for the parent frame.
    [ 1] = Total time spent in this frame's function, excluding time in
           subfunctions (this latter is tallied in cur[2]).
    [ 2] = Total time spent in subfunctions, excluding time executing the
           frame's function (this latter is tallied in cur[1]).
    [-3] = Name of the function that corresponds to this frame.
    [-2] = Actual frame that we correspond to (used to sync exception handling).
    [-1] = Our parent 6-tuple (corresponds to frame.f_back).

    Timing data for each function is stored as a 5-tuple in the dictionary
    self.timings[].  The index is always the name stored in self.cur[-3].
    The following are the definitions of the members:

    [0] = The number of times this function was called, not counting direct
          or indirect recursion,
    [1] = Number of times this function appears on the stack, minus one
    [2] = Total time spent internal to this function
    [3] = Cumulative time that this function was present on the stack.  In
          non-recursive functions, this is the total execution time from start
          to finish of each invocation of a function, including time spent in
          all subfunctions.
    [4] = A dictionary indicating for each function name, the number of times
          it was called by us.
    """

    bias = 0  # calibration constant

    def __init__(self, timer=None, bias=None):
        self.timings = {}
        self.cur = None
        self.cmd = ""
        self.c_func_name = ""

        if bias is None:
            bias = self.bias
        self.bias = bias  # Materialize in local dict for lookup speed.

        # Pick the dispatch routine matching the shape of the timer's
        # return value: default process_time (scalar), a scalar timer,
        # a 2-tuple timer, or an arbitrary sequence timer.
        if not timer:
            self.timer = self.get_time = time.process_time
            self.dispatcher = self.trace_dispatch_i
        else:
            self.timer = timer
            t = self.timer()  # test out timer function
            try:
                length = len(t)
            except TypeError:
                # Timer returns a scalar.
                self.get_time = timer
                self.dispatcher = self.trace_dispatch_i
            else:
                if length == 2:
                    self.dispatcher = self.trace_dispatch
                else:
                    self.dispatcher = self.trace_dispatch_l
                # This get_time() implementation needs to be defined
                # here to capture the passed-in timer in the parameter
                # list (for performance).  Note that we can't assume
                # the timer() result contains two values in all
                # cases.
                def get_time_timer(timer=timer, sum=sum):
                    return sum(timer())
                self.get_time = get_time_timer
        self.t = self.get_time()
        self.simulate_call('profiler')

    # Heavily optimized dispatch routine for time.process_time() timer

    def trace_dispatch(self, frame, event, arg):
        timer = self.timer
        t = timer()
        t = t[0] + t[1] - self.t - self.bias

        if event == "c_call":
            self.c_func_name = arg.__name__

        if self.dispatch[event](self, frame,t):
            t = timer()
            self.t = t[0] + t[1]
        else:
            r = timer()
            self.t = r[0] + r[1] - t # put back unrecorded delta

    # Dispatch routine for best timer program (return = scalar, fastest if
    # an integer but float works too -- and time.process_time() relies on that).

    def trace_dispatch_i(self, frame, event, arg):
        timer = self.timer
        t = timer() - self.t - self.bias

        if event == "c_call":
            self.c_func_name = arg.__name__

        if self.dispatch[event](self, frame, t):
            self.t = timer()
        else:
            self.t = timer() - t  # put back unrecorded delta

    # Dispatch routine for macintosh (timer returns time in ticks of
    # 1/60th second)

    def trace_dispatch_mac(self, frame, event, arg):
        timer = self.timer
        t = timer()/60.0 - self.t - self.bias

        if event == "c_call":
            self.c_func_name = arg.__name__

        if self.dispatch[event](self, frame, t):
            self.t = timer()/60.0
        else:
            self.t = timer()/60.0 - t  # put back unrecorded delta

    # SLOW generic dispatch routine for timer returning lists of numbers

    def trace_dispatch_l(self, frame, event, arg):
        get_time = self.get_time
        t = get_time() - self.t - self.bias

        if event == "c_call":
            self.c_func_name = arg.__name__

        if self.dispatch[event](self, frame, t):
            self.t = get_time()
        else:
            self.t = get_time() - t  # put back unrecorded delta

    # In the event handlers, the first 3 elements of self.cur are unpacked
    # into vrbls w/ 3-letter names.  The last two characters are meant to be
    # mnemonic:
    #     _pt  self.cur[0] "parent time"   time to be charged to parent frame
    #     _it  self.cur[1] "internal time" time spent directly in the function
    #     _et  self.cur[2] "external time" time spent in subfunctions

    def trace_dispatch_exception(self, frame, t):
        rpt, rit, ret, rfn, rframe, rcur = self.cur
        if (rframe is not frame) and rcur:
            return self.trace_dispatch_return(rframe, t)
        self.cur = rpt, rit+t, ret, rfn, rframe, rcur
        return 1


    def trace_dispatch_call(self, frame, t):
        if self.cur and frame.f_back is not self.cur[-2]:
            # Generator resumption or similar: sync our parallel stack
            # with the real one by simulating the missing return first.
            rpt, rit, ret, rfn, rframe, rcur = self.cur
            if not isinstance(rframe, Profile.fake_frame):
                assert rframe.f_back is frame.f_back, ("Bad call", rfn,
                                                       rframe, rframe.f_back,
                                                       frame, frame.f_back)
                self.trace_dispatch_return(rframe, 0)
                assert (self.cur is None or \
                        frame.f_back is self.cur[-2]), ("Bad call",
                                                        self.cur[-3])
        fcode = frame.f_code
        fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
        self.cur = (t, 0, 0, fn, frame, self.cur)
        timings = self.timings
        if fn in timings:
            cc, ns, tt, ct, callers = timings[fn]
            timings[fn] = cc, ns + 1, tt, ct, callers
        else:
            timings[fn] = 0, 0, 0, 0, {}
        return 1

    def trace_dispatch_c_call (self, frame, t):
        # C functions have no frame of their own; key them by name only.
        fn = ("", 0, self.c_func_name)
        self.cur = (t, 0, 0, fn, frame, self.cur)
        timings = self.timings
        if fn in timings:
            cc, ns, tt, ct, callers = timings[fn]
            timings[fn] = cc, ns+1, tt, ct, callers
        else:
            timings[fn] = 0, 0, 0, 0, {}
        return 1

    def trace_dispatch_return(self, frame, t):
        if frame is not self.cur[-2]:
            assert frame is self.cur[-2].f_back, ("Bad return", self.cur[-3])
            self.trace_dispatch_return(self.cur[-2], 0)

        # Prefix "r" means part of the Returning or exiting frame.
        # Prefix "p" means part of the Previous or Parent or older frame.

        rpt, rit, ret, rfn, frame, rcur = self.cur
        rit = rit + t
        frame_total = rit + ret

        ppt, pit, pet, pfn, pframe, pcur = rcur
        self.cur = ppt, pit + rpt, pet + frame_total, pfn, pframe, pcur

        timings = self.timings
        cc, ns, tt, ct, callers = timings[rfn]
        if not ns:
            # This is the only occurrence of the function on the stack.
            # Else this is a (directly or indirectly) recursive call, and
            # its cumulative time will get updated when the topmost call to
            # it returns.
            ct = ct + frame_total
            cc = cc + 1

        if pfn in callers:
            callers[pfn] = callers[pfn] + 1  # hack: gather more
            # stats such as the amount of time added to ct courtesy
            # of this specific call, and the contribution to cc
            # courtesy of this call.
        else:
            callers[pfn] = 1

        timings[rfn] = cc, ns - 1, tt + rit, ct, callers

        return 1


    # Maps sys.setprofile event names to the handler methods above.
    dispatch = {
        "call": trace_dispatch_call,
        "exception": trace_dispatch_exception,
        "return": trace_dispatch_return,
        "c_call": trace_dispatch_c_call,
        "c_exception": trace_dispatch_return,  # the C function returned
        "c_return": trace_dispatch_return,
        }


    # The next few functions play with self.cmd. By carefully preloading
    # our parallel stack, we can force the profiled result to include
    # an arbitrary string as the name of the calling function.
    # We use self.cmd as that string, and the resulting stats look
    # very nice :-).

    def set_cmd(self, cmd):
        if self.cur[-1]: return   # already set
        self.cmd = cmd
        self.simulate_call(cmd)

    class fake_code:
        # Minimal stand-in for a code object, just enough for the
        # dispatch handlers above.
        def __init__(self, filename, line, name):
            self.co_filename = filename
            self.co_line = line
            self.co_name = name
            self.co_firstlineno = 0

        def __repr__(self):
            return repr((self.co_filename, self.co_line, self.co_name))

    class fake_frame:
        # Minimal stand-in for a frame object.
        def __init__(self, code, prior):
            self.f_code = code
            self.f_back = prior

    def simulate_call(self, name):
        # Push a synthetic frame named *name* onto the parallel stack.
        code = self.fake_code('profile', 0, name)
        if self.cur:
            pframe = self.cur[-2]
        else:
            pframe = None
        frame = self.fake_frame(code, pframe)
        self.dispatch['call'](self, frame, 0)

    # collect stats from pending stack, including getting final
    # timings for self.cmd frame.

    def simulate_cmd_complete(self):
        get_time = self.get_time
        t = get_time() - self.t
        while self.cur[-1]:
            # We *can* cause assertion errors here if
            # dispatch_trace_return checks for a frame match!
            self.dispatch['return'](self, self.cur[-2], t)
            t = 0
        self.t = get_time() - t


    def print_stats(self, sort=-1):
        import pstats
        pstats.Stats(self).strip_dirs().sort_stats(sort). \
              print_stats()

    def dump_stats(self, file):
        # Serialize the stats dict with marshal for later pstats loading.
        with open(file, 'wb') as f:
            self.create_stats()
            marshal.dump(self.stats, f)

    def create_stats(self):
        self.simulate_cmd_complete()
        self.snapshot_stats()

    def snapshot_stats(self):
        # Convert self.timings into the pstats-compatible self.stats,
        # replacing the "appears on stack" count with total call count.
        self.stats = {}
        for func, (cc, ns, tt, ct, callers) in self.timings.items():
            callers = callers.copy()
            nc = 0
            for callcnt in callers.values():
                nc += callcnt
            self.stats[func] = cc, nc, tt, ct, callers


    # The following two methods can be called by clients to use
    # a profiler to profile a statement, given as a string.

    def run(self, cmd):
        import __main__
        dict = __main__.__dict__
        return self.runctx(cmd, dict, dict)

    def runctx(self, cmd, globals, locals):
        self.set_cmd(cmd)
        sys.setprofile(self.dispatcher)
        try:
            exec(cmd, globals, locals)
        finally:
            sys.setprofile(None)
        return self

    # This method is more useful to profile a single function call.
    def runcall(self, func, /, *args, **kw):
        self.set_cmd(repr(func))
        sys.setprofile(self.dispatcher)
        try:
            return func(*args, **kw)
        finally:
            sys.setprofile(None)


    #******************************************************************
    # The following calculates the overhead for using a profiler.  The
    # problem is that it takes a fair amount of time for the profiler
    # to stop the stopwatch (from the time it receives an event).
    # Similarly, there is a delay from the time that the profiler
    # re-starts the stopwatch before the user's code really gets to
    # continue.  The following code tries to measure the difference on
    # a per-event basis.
    #
    # Note that this difference is only significant if there are a lot of
    # events, and relatively little user code per event.  For example,
    # code with small functions will typically benefit from having the
    # profiler calibrated for the current platform.  This *could* be
    # done on the fly during init() time, but it is not worth the
    # effort.  Also note that if too large a value specified, then
    # execution time on some functions will actually appear as a
    # negative number.  It is *normal* for some functions (with very
    # low call counts) to have such negative stats, even if the
    # calibration figure is "correct."
    #
    # One alternative to profile-time calibration adjustments (i.e.,
    # adding in the magic little delta during each event) is to track
    # more carefully the number of events (and cumulatively, the number
    # of events during sub functions) that are seen.  If this were
    # done, then the arithmetic could be done after the fact (i.e., at
    # display time).  Currently, we track only call/return events.
    # These values can be deduced by examining the callees and callers
    # vectors for each functions.  Hence we *can* almost correct the
    # internal time figure at print time (note that we currently don't
    # track exception event processing counts).  Unfortunately, there
    # is currently no similar information for cumulative sub-function
    # time.  It would not be hard to "get all this info" at profiler
    # time.  Specifically, we would have to extend the tuples to keep
    # counts of this in each frame, and then extend the defs of timing
    # tuples to include the significant two figures.  I'm a bit fearful
    # that this additional feature will slow the heavily optimized
    # event/time ratio (i.e., the profiler would run slower, for a very
    # low "value added" feature.)
    #**************************************************************

    def calibrate(self, m, verbose=0):
        if self.__class__ is not Profile:
            raise TypeError("Subclasses must override .calibrate().")

        saved_bias = self.bias
        self.bias = 0
        try:
            return self._calibrate_inner(m, verbose)
        finally:
            self.bias = saved_bias

    def _calibrate_inner(self, m, verbose):
        get_time = self.get_time

        # Set up a test case to be run with and without profiling.  Include
        # lots of calls, because we're trying to quantify stopwatch overhead.
        # Do not raise any exceptions, though, because we want to know
        # exactly how many profile events are generated (one call event, +
        # one return event, per Python-level call).

        def f1(n):
            for i in range(n):
                x = 1

        def f(m, f1=f1):
            for i in range(m):
                f1(100)

        f(m)    # warm up the cache

        # elapsed_noprofile <- time f(m) takes without profiling.
        t0 = get_time()
        f(m)
        t1 = get_time()
        elapsed_noprofile = t1 - t0
        if verbose:
            print("elapsed time without profiling =", elapsed_noprofile)

        # elapsed_profile <- time f(m) takes with profiling.  The difference
        # is profiling overhead, only some of which the profiler subtracts
        # out on its own.
        p = Profile()
        t0 = get_time()
        p.runctx('f(m)', globals(), locals())
        t1 = get_time()
        elapsed_profile = t1 - t0
        if verbose:
            print("elapsed time with profiling =", elapsed_profile)

        # reported_time <- "CPU seconds" the profiler charged to f and f1.
        total_calls = 0.0
        reported_time = 0.0
        for (filename, line, funcname), (cc, ns, tt, ct, callers) in \
                p.timings.items():
            if funcname in ("f", "f1"):
                total_calls += cc
                reported_time += tt

        if verbose:
            print("'CPU seconds' profiler reported =", reported_time)
            print("total # calls =", total_calls)
        if total_calls != m + 1:
            raise ValueError("internal error: total calls = %d" % total_calls)

        # reported_time - elapsed_noprofile = overhead the profiler wasn't
        # able to measure.  Divide by twice the number of calls (since there
        # are two profiler events per call in this test) to get the hidden
        # overhead per event.
        mean = (reported_time - elapsed_noprofile) / 2.0 / total_calls
        if verbose:
            print("mean stopwatch overhead per profile event =", mean)
        return mean
|
| 550 |
+
|
| 551 |
+
#****************************************************************************
|
| 552 |
+
|
| 553 |
+
def main():
    """Command-line entry point for the profiler.

    Usage: profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ...

    Rewrites sys.argv so the profiled target sees its own arguments, then
    delegates to the module-level runctx().  Returns the OptionParser
    instance (useful for tests / when no target was given).
    """
    import os
    from optparse import OptionParser

    usage = "profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
    parser = OptionParser(usage=usage)
    # Stop option parsing at the first positional argument so the profiled
    # script's own flags are not consumed by this parser.
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile",
        help="Save stats to <outfile>", default=None)
    parser.add_option('-m', dest="module", action="store_true",
        help="Profile a library module.", default=False)
    parser.add_option('-s', '--sort', dest="sort",
        help="Sort order when printing to stdout, based on pstats.Stats class",
        default=-1)

    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)

    (options, args) = parser.parse_args()
    # Make the profiled target believe it was invoked directly.
    sys.argv[:] = args

    # The script that we're profiling may chdir, so capture the absolute path
    # to the output file at startup.
    if options.outfile is not None:
        options.outfile = os.path.abspath(options.outfile)

    if len(args) > 0:
        if options.module:
            # -m: run a library module as __main__ via runpy.
            import runpy
            code = "run_module(modname, run_name='__main__')"
            globs = {
                'run_module': runpy.run_module,
                'modname': args[0]
            }
        else:
            progname = args[0]
            # Mimic normal script execution: the script's directory leads sys.path.
            sys.path.insert(0, os.path.dirname(progname))
            with io.open_code(progname) as fp:
                code = compile(fp.read(), progname, 'exec')
            globs = {
                '__file__': progname,
                '__name__': '__main__',
                '__package__': None,
                '__cached__': None,
            }
        try:
            runctx(code, globs, None, options.outfile, options.sort)
        except BrokenPipeError as exc:
            # Prevent "Exception ignored" during interpreter shutdown.
            sys.stdout = None
            sys.exit(exc.errno)
    else:
        parser.print_usage()
    return parser
|
| 608 |
+
|
| 609 |
+
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
    main()
|
llava/lib/python3.10/pty.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Pseudo terminal utilities."""
|
| 2 |
+
|
| 3 |
+
# Bugs: No signal handling. Doesn't set slave termios and window size.
|
| 4 |
+
# Only tested on Linux, FreeBSD, and macOS.
|
| 5 |
+
# See: W. Richard Stevens. 1992. Advanced Programming in the
|
| 6 |
+
# UNIX Environment. Chapter 19.
|
| 7 |
+
# Author: Steen Lumholt -- with additions by Guido.
|
| 8 |
+
|
| 9 |
+
from select import select
|
| 10 |
+
import os
|
| 11 |
+
import sys
|
| 12 |
+
import tty
|
| 13 |
+
|
| 14 |
+
# names imported directly for test mocking purposes
|
| 15 |
+
from os import close, waitpid
|
| 16 |
+
from tty import setraw, tcgetattr, tcsetattr
|
| 17 |
+
|
| 18 |
+
__all__ = ["openpty", "fork", "spawn"]
|
| 19 |
+
|
| 20 |
+
# Conventional POSIX file descriptor numbers for the standard streams.
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2

# os.fork()/os.forkpty() return 0 in the child process.
CHILD = 0
|
| 25 |
+
|
| 26 |
+
def openpty():
    """openpty() -> (master_fd, slave_fd)
    Open a pty master/slave pair, using os.openpty() if possible."""

    try:
        return os.openpty()
    except (AttributeError, OSError):
        # os.openpty() missing or failed; fall through to the legacy
        # BSD-style /dev/pty?? scan below.
        pass
    master_fd, slave_name = _open_terminal()
    slave_fd = slave_open(slave_name)
    return master_fd, slave_fd
|
| 37 |
+
|
| 38 |
+
def master_open():
    """master_open() -> (master_fd, slave_name)
    Open a pty master and return the fd, and the filename of the slave end.
    Deprecated, use openpty() instead."""

    try:
        master_fd, slave_fd = os.openpty()
    except (AttributeError, OSError):
        # os.openpty() unavailable; fall back to the manual scan.
        pass
    else:
        # Only the slave *name* is needed here; close its fd again.
        slave_name = os.ttyname(slave_fd)
        os.close(slave_fd)
        return master_fd, slave_name

    return _open_terminal()
|
| 53 |
+
|
| 54 |
+
def _open_terminal():
    """Open pty master and return (master_fd, tty_name)."""
    # Classic BSD-style probe over the fixed /dev/pty?? namespace.
    for x in 'pqrstuvwxyzPQRST':
        for y in '0123456789abcdef':
            pty_name = '/dev/pty' + x + y
            try:
                fd = os.open(pty_name, os.O_RDWR)
            except OSError:
                # This master is busy or absent; try the next one.
                continue
            # The matching slave has the same suffix under /dev/tty??.
            return (fd, '/dev/tty' + x + y)
    raise OSError('out of pty devices')
|
| 65 |
+
|
| 66 |
+
def slave_open(tty_name):
    """slave_open(tty_name) -> slave_fd
    Open the pty slave and acquire the controlling terminal, returning
    opened filedescriptor.
    Deprecated, use openpty() instead."""

    result = os.open(tty_name, os.O_RDWR)
    try:
        # On SysV-derived systems the STREAMS terminal modules must be
        # pushed to get full terminal semantics.
        from fcntl import ioctl, I_PUSH
    except ImportError:
        return result
    try:
        ioctl(result, I_PUSH, "ptem")
        ioctl(result, I_PUSH, "ldterm")
    except OSError:
        # Best effort only: some platforms have I_PUSH but no such modules.
        pass
    return result
|
| 83 |
+
|
| 84 |
+
def fork():
    """fork() -> (pid, master_fd)
    Fork and make the child a session leader with a controlling terminal."""

    try:
        pid, fd = os.forkpty()
    except (AttributeError, OSError):
        # os.forkpty() unavailable or failed; emulate it manually below.
        pass
    else:
        if pid == CHILD:
            try:
                os.setsid()
            except OSError:
                # os.forkpty() already set us session leader
                pass
        return pid, fd

    master_fd, slave_fd = openpty()
    pid = os.fork()
    if pid == CHILD:
        # Establish a new session.
        os.setsid()
        os.close(master_fd)

        # Slave becomes stdin/stdout/stderr of child.
        os.dup2(slave_fd, STDIN_FILENO)
        os.dup2(slave_fd, STDOUT_FILENO)
        os.dup2(slave_fd, STDERR_FILENO)
        if slave_fd > STDERR_FILENO:
            os.close(slave_fd)

        # Explicitly open the tty to make it become a controlling tty.
        tmp_fd = os.open(os.ttyname(STDOUT_FILENO), os.O_RDWR)
        os.close(tmp_fd)
    else:
        os.close(slave_fd)

    # Parent and child process.
    return pid, master_fd
|
| 123 |
+
|
| 124 |
+
def _writen(fd, data):
|
| 125 |
+
"""Write all the data to a descriptor."""
|
| 126 |
+
while data:
|
| 127 |
+
n = os.write(fd, data)
|
| 128 |
+
data = data[n:]
|
| 129 |
+
|
| 130 |
+
def _read(fd):
|
| 131 |
+
"""Default read function."""
|
| 132 |
+
return os.read(fd, 1024)
|
| 133 |
+
|
| 134 |
+
def _copy(master_fd, master_read=_read, stdin_read=_read):
    """Parent copy loop.
    Copies
            pty master -> standard output   (master_read)
            standard input -> pty master    (stdin_read)"""
    fds = [master_fd, STDIN_FILENO]
    while fds:
        # Block until at least one side has data to forward.
        rfds, _wfds, _xfds = select(fds, [], [])

        if master_fd in rfds:
            # Some OSes signal EOF by returning an empty byte string,
            # some throw OSErrors.
            try:
                data = master_read(master_fd)
            except OSError:
                data = b""
            if not data:  # Reached EOF.
                return    # Assume the child process has exited and is
                          # unreachable, so we clean up.
            else:
                os.write(STDOUT_FILENO, data)

        if STDIN_FILENO in rfds:
            data = stdin_read(STDIN_FILENO)
            if not data:
                # stdin hit EOF: stop forwarding it, but keep draining
                # output from the master until it hits EOF too.
                fds.remove(STDIN_FILENO)
            else:
                _writen(master_fd, data)
|
| 162 |
+
|
| 163 |
+
def spawn(argv, master_read=_read, stdin_read=_read):
    """Create a spawned process.

    argv -- program name, or an argv sequence; a bare string is wrapped
        into a one-element tuple.
    master_read, stdin_read -- read callbacks forwarded to _copy().

    Returns the child's exit status as reported by os.waitpid().
    """
    # isinstance() is the idiomatic type test and also accepts str
    # subclasses, unlike the old ``type(argv) == type('')`` comparison.
    if isinstance(argv, str):
        argv = (argv,)
    sys.audit('pty.spawn', argv)

    pid, master_fd = fork()
    if pid == CHILD:
        # In the child: become the target program (never returns on success).
        os.execlp(argv[0], *argv)

    # Put the controlling terminal into raw mode while we proxy traffic;
    # if stdin is not a tty (e.g. a pipe), skip and don't restore.
    try:
        mode = tcgetattr(STDIN_FILENO)
        setraw(STDIN_FILENO)
        restore = True
    except tty.error:    # This is the same as termios.error
        restore = False

    try:
        _copy(master_fd, master_read, stdin_read)
    finally:
        # Always restore the terminal mode, even if _copy() raised.
        if restore:
            tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)

    close(master_fd)
    return waitpid(pid, 0)[1]
|
llava/lib/python3.10/runpy.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""runpy.py - locating and running Python code using the module namespace
|
| 2 |
+
|
| 3 |
+
Provides support for locating and running Python scripts using the Python
|
| 4 |
+
module namespace instead of the native filesystem.
|
| 5 |
+
|
| 6 |
+
This allows Python code to play nicely with non-filesystem based PEP 302
|
| 7 |
+
importers when locating support scripts as well as when importing modules.
|
| 8 |
+
"""
|
| 9 |
+
# Written by Nick Coghlan <ncoghlan at gmail.com>
|
| 10 |
+
# to implement PEP 338 (Executing Modules as Scripts)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
import sys
|
| 14 |
+
import importlib.machinery # importlib first so we can test #15386 via -m
|
| 15 |
+
import importlib.util
|
| 16 |
+
import io
|
| 17 |
+
import types
|
| 18 |
+
import os
|
| 19 |
+
|
| 20 |
+
__all__ = [
|
| 21 |
+
"run_module", "run_path",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
class _TempModule(object):
|
| 25 |
+
"""Temporarily replace a module in sys.modules with an empty namespace"""
|
| 26 |
+
def __init__(self, mod_name):
|
| 27 |
+
self.mod_name = mod_name
|
| 28 |
+
self.module = types.ModuleType(mod_name)
|
| 29 |
+
self._saved_module = []
|
| 30 |
+
|
| 31 |
+
def __enter__(self):
|
| 32 |
+
mod_name = self.mod_name
|
| 33 |
+
try:
|
| 34 |
+
self._saved_module.append(sys.modules[mod_name])
|
| 35 |
+
except KeyError:
|
| 36 |
+
pass
|
| 37 |
+
sys.modules[mod_name] = self.module
|
| 38 |
+
return self
|
| 39 |
+
|
| 40 |
+
def __exit__(self, *args):
|
| 41 |
+
if self._saved_module:
|
| 42 |
+
sys.modules[self.mod_name] = self._saved_module[0]
|
| 43 |
+
else:
|
| 44 |
+
del sys.modules[self.mod_name]
|
| 45 |
+
self._saved_module = []
|
| 46 |
+
|
| 47 |
+
class _ModifiedArgv0(object):
|
| 48 |
+
def __init__(self, value):
|
| 49 |
+
self.value = value
|
| 50 |
+
self._saved_value = self._sentinel = object()
|
| 51 |
+
|
| 52 |
+
def __enter__(self):
|
| 53 |
+
if self._saved_value is not self._sentinel:
|
| 54 |
+
raise RuntimeError("Already preserving saved value")
|
| 55 |
+
self._saved_value = sys.argv[0]
|
| 56 |
+
sys.argv[0] = self.value
|
| 57 |
+
|
| 58 |
+
def __exit__(self, *args):
|
| 59 |
+
self.value = self._sentinel
|
| 60 |
+
sys.argv[0] = self._saved_value
|
| 61 |
+
|
| 62 |
+
# TODO: Replace these helpers with importlib._bootstrap_external functions.
|
| 63 |
+
def _run_code(code, run_globals, init_globals=None,
|
| 64 |
+
mod_name=None, mod_spec=None,
|
| 65 |
+
pkg_name=None, script_name=None):
|
| 66 |
+
"""Helper to run code in nominated namespace"""
|
| 67 |
+
if init_globals is not None:
|
| 68 |
+
run_globals.update(init_globals)
|
| 69 |
+
if mod_spec is None:
|
| 70 |
+
loader = None
|
| 71 |
+
fname = script_name
|
| 72 |
+
cached = None
|
| 73 |
+
else:
|
| 74 |
+
loader = mod_spec.loader
|
| 75 |
+
fname = mod_spec.origin
|
| 76 |
+
cached = mod_spec.cached
|
| 77 |
+
if pkg_name is None:
|
| 78 |
+
pkg_name = mod_spec.parent
|
| 79 |
+
run_globals.update(__name__ = mod_name,
|
| 80 |
+
__file__ = fname,
|
| 81 |
+
__cached__ = cached,
|
| 82 |
+
__doc__ = None,
|
| 83 |
+
__loader__ = loader,
|
| 84 |
+
__package__ = pkg_name,
|
| 85 |
+
__spec__ = mod_spec)
|
| 86 |
+
exec(code, run_globals)
|
| 87 |
+
return run_globals
|
| 88 |
+
|
| 89 |
+
def _run_module_code(code, init_globals=None,
                     mod_name=None, mod_spec=None,
                     pkg_name=None, script_name=None):
    """Helper to run code in new namespace with sys modified"""
    fname = mod_spec.origin if mod_spec is not None else script_name
    with _TempModule(mod_name) as temp_module, _ModifiedArgv0(fname):
        mod_globals = temp_module.module.__dict__
        _run_code(code, mod_globals, init_globals,
                  mod_name, mod_spec, pkg_name, script_name)
    # Copy the globals of the temporary module, as they
    # may be cleared when the temporary module goes away
    return mod_globals.copy()
|
| 101 |
+
|
| 102 |
+
# Helper to get the full name, spec and code for a module
|
| 103 |
+
def _get_module_details(mod_name, error=ImportError):
    """Resolve mod_name to (mod_name, spec, code), raising `error` on failure.

    Packages are redirected to their <pkg>.__main__ submodule (recursively).
    """
    if mod_name.startswith("."):
        raise error("Relative module names not supported")
    pkg_name, _, _ = mod_name.rpartition(".")
    if pkg_name:
        # Try importing the parent to avoid catching initialization errors
        try:
            __import__(pkg_name)
        except ImportError as e:
            # If the parent or higher ancestor package is missing, let the
            # error be raised by find_spec() below and then be caught. But do
            # not allow other errors to be caught.
            if e.name is None or (e.name != pkg_name and
                    not pkg_name.startswith(e.name + ".")):
                raise
        # Warn if the module has already been imported under its normal name
        existing = sys.modules.get(mod_name)
        if existing is not None and not hasattr(existing, "__path__"):
            from warnings import warn
            msg = "{mod_name!r} found in sys.modules after import of " \
                "package {pkg_name!r}, but prior to execution of " \
                "{mod_name!r}; this may result in unpredictable " \
                "behaviour".format(mod_name=mod_name, pkg_name=pkg_name)
            warn(RuntimeWarning(msg))

    try:
        spec = importlib.util.find_spec(mod_name)
    except (ImportError, AttributeError, TypeError, ValueError) as ex:
        # This hack fixes an impedance mismatch between pkgutil and
        # importlib, where the latter raises other errors for cases where
        # pkgutil previously raised ImportError
        msg = "Error while finding module specification for {!r} ({}: {})"
        if mod_name.endswith(".py"):
            msg += (f". Try using '{mod_name[:-3]}' instead of "
                    f"'{mod_name}' as the module name.")
        raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex
    if spec is None:
        raise error("No module named %s" % mod_name)
    if spec.submodule_search_locations is not None:
        # A package: execute its __main__ submodule instead.
        if mod_name == "__main__" or mod_name.endswith(".__main__"):
            raise error("Cannot use package as __main__ module")
        try:
            pkg_main_name = mod_name + ".__main__"
            return _get_module_details(pkg_main_name, error)
        except error as e:
            if mod_name not in sys.modules:
                raise  # No module loaded; being a package is irrelevant
            raise error(("%s; %r is a package and cannot " +
                "be directly executed") %(e, mod_name))
    loader = spec.loader
    if loader is None:
        raise error("%r is a namespace package and cannot be executed"
                    % mod_name)
    try:
        code = loader.get_code(mod_name)
    except ImportError as e:
        raise error(format(e)) from e
    if code is None:
        raise error("No code object available for %s" % mod_name)
    return mod_name, spec, code
|
| 163 |
+
|
| 164 |
+
class _Error(Exception):
    """Error that _run_module_as_main() should report without a traceback"""
|
| 166 |
+
|
| 167 |
+
# XXX ncoghlan: Should this be documented and made public?
|
| 168 |
+
# (Current thoughts: don't repeat the mistake that lead to its
|
| 169 |
+
# creation when run_module() no longer met the needs of
|
| 170 |
+
# mainmodule.c, but couldn't be changed because it was public)
|
| 171 |
+
def _run_module_as_main(mod_name, alter_argv=True):
    """Runs the designated module in the __main__ namespace

    Note that the executed module will have full access to the
    __main__ namespace. If this is not desirable, the run_module()
    function should be used to run the module code in a fresh namespace.

    At the very least, these variables in __main__ will be overwritten:
        __name__
        __file__
        __cached__
        __loader__
        __package__
    """
    try:
        if alter_argv or mod_name != "__main__": # i.e. -m switch
            mod_name, mod_spec, code = _get_module_details(mod_name, _Error)
        else:          # i.e. directory or zipfile execution
            mod_name, mod_spec, code = _get_main_module_details(_Error)
    except _Error as exc:
        # Report lookup failures concisely, without a traceback.
        msg = "%s: %s" % (sys.executable, exc)
        sys.exit(msg)
    main_globals = sys.modules["__main__"].__dict__
    if alter_argv:
        sys.argv[0] = mod_spec.origin
    return _run_code(code, main_globals, None,
                     "__main__", mod_spec)
|
| 198 |
+
|
| 199 |
+
def run_module(mod_name, init_globals=None,
               run_name=None, alter_sys=False):
    """Execute a module's code without importing it.

    mod_name -- an absolute module name or package name.

    Optional arguments:
    init_globals -- dictionary used to pre-populate the module's
    globals dictionary before the code is executed.

    run_name -- if not None, this will be used for setting __name__;
    otherwise, __name__ will be set to mod_name + '.__main__' if the
    named module is a package and to just mod_name otherwise.

    alter_sys -- if True, sys.argv[0] is updated with the value of
    __file__ and sys.modules[__name__] is updated with a temporary
    module object for the module being executed. Both are
    restored to their original values before the function returns.

    Returns the resulting module globals dictionary.
    """
    mod_name, mod_spec, code = _get_module_details(mod_name)
    if run_name is None:
        run_name = mod_name
    if alter_sys:
        return _run_module_code(code, init_globals, run_name, mod_spec)
    else:
        # Leave the sys module alone
        return _run_code(code, {}, init_globals, run_name, mod_spec)
|
| 228 |
+
|
| 229 |
+
def _get_main_module_details(error=ImportError):
    # Helper that gives a nicer error message when attempting to
    # execute a zipfile or directory by invoking __main__.py
    # Also moves the standard __main__ out of the way so that the
    # preexisting __loader__ entry doesn't cause issues
    main_name = "__main__"
    saved_main = sys.modules[main_name]
    del sys.modules[main_name]
    try:
        return _get_module_details(main_name)
    except ImportError as exc:
        if main_name in str(exc):
            raise error("can't find %r module in %r" %
                        (main_name, sys.path[0])) from exc
        raise
    finally:
        # Always restore the real __main__ module, even on failure.
        sys.modules[main_name] = saved_main
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def _get_code_from_file(run_name, fname):
|
| 249 |
+
# Check for a compiled file first
|
| 250 |
+
from pkgutil import read_code
|
| 251 |
+
decoded_path = os.path.abspath(os.fsdecode(fname))
|
| 252 |
+
with io.open_code(decoded_path) as f:
|
| 253 |
+
code = read_code(f)
|
| 254 |
+
if code is None:
|
| 255 |
+
# That didn't work, so try it as normal source code
|
| 256 |
+
with io.open_code(decoded_path) as f:
|
| 257 |
+
code = compile(f.read(), fname, 'exec')
|
| 258 |
+
return code, fname
|
| 259 |
+
|
| 260 |
+
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location.

    path_name -- filesystem location of a Python script, zipfile,
    or directory containing a top level __main__.py script.

    Optional arguments:
    init_globals -- dictionary used to pre-populate the module's
    globals dictionary before the code is executed.

    run_name -- if not None, this will be used to set __name__;
    otherwise, '<run_path>' will be used for __name__.

    Returns the resulting module globals dictionary.
    """
    if run_name is None:
        run_name = "<run_path>"
    pkg_name = run_name.rpartition(".")[0]
    from pkgutil import get_importer
    importer = get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    is_NullImporter = False
    if type(importer).__module__ == 'imp':
        if type(importer).__name__ == 'NullImporter':
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name,
                                pkg_name=pkg_name, script_name=fname)
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            with _TempModule(run_name) as temp_module, \
                 _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                return _run_code(code, mod_globals, init_globals,
                                    run_name, mod_spec, pkg_name).copy()
        finally:
            try:
                sys.path.remove(path_name)
            except ValueError:
                pass
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
if __name__ == "__main__":
    # Run the module specified as the next command line argument
    if len(sys.argv) < 2:
        print("No module specified for execution", file=sys.stderr)
    else:
        del sys.argv[0]  # Make the requested module sys.argv[0]
        _run_module_as_main(sys.argv[0])
|
llava/lib/python3.10/sndhdr.py
ADDED
|
@@ -0,0 +1,257 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Routines to help recognizing sound files.
|
| 2 |
+
|
| 3 |
+
Function whathdr() recognizes various types of sound file headers.
|
| 4 |
+
It understands almost all headers that SOX can decode.
|
| 5 |
+
|
| 6 |
+
The return tuple contains the following items, in this order:
|
| 7 |
+
- file type (as SOX understands it)
|
| 8 |
+
- sampling rate (0 if unknown or hard to decode)
|
| 9 |
+
- number of channels (0 if unknown or hard to decode)
|
| 10 |
+
- number of frames in the file (-1 if unknown or hard to decode)
|
| 11 |
+
- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW
|
| 12 |
+
|
| 13 |
+
If the file doesn't have a recognizable type, it returns None.
|
| 14 |
+
If the file can't be opened, OSError is raised.
|
| 15 |
+
|
| 16 |
+
To compute the total time, divide the number of frames by the
|
| 17 |
+
sampling rate (a frame contains a sample for each channel).
|
| 18 |
+
|
| 19 |
+
Function what() calls whathdr(). (It used to also use some
|
| 20 |
+
heuristics for raw data, but this doesn't work very well.)
|
| 21 |
+
|
| 22 |
+
Finally, the function test() is a simple main program that calls
|
| 23 |
+
what() for all files mentioned on the argument list. For directory
|
| 24 |
+
arguments it calls what() for all files in that directory. Default
|
| 25 |
+
argument is "." (testing all files in the current directory). The
|
| 26 |
+
option -r tells it to recurse down directories found inside
|
| 27 |
+
explicitly given directories.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
# The file structure is top-down except that the test program and its
|
| 31 |
+
# subroutine come last.
|
| 32 |
+
|
| 33 |
+
# Public interface: what() guesses, whathdr() does the actual header parsing.
__all__ = ['what', 'whathdr']

from collections import namedtuple

# Record type returned by what()/whathdr(); per-field docs attached below.
SndHeaders = namedtuple('SndHeaders',
                        'filetype framerate nchannels nframes sampwidth')

SndHeaders.filetype.__doc__ = ("""The value for type indicates the data type
and will be one of the strings 'aifc', 'aiff', 'au','hcom',
'sndr', 'sndt', 'voc', 'wav', '8svx', 'sb', 'ub', or 'ul'.""")
SndHeaders.framerate.__doc__ = ("""The sampling_rate will be either the actual
value or 0 if unknown or difficult to decode.""")
SndHeaders.nchannels.__doc__ = ("""The number of channels or 0 if it cannot be
determined or if the value is difficult to decode.""")
SndHeaders.nframes.__doc__ = ("""The value for frames will be either the number
of frames or -1.""")
SndHeaders.sampwidth.__doc__ = ("""Either the sample size in bits or
'A' for A-LAW or 'U' for u-LAW.""")
|
| 51 |
+
|
| 52 |
+
def what(filename):
    """Guess the type of a sound file."""
    # whathdr() does all the work; what() exists for API symmetry
    # with the imghdr module.
    return whathdr(filename)
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def whathdr(filename):
    """Recognize sound headers."""
    with open(filename, 'rb') as f:
        # 512 bytes is enough header for every recognizer in `tests`.
        h = f.read(512)
        for tf in tests:
            res = tf(h, f)
            if res:
                return SndHeaders(*res)
        # No recognizer matched.
        return None
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
#-----------------------------------#
|
| 70 |
+
# Subroutines per sound header type #
|
| 71 |
+
#-----------------------------------#
|
| 72 |
+
|
| 73 |
+
# Registry of header-recognizer functions; whathdr() tries them in order.
tests = []

def test_aifc(h, f):
    # AIFF/AIFC: IFF 'FORM' container whose form type is at offset 8.
    import aifc
    if not h.startswith(b'FORM'):
        return None
    if h[8:12] == b'AIFC':
        fmt = 'aifc'
    elif h[8:12] == b'AIFF':
        fmt = 'aiff'
    else:
        return None
    # Let the aifc module parse the full header from the start of the file.
    f.seek(0)
    try:
        a = aifc.open(f, 'r')
    except (EOFError, aifc.Error):
        return None
    return (fmt, a.getframerate(), a.getnchannels(),
            a.getnframes(), 8 * a.getsampwidth())

tests.append(test_aifc)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def test_au(h, f):
    # Sun/NeXT audio: '.snd' magic means big-endian fields,
    # '\0ds.'/'dns.' mean little-endian.
    if h.startswith(b'.snd'):
        func = get_long_be
    elif h[:4] in (b'\0ds.', b'dns.'):
        func = get_long_le
    else:
        return None
    filetype = 'au'
    hdr_size = func(h[4:8])
    data_size = func(h[8:12])
    encoding = func(h[12:16])
    rate = func(h[16:20])
    nchannels = func(h[20:24])
    sample_size = 1 # default
    if encoding == 1:
        sample_bits = 'U'   # 8-bit u-LAW
    elif encoding == 2:
        sample_bits = 8     # 8-bit linear PCM
    elif encoding == 3:
        sample_bits = 16    # 16-bit linear PCM
        sample_size = 2
    else:
        sample_bits = '?'
    frame_size = sample_size * nchannels
    if frame_size:
        nframe = data_size / frame_size
    else:
        # Unknown channel count: frame count cannot be derived.
        nframe = -1
    return filetype, rate, nchannels, nframe, sample_bits

tests.append(test_au)
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def test_hcom(h, f):
|
| 130 |
+
if h[65:69] != b'FSSD' or h[128:132] != b'HCOM':
|
| 131 |
+
return None
|
| 132 |
+
divisor = get_long_be(h[144:148])
|
| 133 |
+
if divisor:
|
| 134 |
+
rate = 22050 / divisor
|
| 135 |
+
else:
|
| 136 |
+
rate = 0
|
| 137 |
+
return 'hcom', rate, 1, -1, 8
|
| 138 |
+
|
| 139 |
+
tests.append(test_hcom)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def test_voc(h, f):
|
| 143 |
+
if not h.startswith(b'Creative Voice File\032'):
|
| 144 |
+
return None
|
| 145 |
+
sbseek = get_short_le(h[20:22])
|
| 146 |
+
rate = 0
|
| 147 |
+
if 0 <= sbseek < 500 and h[sbseek] == 1:
|
| 148 |
+
ratecode = 256 - h[sbseek+4]
|
| 149 |
+
if ratecode:
|
| 150 |
+
rate = int(1000000.0 / ratecode)
|
| 151 |
+
return 'voc', rate, 1, -1, 8
|
| 152 |
+
|
| 153 |
+
tests.append(test_voc)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def test_wav(h, f):
|
| 157 |
+
import wave
|
| 158 |
+
# 'RIFF' <len> 'WAVE' 'fmt ' <len>
|
| 159 |
+
if not h.startswith(b'RIFF') or h[8:12] != b'WAVE' or h[12:16] != b'fmt ':
|
| 160 |
+
return None
|
| 161 |
+
f.seek(0)
|
| 162 |
+
try:
|
| 163 |
+
w = wave.open(f, 'r')
|
| 164 |
+
except (EOFError, wave.Error):
|
| 165 |
+
return None
|
| 166 |
+
return ('wav', w.getframerate(), w.getnchannels(),
|
| 167 |
+
w.getnframes(), 8*w.getsampwidth())
|
| 168 |
+
|
| 169 |
+
tests.append(test_wav)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def test_8svx(h, f):
|
| 173 |
+
if not h.startswith(b'FORM') or h[8:12] != b'8SVX':
|
| 174 |
+
return None
|
| 175 |
+
# Should decode it to get #channels -- assume always 1
|
| 176 |
+
return '8svx', 0, 1, 0, 8
|
| 177 |
+
|
| 178 |
+
tests.append(test_8svx)
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def test_sndt(h, f):
|
| 182 |
+
if h.startswith(b'SOUND'):
|
| 183 |
+
nsamples = get_long_le(h[8:12])
|
| 184 |
+
rate = get_short_le(h[20:22])
|
| 185 |
+
return 'sndt', rate, 1, nsamples, 8
|
| 186 |
+
|
| 187 |
+
tests.append(test_sndt)
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def test_sndr(h, f):
|
| 191 |
+
if h.startswith(b'\0\0'):
|
| 192 |
+
rate = get_short_le(h[2:4])
|
| 193 |
+
if 4000 <= rate <= 25000:
|
| 194 |
+
return 'sndr', rate, 1, -1, 8
|
| 195 |
+
|
| 196 |
+
tests.append(test_sndr)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
#-------------------------------------------#
|
| 200 |
+
# Subroutines to extract numbers from bytes #
|
| 201 |
+
#-------------------------------------------#
|
| 202 |
+
|
| 203 |
+
def get_long_be(b):
    """Decode a 4-byte big-endian unsigned integer from *b*."""
    value = 0
    for i in range(4):
        value = (value << 8) | b[i]
    return value
|
| 206 |
+
def get_long_le(b):
    """Decode a 4-byte little-endian unsigned integer from *b*."""
    value = 0
    for i in (3, 2, 1, 0):
        value = (value << 8) | b[i]
    return value
|
| 209 |
+
def get_short_be(b):
    """Decode a 2-byte big-endian unsigned integer from *b*."""
    value = 0
    for i in (0, 1):
        value = (value << 8) | b[i]
    return value
|
| 212 |
+
def get_short_le(b):
    """Decode a 2-byte little-endian unsigned integer from *b*."""
    value = 0
    for i in (1, 0):
        value = (value << 8) | b[i]
    return value
|
| 215 |
+
|
| 216 |
+
#--------------------#
|
| 217 |
+
# Small test program #
|
| 218 |
+
#--------------------#
|
| 219 |
+
|
| 220 |
+
def test():
    """Command-line driver: print the guessed type of each file on argv.

    An initial '-r' flag enables recursion into directories.  Exits with
    status 1 on KeyboardInterrupt.
    """
    import sys
    recursive = 0
    if sys.argv[1:] and sys.argv[1] == '-r':
        del sys.argv[1:2]
        recursive = 1
    try:
        # No filenames given means "the current directory".
        targets = sys.argv[1:] or ['.']
        testall(targets, recursive, 1)
    except KeyboardInterrupt:
        sys.stderr.write('\n[Interrupted]\n')
        sys.exit(1)
|
| 235 |
+
def testall(list, recursive, toplevel):
    """Print the guessed sound type of every file named in *list*.

    Directories are entered when *recursive* or *toplevel* is true;
    otherwise a '*** directory (use -r) ***' note is printed instead.
    """
    import sys
    import os
    for filename in list:
        if not os.path.isdir(filename):
            print(filename + ':', end=' ')
            # Flush so the label appears before a potentially slow probe.
            sys.stdout.flush()
            try:
                print(what(filename))
            except OSError:
                print('*** not found ***')
            continue
        print(filename + '/:', end=' ')
        if recursive or toplevel:
            print('recursing down:')
            import glob
            # glob.escape keeps wildcard characters in the directory
            # name from being interpreted as patterns.
            names = glob.glob(os.path.join(glob.escape(filename), '*'))
            testall(names, recursive, 0)
        else:
            print('*** directory (use -r) ***')
|
| 256 |
+
# Allow running the module directly as a small command-line tool:
#   python sndhdr.py [-r] [file ...]
if __name__ == '__main__':
    test()
|
llava/lib/python3.10/socket.py
ADDED
|
@@ -0,0 +1,972 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Wrapper module for _socket, providing some additional facilities
|
| 2 |
+
# implemented in Python.
|
| 3 |
+
|
| 4 |
+
"""\
|
| 5 |
+
This module provides socket operations and some related functions.
|
| 6 |
+
On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
|
| 7 |
+
On other systems, it only supports IP. Functions specific for a
|
| 8 |
+
socket are available as methods of the socket object.
|
| 9 |
+
|
| 10 |
+
Functions:
|
| 11 |
+
|
| 12 |
+
socket() -- create a new socket object
|
| 13 |
+
socketpair() -- create a pair of new socket objects [*]
|
| 14 |
+
fromfd() -- create a socket object from an open file descriptor [*]
|
| 15 |
+
send_fds() -- Send file descriptor to the socket.
|
| 16 |
+
recv_fds() -- Receive file descriptors from the socket.
|
| 17 |
+
fromshare() -- create a socket object from data received from socket.share() [*]
|
| 18 |
+
gethostname() -- return the current hostname
|
| 19 |
+
gethostbyname() -- map a hostname to its IP number
|
| 20 |
+
gethostbyaddr() -- map an IP number or hostname to DNS info
|
| 21 |
+
getservbyname() -- map a service name and a protocol name to a port number
|
| 22 |
+
getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
|
| 23 |
+
ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
|
| 24 |
+
htons(), htonl() -- convert 16, 32 bit int from host to network byte order
|
| 25 |
+
inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
|
| 26 |
+
inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
|
| 27 |
+
socket.getdefaulttimeout() -- get the default timeout value
|
| 28 |
+
socket.setdefaulttimeout() -- set the default timeout value
|
| 29 |
+
create_connection() -- connects to an address, with an optional timeout and
|
| 30 |
+
optional source address.
|
| 31 |
+
|
| 32 |
+
[*] not available on all platforms!
|
| 33 |
+
|
| 34 |
+
Special objects:
|
| 35 |
+
|
| 36 |
+
SocketType -- type object for socket objects
|
| 37 |
+
error -- exception raised for I/O errors
|
| 38 |
+
has_ipv6 -- boolean value indicating if IPv6 is supported
|
| 39 |
+
|
| 40 |
+
IntEnum constants:
|
| 41 |
+
|
| 42 |
+
AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
|
| 43 |
+
SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
|
| 44 |
+
|
| 45 |
+
Integer constants:
|
| 46 |
+
|
| 47 |
+
Many other constants may be defined; these may be used in calls to
|
| 48 |
+
the setsockopt() and getsockopt() methods.
|
| 49 |
+
"""
|
| 50 |
+
|
| 51 |
+
import _socket
|
| 52 |
+
from _socket import *
|
| 53 |
+
|
| 54 |
+
import os, sys, io, selectors
|
| 55 |
+
from enum import IntEnum, IntFlag
|
| 56 |
+
|
| 57 |
+
try:
|
| 58 |
+
import errno
|
| 59 |
+
except ImportError:
|
| 60 |
+
errno = None
|
| 61 |
+
EBADF = getattr(errno, 'EBADF', 9)
|
| 62 |
+
EAGAIN = getattr(errno, 'EAGAIN', 11)
|
| 63 |
+
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
|
| 64 |
+
|
| 65 |
+
__all__ = ["fromfd", "getfqdn", "create_connection", "create_server",
|
| 66 |
+
"has_dualstack_ipv6", "AddressFamily", "SocketKind"]
|
| 67 |
+
__all__.extend(os._get_exports_list(_socket))
|
| 68 |
+
|
| 69 |
+
# Set up the socket.AF_* socket.SOCK_* constants as members of IntEnums for
|
| 70 |
+
# nicer string representations.
|
| 71 |
+
# Note that _socket only knows about the integer values. The public interface
|
| 72 |
+
# in this module understands the enums and translates them back from integers
|
| 73 |
+
# where needed (e.g. .family property of a socket object).
|
| 74 |
+
|
| 75 |
+
IntEnum._convert_(
|
| 76 |
+
'AddressFamily',
|
| 77 |
+
__name__,
|
| 78 |
+
lambda C: C.isupper() and C.startswith('AF_'))
|
| 79 |
+
|
| 80 |
+
IntEnum._convert_(
|
| 81 |
+
'SocketKind',
|
| 82 |
+
__name__,
|
| 83 |
+
lambda C: C.isupper() and C.startswith('SOCK_'))
|
| 84 |
+
|
| 85 |
+
IntFlag._convert_(
|
| 86 |
+
'MsgFlag',
|
| 87 |
+
__name__,
|
| 88 |
+
lambda C: C.isupper() and C.startswith('MSG_'))
|
| 89 |
+
|
| 90 |
+
IntFlag._convert_(
|
| 91 |
+
'AddressInfo',
|
| 92 |
+
__name__,
|
| 93 |
+
lambda C: C.isupper() and C.startswith('AI_'))
|
| 94 |
+
|
| 95 |
+
_LOCALHOST = '127.0.0.1'
|
| 96 |
+
_LOCALHOST_V6 = '::1'
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _intenum_converter(value, enum_klass):
|
| 100 |
+
"""Convert a numeric family value to an IntEnum member.
|
| 101 |
+
|
| 102 |
+
If it's not a known member, return the numeric value itself.
|
| 103 |
+
"""
|
| 104 |
+
try:
|
| 105 |
+
return enum_klass(value)
|
| 106 |
+
except ValueError:
|
| 107 |
+
return value
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# WSA error codes
|
| 111 |
+
if sys.platform.lower().startswith("win"):
|
| 112 |
+
errorTab = {}
|
| 113 |
+
errorTab[6] = "Specified event object handle is invalid."
|
| 114 |
+
errorTab[8] = "Insufficient memory available."
|
| 115 |
+
errorTab[87] = "One or more parameters are invalid."
|
| 116 |
+
errorTab[995] = "Overlapped operation aborted."
|
| 117 |
+
errorTab[996] = "Overlapped I/O event object not in signaled state."
|
| 118 |
+
errorTab[997] = "Overlapped operation will complete later."
|
| 119 |
+
errorTab[10004] = "The operation was interrupted."
|
| 120 |
+
errorTab[10009] = "A bad file handle was passed."
|
| 121 |
+
errorTab[10013] = "Permission denied."
|
| 122 |
+
errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT
|
| 123 |
+
errorTab[10022] = "An invalid operation was attempted."
|
| 124 |
+
errorTab[10024] = "Too many open files."
|
| 125 |
+
errorTab[10035] = "The socket operation would block"
|
| 126 |
+
errorTab[10036] = "A blocking operation is already in progress."
|
| 127 |
+
errorTab[10037] = "Operation already in progress."
|
| 128 |
+
errorTab[10038] = "Socket operation on nonsocket."
|
| 129 |
+
errorTab[10039] = "Destination address required."
|
| 130 |
+
errorTab[10040] = "Message too long."
|
| 131 |
+
errorTab[10041] = "Protocol wrong type for socket."
|
| 132 |
+
errorTab[10042] = "Bad protocol option."
|
| 133 |
+
errorTab[10043] = "Protocol not supported."
|
| 134 |
+
errorTab[10044] = "Socket type not supported."
|
| 135 |
+
errorTab[10045] = "Operation not supported."
|
| 136 |
+
errorTab[10046] = "Protocol family not supported."
|
| 137 |
+
errorTab[10047] = "Address family not supported by protocol family."
|
| 138 |
+
errorTab[10048] = "The network address is in use."
|
| 139 |
+
errorTab[10049] = "Cannot assign requested address."
|
| 140 |
+
errorTab[10050] = "Network is down."
|
| 141 |
+
errorTab[10051] = "Network is unreachable."
|
| 142 |
+
errorTab[10052] = "Network dropped connection on reset."
|
| 143 |
+
errorTab[10053] = "Software caused connection abort."
|
| 144 |
+
errorTab[10054] = "The connection has been reset."
|
| 145 |
+
errorTab[10055] = "No buffer space available."
|
| 146 |
+
errorTab[10056] = "Socket is already connected."
|
| 147 |
+
errorTab[10057] = "Socket is not connected."
|
| 148 |
+
errorTab[10058] = "The network has been shut down."
|
| 149 |
+
errorTab[10059] = "Too many references."
|
| 150 |
+
errorTab[10060] = "The operation timed out."
|
| 151 |
+
errorTab[10061] = "Connection refused."
|
| 152 |
+
errorTab[10062] = "Cannot translate name."
|
| 153 |
+
errorTab[10063] = "The name is too long."
|
| 154 |
+
errorTab[10064] = "The host is down."
|
| 155 |
+
errorTab[10065] = "The host is unreachable."
|
| 156 |
+
errorTab[10066] = "Directory not empty."
|
| 157 |
+
errorTab[10067] = "Too many processes."
|
| 158 |
+
errorTab[10068] = "User quota exceeded."
|
| 159 |
+
errorTab[10069] = "Disk quota exceeded."
|
| 160 |
+
errorTab[10070] = "Stale file handle reference."
|
| 161 |
+
errorTab[10071] = "Item is remote."
|
| 162 |
+
errorTab[10091] = "Network subsystem is unavailable."
|
| 163 |
+
errorTab[10092] = "Winsock.dll version out of range."
|
| 164 |
+
errorTab[10093] = "Successful WSAStartup not yet performed."
|
| 165 |
+
errorTab[10101] = "Graceful shutdown in progress."
|
| 166 |
+
errorTab[10102] = "No more results from WSALookupServiceNext."
|
| 167 |
+
errorTab[10103] = "Call has been canceled."
|
| 168 |
+
errorTab[10104] = "Procedure call table is invalid."
|
| 169 |
+
errorTab[10105] = "Service provider is invalid."
|
| 170 |
+
errorTab[10106] = "Service provider failed to initialize."
|
| 171 |
+
errorTab[10107] = "System call failure."
|
| 172 |
+
errorTab[10108] = "Service not found."
|
| 173 |
+
errorTab[10109] = "Class type not found."
|
| 174 |
+
errorTab[10110] = "No more results from WSALookupServiceNext."
|
| 175 |
+
errorTab[10111] = "Call was canceled."
|
| 176 |
+
errorTab[10112] = "Database query was refused."
|
| 177 |
+
errorTab[11001] = "Host not found."
|
| 178 |
+
errorTab[11002] = "Nonauthoritative host not found."
|
| 179 |
+
errorTab[11003] = "This is a nonrecoverable error."
|
| 180 |
+
errorTab[11004] = "Valid name, no data record requested type."
|
| 181 |
+
errorTab[11005] = "QoS receivers."
|
| 182 |
+
errorTab[11006] = "QoS senders."
|
| 183 |
+
errorTab[11007] = "No QoS senders."
|
| 184 |
+
errorTab[11008] = "QoS no receivers."
|
| 185 |
+
errorTab[11009] = "QoS request confirmed."
|
| 186 |
+
errorTab[11010] = "QoS admission error."
|
| 187 |
+
errorTab[11011] = "QoS policy failure."
|
| 188 |
+
errorTab[11012] = "QoS bad style."
|
| 189 |
+
errorTab[11013] = "QoS bad object."
|
| 190 |
+
errorTab[11014] = "QoS traffic control error."
|
| 191 |
+
errorTab[11015] = "QoS generic error."
|
| 192 |
+
errorTab[11016] = "QoS service type error."
|
| 193 |
+
errorTab[11017] = "QoS flowspec error."
|
| 194 |
+
errorTab[11018] = "Invalid QoS provider buffer."
|
| 195 |
+
errorTab[11019] = "Invalid QoS filter style."
|
| 196 |
+
errorTab[11020] = "Invalid QoS filter style."
|
| 197 |
+
errorTab[11021] = "Incorrect QoS filter count."
|
| 198 |
+
errorTab[11022] = "Invalid QoS object length."
|
| 199 |
+
errorTab[11023] = "Incorrect QoS flow count."
|
| 200 |
+
errorTab[11024] = "Unrecognized QoS object."
|
| 201 |
+
errorTab[11025] = "Invalid QoS policy object."
|
| 202 |
+
errorTab[11026] = "Invalid QoS flow descriptor."
|
| 203 |
+
errorTab[11027] = "Invalid QoS provider-specific flowspec."
|
| 204 |
+
errorTab[11028] = "Invalid QoS provider-specific filterspec."
|
| 205 |
+
errorTab[11029] = "Invalid QoS shape discard mode object."
|
| 206 |
+
errorTab[11030] = "Invalid QoS shaping rate object."
|
| 207 |
+
errorTab[11031] = "Reserved policy QoS element type."
|
| 208 |
+
__all__.append("errorTab")
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
class _GiveupOnSendfile(Exception): pass
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
class socket(_socket.socket):
|
| 215 |
+
|
| 216 |
+
"""A subclass of _socket.socket adding the makefile() method."""
|
| 217 |
+
|
| 218 |
+
__slots__ = ["__weakref__", "_io_refs", "_closed"]
|
| 219 |
+
|
| 220 |
+
def __init__(self, family=-1, type=-1, proto=-1, fileno=None):
    # For user code address family and type values are IntEnum members, but
    # for the underlying _socket.socket they're just integers. The
    # constructor of _socket.socket converts the given argument to an
    # integer automatically.
    # -1 is a sentinel (not a real default) so the defaults are only
    # applied when no existing fd is being wrapped; the effective default
    # is an IPv4 TCP socket.
    if fileno is None:
        if family == -1:
            family = AF_INET
        if type == -1:
            type = SOCK_STREAM
        if proto == -1:
            proto = 0
    _socket.socket.__init__(self, family, type, proto, fileno)
    # _io_refs counts live SocketIO objects handed out by makefile();
    # close() defers the real close until this count reaches zero.
    self._io_refs = 0
    self._closed = False
| 236 |
+
def __enter__(self):
|
| 237 |
+
return self
|
| 238 |
+
|
| 239 |
+
def __exit__(self, *args):
|
| 240 |
+
if not self._closed:
|
| 241 |
+
self.close()
|
| 242 |
+
|
| 243 |
+
def __repr__(self):
|
| 244 |
+
"""Wrap __repr__() to reveal the real class name and socket
|
| 245 |
+
address(es).
|
| 246 |
+
"""
|
| 247 |
+
closed = getattr(self, '_closed', False)
|
| 248 |
+
s = "<%s.%s%s fd=%i, family=%s, type=%s, proto=%i" \
|
| 249 |
+
% (self.__class__.__module__,
|
| 250 |
+
self.__class__.__qualname__,
|
| 251 |
+
" [closed]" if closed else "",
|
| 252 |
+
self.fileno(),
|
| 253 |
+
self.family,
|
| 254 |
+
self.type,
|
| 255 |
+
self.proto)
|
| 256 |
+
if not closed:
|
| 257 |
+
try:
|
| 258 |
+
laddr = self.getsockname()
|
| 259 |
+
if laddr:
|
| 260 |
+
s += ", laddr=%s" % str(laddr)
|
| 261 |
+
except error:
|
| 262 |
+
pass
|
| 263 |
+
try:
|
| 264 |
+
raddr = self.getpeername()
|
| 265 |
+
if raddr:
|
| 266 |
+
s += ", raddr=%s" % str(raddr)
|
| 267 |
+
except error:
|
| 268 |
+
pass
|
| 269 |
+
s += '>'
|
| 270 |
+
return s
|
| 271 |
+
|
| 272 |
+
def __getstate__(self):
    # Sockets wrap an OS-level file descriptor, which has no meaningful
    # serialized form, so pickling is explicitly refused.
    raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
|
| 275 |
+
def dup(self):
|
| 276 |
+
"""dup() -> socket object
|
| 277 |
+
|
| 278 |
+
Duplicate the socket. Return a new socket object connected to the same
|
| 279 |
+
system resource. The new socket is non-inheritable.
|
| 280 |
+
"""
|
| 281 |
+
fd = dup(self.fileno())
|
| 282 |
+
sock = self.__class__(self.family, self.type, self.proto, fileno=fd)
|
| 283 |
+
sock.settimeout(self.gettimeout())
|
| 284 |
+
return sock
|
| 285 |
+
|
| 286 |
+
def accept(self):
|
| 287 |
+
"""accept() -> (socket object, address info)
|
| 288 |
+
|
| 289 |
+
Wait for an incoming connection. Return a new socket
|
| 290 |
+
representing the connection, and the address of the client.
|
| 291 |
+
For IP sockets, the address info is a pair (hostaddr, port).
|
| 292 |
+
"""
|
| 293 |
+
fd, addr = self._accept()
|
| 294 |
+
sock = socket(self.family, self.type, self.proto, fileno=fd)
|
| 295 |
+
# Issue #7995: if no default timeout is set and the listening
|
| 296 |
+
# socket had a (non-zero) timeout, force the new socket in blocking
|
| 297 |
+
# mode to override platform-specific socket flags inheritance.
|
| 298 |
+
if getdefaulttimeout() is None and self.gettimeout():
|
| 299 |
+
sock.setblocking(True)
|
| 300 |
+
return sock, addr
|
| 301 |
+
|
| 302 |
+
def makefile(self, mode="r", buffering=None, *,
|
| 303 |
+
encoding=None, errors=None, newline=None):
|
| 304 |
+
"""makefile(...) -> an I/O stream connected to the socket
|
| 305 |
+
|
| 306 |
+
The arguments are as for io.open() after the filename, except the only
|
| 307 |
+
supported mode values are 'r' (default), 'w' and 'b'.
|
| 308 |
+
"""
|
| 309 |
+
# XXX refactor to share code?
|
| 310 |
+
if not set(mode) <= {"r", "w", "b"}:
|
| 311 |
+
raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
|
| 312 |
+
writing = "w" in mode
|
| 313 |
+
reading = "r" in mode or not writing
|
| 314 |
+
assert reading or writing
|
| 315 |
+
binary = "b" in mode
|
| 316 |
+
rawmode = ""
|
| 317 |
+
if reading:
|
| 318 |
+
rawmode += "r"
|
| 319 |
+
if writing:
|
| 320 |
+
rawmode += "w"
|
| 321 |
+
raw = SocketIO(self, rawmode)
|
| 322 |
+
self._io_refs += 1
|
| 323 |
+
if buffering is None:
|
| 324 |
+
buffering = -1
|
| 325 |
+
if buffering < 0:
|
| 326 |
+
buffering = io.DEFAULT_BUFFER_SIZE
|
| 327 |
+
if buffering == 0:
|
| 328 |
+
if not binary:
|
| 329 |
+
raise ValueError("unbuffered streams must be binary")
|
| 330 |
+
return raw
|
| 331 |
+
if reading and writing:
|
| 332 |
+
buffer = io.BufferedRWPair(raw, raw, buffering)
|
| 333 |
+
elif reading:
|
| 334 |
+
buffer = io.BufferedReader(raw, buffering)
|
| 335 |
+
else:
|
| 336 |
+
assert writing
|
| 337 |
+
buffer = io.BufferedWriter(raw, buffering)
|
| 338 |
+
if binary:
|
| 339 |
+
return buffer
|
| 340 |
+
encoding = io.text_encoding(encoding)
|
| 341 |
+
text = io.TextIOWrapper(buffer, encoding, errors, newline)
|
| 342 |
+
text.mode = mode
|
| 343 |
+
return text
|
| 344 |
+
|
| 345 |
+
if hasattr(os, 'sendfile'):
|
| 346 |
+
|
| 347 |
+
def _sendfile_use_sendfile(self, file, offset=0, count=None):
|
| 348 |
+
self._check_sendfile_params(file, offset, count)
|
| 349 |
+
sockno = self.fileno()
|
| 350 |
+
try:
|
| 351 |
+
fileno = file.fileno()
|
| 352 |
+
except (AttributeError, io.UnsupportedOperation) as err:
|
| 353 |
+
raise _GiveupOnSendfile(err) # not a regular file
|
| 354 |
+
try:
|
| 355 |
+
fsize = os.fstat(fileno).st_size
|
| 356 |
+
except OSError as err:
|
| 357 |
+
raise _GiveupOnSendfile(err) # not a regular file
|
| 358 |
+
if not fsize:
|
| 359 |
+
return 0 # empty file
|
| 360 |
+
# Truncate to 1GiB to avoid OverflowError, see bpo-38319.
|
| 361 |
+
blocksize = min(count or fsize, 2 ** 30)
|
| 362 |
+
timeout = self.gettimeout()
|
| 363 |
+
if timeout == 0:
|
| 364 |
+
raise ValueError("non-blocking sockets are not supported")
|
| 365 |
+
# poll/select have the advantage of not requiring any
|
| 366 |
+
# extra file descriptor, contrarily to epoll/kqueue
|
| 367 |
+
# (also, they require a single syscall).
|
| 368 |
+
if hasattr(selectors, 'PollSelector'):
|
| 369 |
+
selector = selectors.PollSelector()
|
| 370 |
+
else:
|
| 371 |
+
selector = selectors.SelectSelector()
|
| 372 |
+
selector.register(sockno, selectors.EVENT_WRITE)
|
| 373 |
+
|
| 374 |
+
total_sent = 0
|
| 375 |
+
# localize variable access to minimize overhead
|
| 376 |
+
selector_select = selector.select
|
| 377 |
+
os_sendfile = os.sendfile
|
| 378 |
+
try:
|
| 379 |
+
while True:
|
| 380 |
+
if timeout and not selector_select(timeout):
|
| 381 |
+
raise TimeoutError('timed out')
|
| 382 |
+
if count:
|
| 383 |
+
blocksize = count - total_sent
|
| 384 |
+
if blocksize <= 0:
|
| 385 |
+
break
|
| 386 |
+
try:
|
| 387 |
+
sent = os_sendfile(sockno, fileno, offset, blocksize)
|
| 388 |
+
except BlockingIOError:
|
| 389 |
+
if not timeout:
|
| 390 |
+
# Block until the socket is ready to send some
|
| 391 |
+
# data; avoids hogging CPU resources.
|
| 392 |
+
selector_select()
|
| 393 |
+
continue
|
| 394 |
+
except OSError as err:
|
| 395 |
+
if total_sent == 0:
|
| 396 |
+
# We can get here for different reasons, the main
|
| 397 |
+
# one being 'file' is not a regular mmap(2)-like
|
| 398 |
+
# file, in which case we'll fall back on using
|
| 399 |
+
# plain send().
|
| 400 |
+
raise _GiveupOnSendfile(err)
|
| 401 |
+
raise err from None
|
| 402 |
+
else:
|
| 403 |
+
if sent == 0:
|
| 404 |
+
break # EOF
|
| 405 |
+
offset += sent
|
| 406 |
+
total_sent += sent
|
| 407 |
+
return total_sent
|
| 408 |
+
finally:
|
| 409 |
+
if total_sent > 0 and hasattr(file, 'seek'):
|
| 410 |
+
file.seek(offset)
|
| 411 |
+
else:
|
| 412 |
+
def _sendfile_use_sendfile(self, file, offset=0, count=None):
|
| 413 |
+
raise _GiveupOnSendfile(
|
| 414 |
+
"os.sendfile() not available on this platform")
|
| 415 |
+
|
| 416 |
+
def _sendfile_use_send(self, file, offset=0, count=None):
|
| 417 |
+
self._check_sendfile_params(file, offset, count)
|
| 418 |
+
if self.gettimeout() == 0:
|
| 419 |
+
raise ValueError("non-blocking sockets are not supported")
|
| 420 |
+
if offset:
|
| 421 |
+
file.seek(offset)
|
| 422 |
+
blocksize = min(count, 8192) if count else 8192
|
| 423 |
+
total_sent = 0
|
| 424 |
+
# localize variable access to minimize overhead
|
| 425 |
+
file_read = file.read
|
| 426 |
+
sock_send = self.send
|
| 427 |
+
try:
|
| 428 |
+
while True:
|
| 429 |
+
if count:
|
| 430 |
+
blocksize = min(count - total_sent, blocksize)
|
| 431 |
+
if blocksize <= 0:
|
| 432 |
+
break
|
| 433 |
+
data = memoryview(file_read(blocksize))
|
| 434 |
+
if not data:
|
| 435 |
+
break # EOF
|
| 436 |
+
while True:
|
| 437 |
+
try:
|
| 438 |
+
sent = sock_send(data)
|
| 439 |
+
except BlockingIOError:
|
| 440 |
+
continue
|
| 441 |
+
else:
|
| 442 |
+
total_sent += sent
|
| 443 |
+
if sent < len(data):
|
| 444 |
+
data = data[sent:]
|
| 445 |
+
else:
|
| 446 |
+
break
|
| 447 |
+
return total_sent
|
| 448 |
+
finally:
|
| 449 |
+
if total_sent > 0 and hasattr(file, 'seek'):
|
| 450 |
+
file.seek(offset + total_sent)
|
| 451 |
+
|
| 452 |
+
def _check_sendfile_params(self, file, offset, count):
|
| 453 |
+
if 'b' not in getattr(file, 'mode', 'b'):
|
| 454 |
+
raise ValueError("file should be opened in binary mode")
|
| 455 |
+
if not self.type & SOCK_STREAM:
|
| 456 |
+
raise ValueError("only SOCK_STREAM type sockets are supported")
|
| 457 |
+
if count is not None:
|
| 458 |
+
if not isinstance(count, int):
|
| 459 |
+
raise TypeError(
|
| 460 |
+
"count must be a positive integer (got {!r})".format(count))
|
| 461 |
+
if count <= 0:
|
| 462 |
+
raise ValueError(
|
| 463 |
+
"count must be a positive integer (got {!r})".format(count))
|
| 464 |
+
|
| 465 |
+
def sendfile(self, file, offset=0, count=None):
|
| 466 |
+
"""sendfile(file[, offset[, count]]) -> sent
|
| 467 |
+
|
| 468 |
+
Send a file until EOF is reached by using high-performance
|
| 469 |
+
os.sendfile() and return the total number of bytes which
|
| 470 |
+
were sent.
|
| 471 |
+
*file* must be a regular file object opened in binary mode.
|
| 472 |
+
If os.sendfile() is not available (e.g. Windows) or file is
|
| 473 |
+
not a regular file socket.send() will be used instead.
|
| 474 |
+
*offset* tells from where to start reading the file.
|
| 475 |
+
If specified, *count* is the total number of bytes to transmit
|
| 476 |
+
as opposed to sending the file until EOF is reached.
|
| 477 |
+
File position is updated on return or also in case of error in
|
| 478 |
+
which case file.tell() can be used to figure out the number of
|
| 479 |
+
bytes which were sent.
|
| 480 |
+
The socket must be of SOCK_STREAM type.
|
| 481 |
+
Non-blocking sockets are not supported.
|
| 482 |
+
"""
|
| 483 |
+
try:
|
| 484 |
+
return self._sendfile_use_sendfile(file, offset, count)
|
| 485 |
+
except _GiveupOnSendfile:
|
| 486 |
+
return self._sendfile_use_send(file, offset, count)
|
| 487 |
+
|
| 488 |
+
def _decref_socketios(self):
|
| 489 |
+
if self._io_refs > 0:
|
| 490 |
+
self._io_refs -= 1
|
| 491 |
+
if self._closed:
|
| 492 |
+
self.close()
|
| 493 |
+
|
| 494 |
+
def _real_close(self, _ss=_socket.socket):
|
| 495 |
+
# This function should not reference any globals. See issue #808164.
|
| 496 |
+
_ss.close(self)
|
| 497 |
+
|
| 498 |
+
def close(self):
|
| 499 |
+
# This function should not reference any globals. See issue #808164.
|
| 500 |
+
self._closed = True
|
| 501 |
+
if self._io_refs <= 0:
|
| 502 |
+
self._real_close()
|
| 503 |
+
|
| 504 |
+
def detach(self):
|
| 505 |
+
"""detach() -> file descriptor
|
| 506 |
+
|
| 507 |
+
Close the socket object without closing the underlying file descriptor.
|
| 508 |
+
The object cannot be used after this call, but the file descriptor
|
| 509 |
+
can be reused for other purposes. The file descriptor is returned.
|
| 510 |
+
"""
|
| 511 |
+
self._closed = True
|
| 512 |
+
return super().detach()
|
| 513 |
+
|
| 514 |
+
@property
|
| 515 |
+
def family(self):
|
| 516 |
+
"""Read-only access to the address family for this socket.
|
| 517 |
+
"""
|
| 518 |
+
return _intenum_converter(super().family, AddressFamily)
|
| 519 |
+
|
| 520 |
+
@property
|
| 521 |
+
def type(self):
|
| 522 |
+
"""Read-only access to the socket type.
|
| 523 |
+
"""
|
| 524 |
+
return _intenum_converter(super().type, SocketKind)
|
| 525 |
+
|
| 526 |
+
if os.name == 'nt':
|
| 527 |
+
def get_inheritable(self):
|
| 528 |
+
return os.get_handle_inheritable(self.fileno())
|
| 529 |
+
def set_inheritable(self, inheritable):
|
| 530 |
+
os.set_handle_inheritable(self.fileno(), inheritable)
|
| 531 |
+
else:
|
| 532 |
+
def get_inheritable(self):
|
| 533 |
+
return os.get_inheritable(self.fileno())
|
| 534 |
+
def set_inheritable(self, inheritable):
|
| 535 |
+
os.set_inheritable(self.fileno(), inheritable)
|
| 536 |
+
get_inheritable.__doc__ = "Get the inheritable flag of the socket"
|
| 537 |
+
set_inheritable.__doc__ = "Set the inheritable flag of the socket"
|
| 538 |
+
|
| 539 |
+
def fromfd(fd, family, type, proto=0):
|
| 540 |
+
""" fromfd(fd, family, type[, proto]) -> socket object
|
| 541 |
+
|
| 542 |
+
Create a socket object from a duplicate of the given file
|
| 543 |
+
descriptor. The remaining arguments are the same as for socket().
|
| 544 |
+
"""
|
| 545 |
+
nfd = dup(fd)
|
| 546 |
+
return socket(family, type, proto, nfd)
|
| 547 |
+
|
| 548 |
+
if hasattr(_socket.socket, "sendmsg"):
|
| 549 |
+
import array
|
| 550 |
+
|
| 551 |
+
def send_fds(sock, buffers, fds, flags=0, address=None):
|
| 552 |
+
""" send_fds(sock, buffers, fds[, flags[, address]]) -> integer
|
| 553 |
+
|
| 554 |
+
Send the list of file descriptors fds over an AF_UNIX socket.
|
| 555 |
+
"""
|
| 556 |
+
return sock.sendmsg(buffers, [(_socket.SOL_SOCKET,
|
| 557 |
+
_socket.SCM_RIGHTS, array.array("i", fds))])
|
| 558 |
+
__all__.append("send_fds")
|
| 559 |
+
|
| 560 |
+
if hasattr(_socket.socket, "recvmsg"):
|
| 561 |
+
import array
|
| 562 |
+
|
| 563 |
+
def recv_fds(sock, bufsize, maxfds, flags=0):
|
| 564 |
+
""" recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file
|
| 565 |
+
descriptors, msg_flags, address)
|
| 566 |
+
|
| 567 |
+
Receive up to maxfds file descriptors returning the message
|
| 568 |
+
data and a list containing the descriptors.
|
| 569 |
+
"""
|
| 570 |
+
# Array of ints
|
| 571 |
+
fds = array.array("i")
|
| 572 |
+
msg, ancdata, flags, addr = sock.recvmsg(bufsize,
|
| 573 |
+
_socket.CMSG_LEN(maxfds * fds.itemsize))
|
| 574 |
+
for cmsg_level, cmsg_type, cmsg_data in ancdata:
|
| 575 |
+
if (cmsg_level == _socket.SOL_SOCKET and cmsg_type == _socket.SCM_RIGHTS):
|
| 576 |
+
fds.frombytes(cmsg_data[:
|
| 577 |
+
len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
|
| 578 |
+
|
| 579 |
+
return msg, list(fds), flags, addr
|
| 580 |
+
__all__.append("recv_fds")
|
| 581 |
+
|
| 582 |
+
if hasattr(_socket.socket, "share"):
|
| 583 |
+
def fromshare(info):
|
| 584 |
+
""" fromshare(info) -> socket object
|
| 585 |
+
|
| 586 |
+
Create a socket object from the bytes object returned by
|
| 587 |
+
socket.share(pid).
|
| 588 |
+
"""
|
| 589 |
+
return socket(0, 0, 0, info)
|
| 590 |
+
__all__.append("fromshare")
|
| 591 |
+
|
| 592 |
+
# Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain.
|
| 593 |
+
# This is used if _socket doesn't natively provide socketpair. It's
|
| 594 |
+
# always defined so that it can be patched in for testing purposes.
|
| 595 |
+
def _fallback_socketpair(family=AF_INET, type=SOCK_STREAM, proto=0):
|
| 596 |
+
if family == AF_INET:
|
| 597 |
+
host = _LOCALHOST
|
| 598 |
+
elif family == AF_INET6:
|
| 599 |
+
host = _LOCALHOST_V6
|
| 600 |
+
else:
|
| 601 |
+
raise ValueError("Only AF_INET and AF_INET6 socket address families "
|
| 602 |
+
"are supported")
|
| 603 |
+
if type != SOCK_STREAM:
|
| 604 |
+
raise ValueError("Only SOCK_STREAM socket type is supported")
|
| 605 |
+
if proto != 0:
|
| 606 |
+
raise ValueError("Only protocol zero is supported")
|
| 607 |
+
|
| 608 |
+
# We create a connected TCP socket. Note the trick with
|
| 609 |
+
# setblocking(False) that prevents us from having to create a thread.
|
| 610 |
+
lsock = socket(family, type, proto)
|
| 611 |
+
try:
|
| 612 |
+
lsock.bind((host, 0))
|
| 613 |
+
lsock.listen()
|
| 614 |
+
# On IPv6, ignore flow_info and scope_id
|
| 615 |
+
addr, port = lsock.getsockname()[:2]
|
| 616 |
+
csock = socket(family, type, proto)
|
| 617 |
+
try:
|
| 618 |
+
csock.setblocking(False)
|
| 619 |
+
try:
|
| 620 |
+
csock.connect((addr, port))
|
| 621 |
+
except (BlockingIOError, InterruptedError):
|
| 622 |
+
pass
|
| 623 |
+
csock.setblocking(True)
|
| 624 |
+
ssock, _ = lsock.accept()
|
| 625 |
+
except:
|
| 626 |
+
csock.close()
|
| 627 |
+
raise
|
| 628 |
+
finally:
|
| 629 |
+
lsock.close()
|
| 630 |
+
|
| 631 |
+
# Authenticating avoids using a connection from something else
|
| 632 |
+
# able to connect to {host}:{port} instead of us.
|
| 633 |
+
# We expect only AF_INET and AF_INET6 families.
|
| 634 |
+
try:
|
| 635 |
+
if (
|
| 636 |
+
ssock.getsockname() != csock.getpeername()
|
| 637 |
+
or csock.getsockname() != ssock.getpeername()
|
| 638 |
+
):
|
| 639 |
+
raise ConnectionError("Unexpected peer connection")
|
| 640 |
+
except:
|
| 641 |
+
# getsockname() and getpeername() can fail
|
| 642 |
+
# if either socket isn't connected.
|
| 643 |
+
ssock.close()
|
| 644 |
+
csock.close()
|
| 645 |
+
raise
|
| 646 |
+
|
| 647 |
+
return (ssock, csock)
|
| 648 |
+
|
| 649 |
+
if hasattr(_socket, "socketpair"):
|
| 650 |
+
def socketpair(family=None, type=SOCK_STREAM, proto=0):
|
| 651 |
+
if family is None:
|
| 652 |
+
try:
|
| 653 |
+
family = AF_UNIX
|
| 654 |
+
except NameError:
|
| 655 |
+
family = AF_INET
|
| 656 |
+
a, b = _socket.socketpair(family, type, proto)
|
| 657 |
+
a = socket(family, type, proto, a.detach())
|
| 658 |
+
b = socket(family, type, proto, b.detach())
|
| 659 |
+
return a, b
|
| 660 |
+
|
| 661 |
+
else:
|
| 662 |
+
socketpair = _fallback_socketpair
|
| 663 |
+
__all__.append("socketpair")
|
| 664 |
+
|
| 665 |
+
socketpair.__doc__ = """socketpair([family[, type[, proto]]]) -> (socket object, socket object)
|
| 666 |
+
Create a pair of socket objects from the sockets returned by the platform
|
| 667 |
+
socketpair() function.
|
| 668 |
+
The arguments are the same as for socket() except the default family is AF_UNIX
|
| 669 |
+
if defined on the platform; otherwise, the default is AF_INET.
|
| 670 |
+
"""
|
| 671 |
+
|
| 672 |
+
_blocking_errnos = { EAGAIN, EWOULDBLOCK }
|
| 673 |
+
|
| 674 |
+
class SocketIO(io.RawIOBase):
|
| 675 |
+
|
| 676 |
+
"""Raw I/O implementation for stream sockets.
|
| 677 |
+
|
| 678 |
+
This class supports the makefile() method on sockets. It provides
|
| 679 |
+
the raw I/O interface on top of a socket object.
|
| 680 |
+
"""
|
| 681 |
+
|
| 682 |
+
# One might wonder why not let FileIO do the job instead. There are two
|
| 683 |
+
# main reasons why FileIO is not adapted:
|
| 684 |
+
# - it wouldn't work under Windows (where you can't used read() and
|
| 685 |
+
# write() on a socket handle)
|
| 686 |
+
# - it wouldn't work with socket timeouts (FileIO would ignore the
|
| 687 |
+
# timeout and consider the socket non-blocking)
|
| 688 |
+
|
| 689 |
+
# XXX More docs
|
| 690 |
+
|
| 691 |
+
def __init__(self, sock, mode):
|
| 692 |
+
if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
|
| 693 |
+
raise ValueError("invalid mode: %r" % mode)
|
| 694 |
+
io.RawIOBase.__init__(self)
|
| 695 |
+
self._sock = sock
|
| 696 |
+
if "b" not in mode:
|
| 697 |
+
mode += "b"
|
| 698 |
+
self._mode = mode
|
| 699 |
+
self._reading = "r" in mode
|
| 700 |
+
self._writing = "w" in mode
|
| 701 |
+
self._timeout_occurred = False
|
| 702 |
+
|
| 703 |
+
def readinto(self, b):
|
| 704 |
+
"""Read up to len(b) bytes into the writable buffer *b* and return
|
| 705 |
+
the number of bytes read. If the socket is non-blocking and no bytes
|
| 706 |
+
are available, None is returned.
|
| 707 |
+
|
| 708 |
+
If *b* is non-empty, a 0 return value indicates that the connection
|
| 709 |
+
was shutdown at the other end.
|
| 710 |
+
"""
|
| 711 |
+
self._checkClosed()
|
| 712 |
+
self._checkReadable()
|
| 713 |
+
if self._timeout_occurred:
|
| 714 |
+
raise OSError("cannot read from timed out object")
|
| 715 |
+
while True:
|
| 716 |
+
try:
|
| 717 |
+
return self._sock.recv_into(b)
|
| 718 |
+
except timeout:
|
| 719 |
+
self._timeout_occurred = True
|
| 720 |
+
raise
|
| 721 |
+
except error as e:
|
| 722 |
+
if e.errno in _blocking_errnos:
|
| 723 |
+
return None
|
| 724 |
+
raise
|
| 725 |
+
|
| 726 |
+
def write(self, b):
|
| 727 |
+
"""Write the given bytes or bytearray object *b* to the socket
|
| 728 |
+
and return the number of bytes written. This can be less than
|
| 729 |
+
len(b) if not all data could be written. If the socket is
|
| 730 |
+
non-blocking and no bytes could be written None is returned.
|
| 731 |
+
"""
|
| 732 |
+
self._checkClosed()
|
| 733 |
+
self._checkWritable()
|
| 734 |
+
try:
|
| 735 |
+
return self._sock.send(b)
|
| 736 |
+
except error as e:
|
| 737 |
+
# XXX what about EINTR?
|
| 738 |
+
if e.errno in _blocking_errnos:
|
| 739 |
+
return None
|
| 740 |
+
raise
|
| 741 |
+
|
| 742 |
+
def readable(self):
|
| 743 |
+
"""True if the SocketIO is open for reading.
|
| 744 |
+
"""
|
| 745 |
+
if self.closed:
|
| 746 |
+
raise ValueError("I/O operation on closed socket.")
|
| 747 |
+
return self._reading
|
| 748 |
+
|
| 749 |
+
def writable(self):
|
| 750 |
+
"""True if the SocketIO is open for writing.
|
| 751 |
+
"""
|
| 752 |
+
if self.closed:
|
| 753 |
+
raise ValueError("I/O operation on closed socket.")
|
| 754 |
+
return self._writing
|
| 755 |
+
|
| 756 |
+
def seekable(self):
|
| 757 |
+
"""True if the SocketIO is open for seeking.
|
| 758 |
+
"""
|
| 759 |
+
if self.closed:
|
| 760 |
+
raise ValueError("I/O operation on closed socket.")
|
| 761 |
+
return super().seekable()
|
| 762 |
+
|
| 763 |
+
def fileno(self):
|
| 764 |
+
"""Return the file descriptor of the underlying socket.
|
| 765 |
+
"""
|
| 766 |
+
self._checkClosed()
|
| 767 |
+
return self._sock.fileno()
|
| 768 |
+
|
| 769 |
+
@property
|
| 770 |
+
def name(self):
|
| 771 |
+
if not self.closed:
|
| 772 |
+
return self.fileno()
|
| 773 |
+
else:
|
| 774 |
+
return -1
|
| 775 |
+
|
| 776 |
+
@property
|
| 777 |
+
def mode(self):
|
| 778 |
+
return self._mode
|
| 779 |
+
|
| 780 |
+
def close(self):
|
| 781 |
+
"""Close the SocketIO object. This doesn't close the underlying
|
| 782 |
+
socket, except if all references to it have disappeared.
|
| 783 |
+
"""
|
| 784 |
+
if self.closed:
|
| 785 |
+
return
|
| 786 |
+
io.RawIOBase.close(self)
|
| 787 |
+
self._sock._decref_socketios()
|
| 788 |
+
self._sock = None
|
| 789 |
+
|
| 790 |
+
|
| 791 |
+
def getfqdn(name=''):
|
| 792 |
+
"""Get fully qualified domain name from name.
|
| 793 |
+
|
| 794 |
+
An empty argument is interpreted as meaning the local host.
|
| 795 |
+
|
| 796 |
+
First the hostname returned by gethostbyaddr() is checked, then
|
| 797 |
+
possibly existing aliases. In case no FQDN is available and `name`
|
| 798 |
+
was given, it is returned unchanged. If `name` was empty, '0.0.0.0' or '::',
|
| 799 |
+
hostname from gethostname() is returned.
|
| 800 |
+
"""
|
| 801 |
+
name = name.strip()
|
| 802 |
+
if not name or name in ('0.0.0.0', '::'):
|
| 803 |
+
name = gethostname()
|
| 804 |
+
try:
|
| 805 |
+
hostname, aliases, ipaddrs = gethostbyaddr(name)
|
| 806 |
+
except error:
|
| 807 |
+
pass
|
| 808 |
+
else:
|
| 809 |
+
aliases.insert(0, hostname)
|
| 810 |
+
for name in aliases:
|
| 811 |
+
if '.' in name:
|
| 812 |
+
break
|
| 813 |
+
else:
|
| 814 |
+
name = hostname
|
| 815 |
+
return name
|
| 816 |
+
|
| 817 |
+
|
| 818 |
+
_GLOBAL_DEFAULT_TIMEOUT = object()
|
| 819 |
+
|
| 820 |
+
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
|
| 821 |
+
source_address=None):
|
| 822 |
+
"""Connect to *address* and return the socket object.
|
| 823 |
+
|
| 824 |
+
Convenience function. Connect to *address* (a 2-tuple ``(host,
|
| 825 |
+
port)``) and return the socket object. Passing the optional
|
| 826 |
+
*timeout* parameter will set the timeout on the socket instance
|
| 827 |
+
before attempting to connect. If no *timeout* is supplied, the
|
| 828 |
+
global default timeout setting returned by :func:`getdefaulttimeout`
|
| 829 |
+
is used. If *source_address* is set it must be a tuple of (host, port)
|
| 830 |
+
for the socket to bind as a source address before making the connection.
|
| 831 |
+
A host of '' or port 0 tells the OS to use the default.
|
| 832 |
+
"""
|
| 833 |
+
|
| 834 |
+
host, port = address
|
| 835 |
+
err = None
|
| 836 |
+
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
|
| 837 |
+
af, socktype, proto, canonname, sa = res
|
| 838 |
+
sock = None
|
| 839 |
+
try:
|
| 840 |
+
sock = socket(af, socktype, proto)
|
| 841 |
+
if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
|
| 842 |
+
sock.settimeout(timeout)
|
| 843 |
+
if source_address:
|
| 844 |
+
sock.bind(source_address)
|
| 845 |
+
sock.connect(sa)
|
| 846 |
+
# Break explicitly a reference cycle
|
| 847 |
+
err = None
|
| 848 |
+
return sock
|
| 849 |
+
|
| 850 |
+
except error as _:
|
| 851 |
+
err = _
|
| 852 |
+
if sock is not None:
|
| 853 |
+
sock.close()
|
| 854 |
+
|
| 855 |
+
if err is not None:
|
| 856 |
+
try:
|
| 857 |
+
raise err
|
| 858 |
+
finally:
|
| 859 |
+
# Break explicitly a reference cycle
|
| 860 |
+
err = None
|
| 861 |
+
else:
|
| 862 |
+
raise error("getaddrinfo returns an empty list")
|
| 863 |
+
|
| 864 |
+
|
| 865 |
+
def has_dualstack_ipv6():
|
| 866 |
+
"""Return True if the platform supports creating a SOCK_STREAM socket
|
| 867 |
+
which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections.
|
| 868 |
+
"""
|
| 869 |
+
if not has_ipv6 \
|
| 870 |
+
or not hasattr(_socket, 'IPPROTO_IPV6') \
|
| 871 |
+
or not hasattr(_socket, 'IPV6_V6ONLY'):
|
| 872 |
+
return False
|
| 873 |
+
try:
|
| 874 |
+
with socket(AF_INET6, SOCK_STREAM) as sock:
|
| 875 |
+
sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
|
| 876 |
+
return True
|
| 877 |
+
except error:
|
| 878 |
+
return False
|
| 879 |
+
|
| 880 |
+
|
| 881 |
+
def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False,
|
| 882 |
+
dualstack_ipv6=False):
|
| 883 |
+
"""Convenience function which creates a SOCK_STREAM type socket
|
| 884 |
+
bound to *address* (a 2-tuple (host, port)) and return the socket
|
| 885 |
+
object.
|
| 886 |
+
|
| 887 |
+
*family* should be either AF_INET or AF_INET6.
|
| 888 |
+
*backlog* is the queue size passed to socket.listen().
|
| 889 |
+
*reuse_port* dictates whether to use the SO_REUSEPORT socket option.
|
| 890 |
+
*dualstack_ipv6*: if true and the platform supports it, it will
|
| 891 |
+
create an AF_INET6 socket able to accept both IPv4 or IPv6
|
| 892 |
+
connections. When false it will explicitly disable this option on
|
| 893 |
+
platforms that enable it by default (e.g. Linux).
|
| 894 |
+
|
| 895 |
+
>>> with create_server(('', 8000)) as server:
|
| 896 |
+
... while True:
|
| 897 |
+
... conn, addr = server.accept()
|
| 898 |
+
... # handle new connection
|
| 899 |
+
"""
|
| 900 |
+
if reuse_port and not hasattr(_socket, "SO_REUSEPORT"):
|
| 901 |
+
raise ValueError("SO_REUSEPORT not supported on this platform")
|
| 902 |
+
if dualstack_ipv6:
|
| 903 |
+
if not has_dualstack_ipv6():
|
| 904 |
+
raise ValueError("dualstack_ipv6 not supported on this platform")
|
| 905 |
+
if family != AF_INET6:
|
| 906 |
+
raise ValueError("dualstack_ipv6 requires AF_INET6 family")
|
| 907 |
+
sock = socket(family, SOCK_STREAM)
|
| 908 |
+
try:
|
| 909 |
+
# Note about Windows. We don't set SO_REUSEADDR because:
|
| 910 |
+
# 1) It's unnecessary: bind() will succeed even in case of a
|
| 911 |
+
# previous closed socket on the same address and still in
|
| 912 |
+
# TIME_WAIT state.
|
| 913 |
+
# 2) If set, another socket is free to bind() on the same
|
| 914 |
+
# address, effectively preventing this one from accepting
|
| 915 |
+
# connections. Also, it may set the process in a state where
|
| 916 |
+
# it'll no longer respond to any signals or graceful kills.
|
| 917 |
+
# See: https://learn.microsoft.com/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
|
| 918 |
+
if os.name not in ('nt', 'cygwin') and \
|
| 919 |
+
hasattr(_socket, 'SO_REUSEADDR'):
|
| 920 |
+
try:
|
| 921 |
+
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
|
| 922 |
+
except error:
|
| 923 |
+
# Fail later on bind(), for platforms which may not
|
| 924 |
+
# support this option.
|
| 925 |
+
pass
|
| 926 |
+
if reuse_port:
|
| 927 |
+
sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)
|
| 928 |
+
if has_ipv6 and family == AF_INET6:
|
| 929 |
+
if dualstack_ipv6:
|
| 930 |
+
sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
|
| 931 |
+
elif hasattr(_socket, "IPV6_V6ONLY") and \
|
| 932 |
+
hasattr(_socket, "IPPROTO_IPV6"):
|
| 933 |
+
sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 1)
|
| 934 |
+
try:
|
| 935 |
+
sock.bind(address)
|
| 936 |
+
except error as err:
|
| 937 |
+
msg = '%s (while attempting to bind on address %r)' % \
|
| 938 |
+
(err.strerror, address)
|
| 939 |
+
raise error(err.errno, msg) from None
|
| 940 |
+
if backlog is None:
|
| 941 |
+
sock.listen()
|
| 942 |
+
else:
|
| 943 |
+
sock.listen(backlog)
|
| 944 |
+
return sock
|
| 945 |
+
except error:
|
| 946 |
+
sock.close()
|
| 947 |
+
raise
|
| 948 |
+
|
| 949 |
+
|
| 950 |
+
def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
|
| 951 |
+
"""Resolve host and port into list of address info entries.
|
| 952 |
+
|
| 953 |
+
Translate the host/port argument into a sequence of 5-tuples that contain
|
| 954 |
+
all the necessary arguments for creating a socket connected to that service.
|
| 955 |
+
host is a domain name, a string representation of an IPv4/v6 address or
|
| 956 |
+
None. port is a string service name such as 'http', a numeric port number or
|
| 957 |
+
None. By passing None as the value of host and port, you can pass NULL to
|
| 958 |
+
the underlying C API.
|
| 959 |
+
|
| 960 |
+
The family, type and proto arguments can be optionally specified in order to
|
| 961 |
+
narrow the list of addresses returned. Passing zero as a value for each of
|
| 962 |
+
these arguments selects the full range of results.
|
| 963 |
+
"""
|
| 964 |
+
# We override this function since we want to translate the numeric family
|
| 965 |
+
# and socket type values to enum constants.
|
| 966 |
+
addrlist = []
|
| 967 |
+
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
|
| 968 |
+
af, socktype, proto, canonname, sa = res
|
| 969 |
+
addrlist.append((_intenum_converter(af, AddressFamily),
|
| 970 |
+
_intenum_converter(socktype, SocketKind),
|
| 971 |
+
proto, canonname, sa))
|
| 972 |
+
return addrlist
|
llava/lib/python3.10/sre_compile.py
ADDED
|
@@ -0,0 +1,808 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Secret Labs' Regular Expression Engine
|
| 3 |
+
#
|
| 4 |
+
# convert template to internal format
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
|
| 7 |
+
#
|
| 8 |
+
# See the sre.py file for information on usage and redistribution.
|
| 9 |
+
#
|
| 10 |
+
|
| 11 |
+
"""Internal support module for sre"""
|
| 12 |
+
|
| 13 |
+
import _sre
|
| 14 |
+
import sre_parse
|
| 15 |
+
from sre_constants import *
|
| 16 |
+
|
| 17 |
+
assert _sre.MAGIC == MAGIC, "SRE module mismatch"
|
| 18 |
+
|
| 19 |
+
_LITERAL_CODES = {LITERAL, NOT_LITERAL}
|
| 20 |
+
_REPEATING_CODES = {REPEAT, MIN_REPEAT, MAX_REPEAT}
|
| 21 |
+
_SUCCESS_CODES = {SUCCESS, FAILURE}
|
| 22 |
+
_ASSERT_CODES = {ASSERT, ASSERT_NOT}
|
| 23 |
+
_UNIT_CODES = _LITERAL_CODES | {ANY, IN}
|
| 24 |
+
|
| 25 |
+
# Sets of lowercase characters which have the same uppercase.
|
| 26 |
+
_equivalences = (
|
| 27 |
+
# LATIN SMALL LETTER I, LATIN SMALL LETTER DOTLESS I
|
| 28 |
+
(0x69, 0x131), # iı
|
| 29 |
+
# LATIN SMALL LETTER S, LATIN SMALL LETTER LONG S
|
| 30 |
+
(0x73, 0x17f), # sſ
|
| 31 |
+
# MICRO SIGN, GREEK SMALL LETTER MU
|
| 32 |
+
(0xb5, 0x3bc), # µμ
|
| 33 |
+
# COMBINING GREEK YPOGEGRAMMENI, GREEK SMALL LETTER IOTA, GREEK PROSGEGRAMMENI
|
| 34 |
+
(0x345, 0x3b9, 0x1fbe), # \u0345ιι
|
| 35 |
+
# GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS, GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
|
| 36 |
+
(0x390, 0x1fd3), # ΐΐ
|
| 37 |
+
# GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS, GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
|
| 38 |
+
(0x3b0, 0x1fe3), # ΰΰ
|
| 39 |
+
# GREEK SMALL LETTER BETA, GREEK BETA SYMBOL
|
| 40 |
+
(0x3b2, 0x3d0), # βϐ
|
| 41 |
+
# GREEK SMALL LETTER EPSILON, GREEK LUNATE EPSILON SYMBOL
|
| 42 |
+
(0x3b5, 0x3f5), # εϵ
|
| 43 |
+
# GREEK SMALL LETTER THETA, GREEK THETA SYMBOL
|
| 44 |
+
(0x3b8, 0x3d1), # θϑ
|
| 45 |
+
# GREEK SMALL LETTER KAPPA, GREEK KAPPA SYMBOL
|
| 46 |
+
(0x3ba, 0x3f0), # κϰ
|
| 47 |
+
# GREEK SMALL LETTER PI, GREEK PI SYMBOL
|
| 48 |
+
(0x3c0, 0x3d6), # πϖ
|
| 49 |
+
# GREEK SMALL LETTER RHO, GREEK RHO SYMBOL
|
| 50 |
+
(0x3c1, 0x3f1), # ρϱ
|
| 51 |
+
# GREEK SMALL LETTER FINAL SIGMA, GREEK SMALL LETTER SIGMA
|
| 52 |
+
(0x3c2, 0x3c3), # ςσ
|
| 53 |
+
# GREEK SMALL LETTER PHI, GREEK PHI SYMBOL
|
| 54 |
+
(0x3c6, 0x3d5), # φϕ
|
| 55 |
+
# CYRILLIC SMALL LETTER VE, CYRILLIC SMALL LETTER ROUNDED VE
|
| 56 |
+
(0x432, 0x1c80), # вᲀ
|
| 57 |
+
# CYRILLIC SMALL LETTER DE, CYRILLIC SMALL LETTER LONG-LEGGED DE
|
| 58 |
+
(0x434, 0x1c81), # дᲁ
|
| 59 |
+
# CYRILLIC SMALL LETTER O, CYRILLIC SMALL LETTER NARROW O
|
| 60 |
+
(0x43e, 0x1c82), # оᲂ
|
| 61 |
+
# CYRILLIC SMALL LETTER ES, CYRILLIC SMALL LETTER WIDE ES
|
| 62 |
+
(0x441, 0x1c83), # сᲃ
|
| 63 |
+
# CYRILLIC SMALL LETTER TE, CYRILLIC SMALL LETTER TALL TE, CYRILLIC SMALL LETTER THREE-LEGGED TE
|
| 64 |
+
(0x442, 0x1c84, 0x1c85), # тᲄᲅ
|
| 65 |
+
# CYRILLIC SMALL LETTER HARD SIGN, CYRILLIC SMALL LETTER TALL HARD SIGN
|
| 66 |
+
(0x44a, 0x1c86), # ъᲆ
|
| 67 |
+
# CYRILLIC SMALL LETTER YAT, CYRILLIC SMALL LETTER TALL YAT
|
| 68 |
+
(0x463, 0x1c87), # ѣᲇ
|
| 69 |
+
# CYRILLIC SMALL LETTER UNBLENDED UK, CYRILLIC SMALL LETTER MONOGRAPH UK
|
| 70 |
+
(0x1c88, 0xa64b), # ᲈꙋ
|
| 71 |
+
# LATIN SMALL LETTER S WITH DOT ABOVE, LATIN SMALL LETTER LONG S WITH DOT ABOVE
|
| 72 |
+
(0x1e61, 0x1e9b), # ṡẛ
|
| 73 |
+
# LATIN SMALL LIGATURE LONG S T, LATIN SMALL LIGATURE ST
|
| 74 |
+
(0xfb05, 0xfb06), # ſtst
|
| 75 |
+
)
|
| 76 |
+
|
| 77 |
+
# Maps the lowercase code to lowercase codes which have the same uppercase.
|
| 78 |
+
_ignorecase_fixes = {i: tuple(j for j in t if i != j)
|
| 79 |
+
for t in _equivalences for i in t}
|
| 80 |
+
|
| 81 |
+
def _combine_flags(flags, add_flags, del_flags,
|
| 82 |
+
TYPE_FLAGS=sre_parse.TYPE_FLAGS):
|
| 83 |
+
if add_flags & TYPE_FLAGS:
|
| 84 |
+
flags &= ~TYPE_FLAGS
|
| 85 |
+
return (flags | add_flags) & ~del_flags
|
| 86 |
+
|
| 87 |
+
def _compile(code, pattern, flags):
|
| 88 |
+
# internal: compile a (sub)pattern
|
| 89 |
+
emit = code.append
|
| 90 |
+
_len = len
|
| 91 |
+
LITERAL_CODES = _LITERAL_CODES
|
| 92 |
+
REPEATING_CODES = _REPEATING_CODES
|
| 93 |
+
SUCCESS_CODES = _SUCCESS_CODES
|
| 94 |
+
ASSERT_CODES = _ASSERT_CODES
|
| 95 |
+
iscased = None
|
| 96 |
+
tolower = None
|
| 97 |
+
fixes = None
|
| 98 |
+
if flags & SRE_FLAG_IGNORECASE and not flags & SRE_FLAG_LOCALE:
|
| 99 |
+
if flags & SRE_FLAG_UNICODE:
|
| 100 |
+
iscased = _sre.unicode_iscased
|
| 101 |
+
tolower = _sre.unicode_tolower
|
| 102 |
+
fixes = _ignorecase_fixes
|
| 103 |
+
else:
|
| 104 |
+
iscased = _sre.ascii_iscased
|
| 105 |
+
tolower = _sre.ascii_tolower
|
| 106 |
+
for op, av in pattern:
|
| 107 |
+
if op in LITERAL_CODES:
|
| 108 |
+
if not flags & SRE_FLAG_IGNORECASE:
|
| 109 |
+
emit(op)
|
| 110 |
+
emit(av)
|
| 111 |
+
elif flags & SRE_FLAG_LOCALE:
|
| 112 |
+
emit(OP_LOCALE_IGNORE[op])
|
| 113 |
+
emit(av)
|
| 114 |
+
elif not iscased(av):
|
| 115 |
+
emit(op)
|
| 116 |
+
emit(av)
|
| 117 |
+
else:
|
| 118 |
+
lo = tolower(av)
|
| 119 |
+
if not fixes: # ascii
|
| 120 |
+
emit(OP_IGNORE[op])
|
| 121 |
+
emit(lo)
|
| 122 |
+
elif lo not in fixes:
|
| 123 |
+
emit(OP_UNICODE_IGNORE[op])
|
| 124 |
+
emit(lo)
|
| 125 |
+
else:
|
| 126 |
+
emit(IN_UNI_IGNORE)
|
| 127 |
+
skip = _len(code); emit(0)
|
| 128 |
+
if op is NOT_LITERAL:
|
| 129 |
+
emit(NEGATE)
|
| 130 |
+
for k in (lo,) + fixes[lo]:
|
| 131 |
+
emit(LITERAL)
|
| 132 |
+
emit(k)
|
| 133 |
+
emit(FAILURE)
|
| 134 |
+
code[skip] = _len(code) - skip
|
| 135 |
+
elif op is IN:
|
| 136 |
+
charset, hascased = _optimize_charset(av, iscased, tolower, fixes)
|
| 137 |
+
if flags & SRE_FLAG_IGNORECASE and flags & SRE_FLAG_LOCALE:
|
| 138 |
+
emit(IN_LOC_IGNORE)
|
| 139 |
+
elif not hascased:
|
| 140 |
+
emit(IN)
|
| 141 |
+
elif not fixes: # ascii
|
| 142 |
+
emit(IN_IGNORE)
|
| 143 |
+
else:
|
| 144 |
+
emit(IN_UNI_IGNORE)
|
| 145 |
+
skip = _len(code); emit(0)
|
| 146 |
+
_compile_charset(charset, flags, code)
|
| 147 |
+
code[skip] = _len(code) - skip
|
| 148 |
+
elif op is ANY:
|
| 149 |
+
if flags & SRE_FLAG_DOTALL:
|
| 150 |
+
emit(ANY_ALL)
|
| 151 |
+
else:
|
| 152 |
+
emit(ANY)
|
| 153 |
+
elif op in REPEATING_CODES:
|
| 154 |
+
if flags & SRE_FLAG_TEMPLATE:
|
| 155 |
+
raise error("internal: unsupported template operator %r" % (op,))
|
| 156 |
+
if _simple(av[2]):
|
| 157 |
+
if op is MAX_REPEAT:
|
| 158 |
+
emit(REPEAT_ONE)
|
| 159 |
+
else:
|
| 160 |
+
emit(MIN_REPEAT_ONE)
|
| 161 |
+
skip = _len(code); emit(0)
|
| 162 |
+
emit(av[0])
|
| 163 |
+
emit(av[1])
|
| 164 |
+
_compile(code, av[2], flags)
|
| 165 |
+
emit(SUCCESS)
|
| 166 |
+
code[skip] = _len(code) - skip
|
| 167 |
+
else:
|
| 168 |
+
emit(REPEAT)
|
| 169 |
+
skip = _len(code); emit(0)
|
| 170 |
+
emit(av[0])
|
| 171 |
+
emit(av[1])
|
| 172 |
+
_compile(code, av[2], flags)
|
| 173 |
+
code[skip] = _len(code) - skip
|
| 174 |
+
if op is MAX_REPEAT:
|
| 175 |
+
emit(MAX_UNTIL)
|
| 176 |
+
else:
|
| 177 |
+
emit(MIN_UNTIL)
|
| 178 |
+
elif op is SUBPATTERN:
|
| 179 |
+
group, add_flags, del_flags, p = av
|
| 180 |
+
if group:
|
| 181 |
+
emit(MARK)
|
| 182 |
+
emit((group-1)*2)
|
| 183 |
+
# _compile_info(code, p, _combine_flags(flags, add_flags, del_flags))
|
| 184 |
+
_compile(code, p, _combine_flags(flags, add_flags, del_flags))
|
| 185 |
+
if group:
|
| 186 |
+
emit(MARK)
|
| 187 |
+
emit((group-1)*2+1)
|
| 188 |
+
elif op in SUCCESS_CODES:
|
| 189 |
+
emit(op)
|
| 190 |
+
elif op in ASSERT_CODES:
|
| 191 |
+
emit(op)
|
| 192 |
+
skip = _len(code); emit(0)
|
| 193 |
+
if av[0] >= 0:
|
| 194 |
+
emit(0) # look ahead
|
| 195 |
+
else:
|
| 196 |
+
lo, hi = av[1].getwidth()
|
| 197 |
+
if lo != hi:
|
| 198 |
+
raise error("look-behind requires fixed-width pattern")
|
| 199 |
+
emit(lo) # look behind
|
| 200 |
+
_compile(code, av[1], flags)
|
| 201 |
+
emit(SUCCESS)
|
| 202 |
+
code[skip] = _len(code) - skip
|
| 203 |
+
elif op is CALL:
|
| 204 |
+
emit(op)
|
| 205 |
+
skip = _len(code); emit(0)
|
| 206 |
+
_compile(code, av, flags)
|
| 207 |
+
emit(SUCCESS)
|
| 208 |
+
code[skip] = _len(code) - skip
|
| 209 |
+
elif op is AT:
|
| 210 |
+
emit(op)
|
| 211 |
+
if flags & SRE_FLAG_MULTILINE:
|
| 212 |
+
av = AT_MULTILINE.get(av, av)
|
| 213 |
+
if flags & SRE_FLAG_LOCALE:
|
| 214 |
+
av = AT_LOCALE.get(av, av)
|
| 215 |
+
elif flags & SRE_FLAG_UNICODE:
|
| 216 |
+
av = AT_UNICODE.get(av, av)
|
| 217 |
+
emit(av)
|
| 218 |
+
elif op is BRANCH:
|
| 219 |
+
emit(op)
|
| 220 |
+
tail = []
|
| 221 |
+
tailappend = tail.append
|
| 222 |
+
for av in av[1]:
|
| 223 |
+
skip = _len(code); emit(0)
|
| 224 |
+
# _compile_info(code, av, flags)
|
| 225 |
+
_compile(code, av, flags)
|
| 226 |
+
emit(JUMP)
|
| 227 |
+
tailappend(_len(code)); emit(0)
|
| 228 |
+
code[skip] = _len(code) - skip
|
| 229 |
+
emit(FAILURE) # end of branch
|
| 230 |
+
for tail in tail:
|
| 231 |
+
code[tail] = _len(code) - tail
|
| 232 |
+
elif op is CATEGORY:
|
| 233 |
+
emit(op)
|
| 234 |
+
if flags & SRE_FLAG_LOCALE:
|
| 235 |
+
av = CH_LOCALE[av]
|
| 236 |
+
elif flags & SRE_FLAG_UNICODE:
|
| 237 |
+
av = CH_UNICODE[av]
|
| 238 |
+
emit(av)
|
| 239 |
+
elif op is GROUPREF:
|
| 240 |
+
if not flags & SRE_FLAG_IGNORECASE:
|
| 241 |
+
emit(op)
|
| 242 |
+
elif flags & SRE_FLAG_LOCALE:
|
| 243 |
+
emit(GROUPREF_LOC_IGNORE)
|
| 244 |
+
elif not fixes: # ascii
|
| 245 |
+
emit(GROUPREF_IGNORE)
|
| 246 |
+
else:
|
| 247 |
+
emit(GROUPREF_UNI_IGNORE)
|
| 248 |
+
emit(av-1)
|
| 249 |
+
elif op is GROUPREF_EXISTS:
|
| 250 |
+
emit(op)
|
| 251 |
+
emit(av[0]-1)
|
| 252 |
+
skipyes = _len(code); emit(0)
|
| 253 |
+
_compile(code, av[1], flags)
|
| 254 |
+
if av[2]:
|
| 255 |
+
emit(JUMP)
|
| 256 |
+
skipno = _len(code); emit(0)
|
| 257 |
+
code[skipyes] = _len(code) - skipyes + 1
|
| 258 |
+
_compile(code, av[2], flags)
|
| 259 |
+
code[skipno] = _len(code) - skipno
|
| 260 |
+
else:
|
| 261 |
+
code[skipyes] = _len(code) - skipyes + 1
|
| 262 |
+
else:
|
| 263 |
+
raise error("internal: unsupported operand type %r" % (op,))
|
| 264 |
+
|
| 265 |
+
def _compile_charset(charset, flags, code):
|
| 266 |
+
# compile charset subprogram
|
| 267 |
+
emit = code.append
|
| 268 |
+
for op, av in charset:
|
| 269 |
+
emit(op)
|
| 270 |
+
if op is NEGATE:
|
| 271 |
+
pass
|
| 272 |
+
elif op is LITERAL:
|
| 273 |
+
emit(av)
|
| 274 |
+
elif op is RANGE or op is RANGE_UNI_IGNORE:
|
| 275 |
+
emit(av[0])
|
| 276 |
+
emit(av[1])
|
| 277 |
+
elif op is CHARSET:
|
| 278 |
+
code.extend(av)
|
| 279 |
+
elif op is BIGCHARSET:
|
| 280 |
+
code.extend(av)
|
| 281 |
+
elif op is CATEGORY:
|
| 282 |
+
if flags & SRE_FLAG_LOCALE:
|
| 283 |
+
emit(CH_LOCALE[av])
|
| 284 |
+
elif flags & SRE_FLAG_UNICODE:
|
| 285 |
+
emit(CH_UNICODE[av])
|
| 286 |
+
else:
|
| 287 |
+
emit(av)
|
| 288 |
+
else:
|
| 289 |
+
raise error("internal: unsupported set operator %r" % (op,))
|
| 290 |
+
emit(FAILURE)
|
| 291 |
+
|
| 292 |
+
def _optimize_charset(charset, iscased=None, fixup=None, fixes=None):
    # internal: optimize character set
    #
    # Builds a 256-entry (widened on demand) occupancy bitmap of the set,
    # then chooses the most compact representation: one or two
    # LITERAL/RANGE items, a 256-bit CHARSET bitmap, or a compressed
    # BIGCHARSET for non-Latin-1 sets.  Returns (items, hascased) where
    # hascased reports whether case-folding affected membership.
    out = []
    tail = []
    charmap = bytearray(256)
    hascased = False
    for op, av in charset:
        # The inner while/try loop retries the same item after the
        # charmap has been widened from 256 to 65536 entries; the
        # trailing `break` exits after one successful pass.
        while True:
            try:
                if op is LITERAL:
                    if fixup:
                        # Case-insensitive: record the folded code point
                        # plus any extra equivalents listed in `fixes`.
                        lo = fixup(av)
                        charmap[lo] = 1
                        if fixes and lo in fixes:
                            for k in fixes[lo]:
                                charmap[k] = 1
                        if not hascased and iscased(av):
                            hascased = True
                    else:
                        charmap[av] = 1
                elif op is RANGE:
                    r = range(av[0], av[1]+1)
                    if fixup:
                        if fixes:
                            for i in map(fixup, r):
                                charmap[i] = 1
                                if i in fixes:
                                    for k in fixes[i]:
                                        charmap[k] = 1
                        else:
                            for i in map(fixup, r):
                                charmap[i] = 1
                        if not hascased:
                            hascased = any(map(iscased, r))
                    else:
                        for i in r:
                            charmap[i] = 1
                elif op is NEGATE:
                    # NEGATE must stay first in the output item list.
                    out.append((op, av))
                else:
                    # Categories etc. cannot go into the bitmap; keep as-is.
                    tail.append((op, av))
            except IndexError:
                if len(charmap) == 256:
                    # character set contains non-UCS1 character codes
                    charmap += b'\0' * 0xff00
                    continue
                # Character set contains non-BMP character codes.
                # For range, all BMP characters in the range are already
                # proceeded.
                if fixup:
                    hascased = True
                    # For now, IN_UNI_IGNORE+LITERAL and
                    # IN_UNI_IGNORE+RANGE_UNI_IGNORE work for all non-BMP
                    # characters, because two characters (at least one of
                    # which is not in the BMP) match case-insensitively
                    # if and only if:
                    # 1) c1.lower() == c2.lower()
                    # 2) c1.lower() == c2 or c1.lower().upper() == c2
                    # Also, both c.lower() and c.lower().upper() are single
                    # characters for every non-BMP character.
                    if op is RANGE:
                        op = RANGE_UNI_IGNORE
                tail.append((op, av))
            break

    # compress character map: collect runs of consecutive set bits.
    runs = []
    q = 0
    while True:
        p = charmap.find(1, q)
        if p < 0:
            break
        if len(runs) >= 2:
            # More than two runs: a bitmap will be more compact.
            runs = None
            break
        q = charmap.find(0, p)
        if q < 0:
            runs.append((p, len(charmap)))
            break
        runs.append((p, q))
    if runs is not None:
        # use literal/range
        for p, q in runs:
            if q - p == 1:
                out.append((LITERAL, p))
            else:
                out.append((RANGE, (p, q - 1)))
        out += tail
        # if the case was changed or new representation is more compact
        if hascased or len(out) < len(charset):
            return out, hascased
        # else original character set is good enough
        return charset, hascased

    # use bitmap
    if len(charmap) == 256:
        data = _mk_bitmap(charmap)
        out.append((CHARSET, data))
        out += tail
        return out, hascased

    # To represent a big charset, first a bitmap of all characters in the
    # set is constructed. Then, this bitmap is sliced into chunks of 256
    # characters, duplicate chunks are eliminated, and each chunk is
    # given a number. In the compiled expression, the charset is
    # represented by a 32-bit word sequence, consisting of one word for
    # the number of different chunks, a sequence of 256 bytes (64 words)
    # of chunk numbers indexed by their original chunk position, and a
    # sequence of 256-bit chunks (8 words each).

    # Compression is normally good: in a typical charset, large ranges of
    # Unicode will be either completely excluded (e.g. if only cyrillic
    # letters are to be matched), or completely included (e.g. if large
    # subranges of Kanji match). These ranges will be represented by
    # chunks of all one-bits or all zero-bits.

    # Matching can be also done efficiently: the more significant byte of
    # the Unicode character is an index into the chunk number, and the
    # less significant byte is a bit index in the chunk (just like the
    # CHARSET matching).

    charmap = bytes(charmap) # should be hashable
    comps = {}
    mapping = bytearray(256)
    block = 0
    data = bytearray()
    for i in range(0, 65536, 256):
        chunk = charmap[i: i + 256]
        if chunk in comps:
            mapping[i // 256] = comps[chunk]
        else:
            mapping[i // 256] = comps[chunk] = block
            block += 1
            data += chunk
    data = _mk_bitmap(data)
    data[0:0] = [block] + _bytes_to_codes(mapping)
    out.append((BIGCHARSET, data))
    out += tail
    return out, hascased
|
| 431 |
+
|
| 432 |
+
_CODEBITS = _sre.CODESIZE * 8
|
| 433 |
+
MAXCODE = (1 << _CODEBITS) - 1
|
| 434 |
+
_BITS_TRANS = b'0' + b'1' * 255
|
| 435 |
+
def _mk_bitmap(bits, _CODEBITS=_CODEBITS, _int=int):
|
| 436 |
+
s = bits.translate(_BITS_TRANS)[::-1]
|
| 437 |
+
return [_int(s[i - _CODEBITS: i], 2)
|
| 438 |
+
for i in range(len(s), 0, -_CODEBITS)]
|
| 439 |
+
|
| 440 |
+
def _bytes_to_codes(b):
|
| 441 |
+
# Convert block indices to word array
|
| 442 |
+
a = memoryview(b).cast('I')
|
| 443 |
+
assert a.itemsize == _sre.CODESIZE
|
| 444 |
+
assert len(a) * a.itemsize == len(b)
|
| 445 |
+
return a.tolist()
|
| 446 |
+
|
| 447 |
+
def _simple(p):
    """Return True if subpattern *p* is a single "unit" operation.

    A one-element subpattern counts as simple when it is a unit opcode,
    or an unnamed SUBPATTERN whose own body is itself simple.
    """
    if len(p) != 1:
        return False
    op, av = p[0]
    if op is not SUBPATTERN:
        return op in _UNIT_CODES
    # Unnamed group: simple iff its contents are.
    return av[0] is None and _simple(av[-1])
|
| 455 |
+
|
| 456 |
+
def _generate_overlap_table(prefix):
|
| 457 |
+
"""
|
| 458 |
+
Generate an overlap table for the following prefix.
|
| 459 |
+
An overlap table is a table of the same size as the prefix which
|
| 460 |
+
informs about the potential self-overlap for each index in the prefix:
|
| 461 |
+
- if overlap[i] == 0, prefix[i:] can't overlap prefix[0:...]
|
| 462 |
+
- if overlap[i] == k with 0 < k <= i, prefix[i-k+1:i+1] overlaps with
|
| 463 |
+
prefix[0:k]
|
| 464 |
+
"""
|
| 465 |
+
table = [0] * len(prefix)
|
| 466 |
+
for i in range(1, len(prefix)):
|
| 467 |
+
idx = table[i - 1]
|
| 468 |
+
while prefix[i] != prefix[idx]:
|
| 469 |
+
if idx == 0:
|
| 470 |
+
table[i] = 0
|
| 471 |
+
break
|
| 472 |
+
idx = table[idx - 1]
|
| 473 |
+
else:
|
| 474 |
+
table[i] = idx + 1
|
| 475 |
+
return table
|
| 476 |
+
|
| 477 |
+
def _get_iscased(flags):
|
| 478 |
+
if not flags & SRE_FLAG_IGNORECASE:
|
| 479 |
+
return None
|
| 480 |
+
elif flags & SRE_FLAG_UNICODE:
|
| 481 |
+
return _sre.unicode_iscased
|
| 482 |
+
else:
|
| 483 |
+
return _sre.ascii_iscased
|
| 484 |
+
|
| 485 |
+
def _get_literal_prefix(pattern, flags):
    # look for literal prefix
    #
    # Returns (prefix, prefix_skip, got_all): the literal code points at
    # the start of the pattern, the prefix offset of the first marked
    # group (None if no group starts inside the prefix), and whether the
    # whole pattern was consumed (i.e. the pattern IS the prefix).
    prefix = []
    prefixappend = prefix.append
    prefix_skip = None
    iscased = _get_iscased(flags)
    for op, av in pattern.data:
        if op is LITERAL:
            # A cased literal under IGNORECASE can't be a fixed prefix.
            if iscased and iscased(av):
                break
            prefixappend(av)
        elif op is SUBPATTERN:
            group, add_flags, del_flags, p = av
            flags1 = _combine_flags(flags, add_flags, del_flags)
            # Locale-dependent case folding defeats prefix extraction.
            if flags1 & SRE_FLAG_IGNORECASE and flags1 & SRE_FLAG_LOCALE:
                break
            # Recurse into the group body with its effective flags.
            prefix1, prefix_skip1, got_all = _get_literal_prefix(p, flags1)
            if prefix_skip is None:
                # Record where the first capturing group starts.
                if group is not None:
                    prefix_skip = len(prefix)
                elif prefix_skip1 is not None:
                    prefix_skip = len(prefix) + prefix_skip1
            prefix.extend(prefix1)
            if not got_all:
                break
        else:
            break
    else:
        # Loop ran to completion: the entire pattern is literal.
        return prefix, prefix_skip, True
    return prefix, prefix_skip, False
|
| 515 |
+
|
| 516 |
+
def _get_charset_prefix(pattern, flags):
    # Return a charset item list that every match of *pattern* must start
    # with, or None when no such single-character prefix exists.
    # Unwraps leading SUBPATTERN nodes first, folding their flags in.
    while True:
        if not pattern.data:
            return None
        op, av = pattern.data[0]
        if op is not SUBPATTERN:
            break
        # Descend into the group body; note `pattern` is rebound here.
        group, add_flags, del_flags, pattern = av
        flags = _combine_flags(flags, add_flags, del_flags)
        if flags & SRE_FLAG_IGNORECASE and flags & SRE_FLAG_LOCALE:
            return None

    iscased = _get_iscased(flags)
    if op is LITERAL:
        if iscased and iscased(av):
            return None
        return [(op, av)]
    elif op is BRANCH:
        # Every alternative must begin with a caseless literal.
        charset = []
        charsetappend = charset.append
        for p in av[1]:
            if not p:
                return None
            op, av = p[0]
            if op is LITERAL and not (iscased and iscased(av)):
                charsetappend((op, av))
            else:
                return None
        return charset
    elif op is IN:
        charset = av
        if iscased:
            # Reject sets whose membership could change under folding.
            for op, av in charset:
                if op is LITERAL:
                    if iscased(av):
                        return None
                elif op is RANGE:
                    if av[1] > 0xffff:
                        return None
                    if any(map(iscased, range(av[0], av[1]+1))):
                        return None
        return charset
    return None
|
| 559 |
+
|
| 560 |
+
def _compile_info(code, pattern, flags):
    # internal: compile an info block. in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
    lo, hi = pattern.getwidth()
    if hi > MAXCODE:
        hi = MAXCODE
    if lo == 0:
        # Pattern can match the empty string: emit a minimal info block
        # (no prefix/charset speedups are possible).
        code.extend([INFO, 4, 0, lo, hi])
        return
    # look for a literal prefix
    prefix = []
    prefix_skip = 0
    charset = [] # not used
    if not (flags & SRE_FLAG_IGNORECASE and flags & SRE_FLAG_LOCALE):
        # look for literal prefix
        prefix, prefix_skip, got_all = _get_literal_prefix(pattern, flags)
        # if no prefix, look for charset prefix
        if not prefix:
            charset = _get_charset_prefix(pattern, flags)
##     if prefix:
##         print("*** PREFIX", prefix, prefix_skip)
##     if charset:
##         print("*** CHARSET", charset)
    # add an info block
    emit = code.append
    emit(INFO)
    # Reserve a slot for the block length; patched at the end.
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        if prefix_skip is None and got_all:
            mask = mask | SRE_INFO_LITERAL
    elif charset:
        mask = mask | SRE_INFO_CHARSET
    emit(mask)
    # pattern length
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    emit(min(hi, MAXCODE))
    # add literal prefix
    if prefix:
        emit(len(prefix)) # length
        if prefix_skip is None:
            prefix_skip = len(prefix)
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table
        code.extend(_generate_overlap_table(prefix))
    elif charset:
        charset, hascased = _optimize_charset(charset)
        assert not hascased
        _compile_charset(charset, flags, code)
    # Back-patch the reserved slot with the info block's length.
    code[skip] = len(code) - skip
|
| 618 |
+
|
| 619 |
+
def isstring(obj):
    """Return True if *obj* is a text or bytes pattern source."""
    return isinstance(obj, str) or isinstance(obj, bytes)
|
| 621 |
+
|
| 622 |
+
def _code(p, flags):
    """Compile parsed pattern *p* into the flat _sre code-word list."""

    # Pattern-level inline flags take effect alongside the caller's.
    flags = p.state.flags | flags
    code = []

    # compile info block
    _compile_info(code, p, flags)

    # compile the pattern
    _compile(code, p.data, flags)

    code.append(SUCCESS)

    return code
|
| 636 |
+
|
| 637 |
+
def _hex_code(code):
|
| 638 |
+
return '[%s]' % ', '.join('%#0*x' % (_sre.CODESIZE*2+2, x) for x in code)
|
| 639 |
+
|
| 640 |
+
def dis(code):
    """Print a human-readable disassembly of compiled _sre code words."""
    import sys

    labels = set()      # offsets that are jump targets (shown with ':')
    level = 0           # current nesting depth, drives indentation
    offset_width = len(str(len(code) - 1))

    def dis_(start, end):
        # Disassemble code[start:end] at one nesting level deeper.
        def print_(*args, to=None):
            # Print one instruction line; `to` marks a jump target.
            if to is not None:
                labels.add(to)
                args += ('(to %d)' % (to,),)
            print('%*d%s ' % (offset_width, start, ':' if start in labels else '.'),
                  end='  '*(level-1))
            print(*args)

        def print_2(*args):
            # Print a continuation line aligned under the instruction.
            print(end=' '*(offset_width + 2*level))
            print(*args)

        nonlocal level
        level += 1
        i = start
        while i < end:
            start = i
            op = code[i]
            i += 1
            op = OPCODES[op]
            if op in (SUCCESS, FAILURE, ANY, ANY_ALL,
                      MAX_UNTIL, MIN_UNTIL, NEGATE):
                # Zero-operand opcodes.
                print_(op)
            elif op in (LITERAL, NOT_LITERAL,
                        LITERAL_IGNORE, NOT_LITERAL_IGNORE,
                        LITERAL_UNI_IGNORE, NOT_LITERAL_UNI_IGNORE,
                        LITERAL_LOC_IGNORE, NOT_LITERAL_LOC_IGNORE):
                arg = code[i]
                i += 1
                print_(op, '%#02x (%r)' % (arg, chr(arg)))
            elif op is AT:
                arg = code[i]
                i += 1
                arg = str(ATCODES[arg])
                assert arg[:3] == 'AT_'
                print_(op, arg[3:])
            elif op is CATEGORY:
                arg = code[i]
                i += 1
                arg = str(CHCODES[arg])
                assert arg[:9] == 'CATEGORY_'
                print_(op, arg[9:])
            elif op in (IN, IN_IGNORE, IN_UNI_IGNORE, IN_LOC_IGNORE):
                # Charset subprogram follows; `skip` spans it.
                skip = code[i]
                print_(op, skip, to=i+skip)
                dis_(i+1, i+skip)
                i += skip
            elif op in (RANGE, RANGE_UNI_IGNORE):
                lo, hi = code[i: i+2]
                i += 2
                print_(op, '%#02x %#02x (%r-%r)' % (lo, hi, chr(lo), chr(hi)))
            elif op is CHARSET:
                print_(op, _hex_code(code[i: i + 256//_CODEBITS]))
                i += 256//_CODEBITS
            elif op is BIGCHARSET:
                # One count word, 256 mapping bytes, then `arg` chunks.
                arg = code[i]
                i += 1
                mapping = list(b''.join(x.to_bytes(_sre.CODESIZE, sys.byteorder)
                                        for x in code[i: i + 256//_sre.CODESIZE]))
                print_(op, arg, mapping)
                i += 256//_sre.CODESIZE
                level += 1
                for j in range(arg):
                    print_2(_hex_code(code[i: i + 256//_CODEBITS]))
                    i += 256//_CODEBITS
                level -= 1
            elif op in (MARK, GROUPREF, GROUPREF_IGNORE, GROUPREF_UNI_IGNORE,
                        GROUPREF_LOC_IGNORE):
                arg = code[i]
                i += 1
                print_(op, arg)
            elif op is JUMP:
                skip = code[i]
                print_(op, skip, to=i+skip)
                i += 1
            elif op is BRANCH:
                # Alternatives are chained by skip words; 0 terminates.
                skip = code[i]
                print_(op, skip, to=i+skip)
                while skip:
                    dis_(i+1, i+skip)
                    i += skip
                    start = i
                    skip = code[i]
                    if skip:
                        print_('branch', skip, to=i+skip)
                    else:
                        print_(FAILURE)
                i += 1
            elif op in (REPEAT, REPEAT_ONE, MIN_REPEAT_ONE):
                skip, min, max = code[i: i+3]
                if max == MAXREPEAT:
                    max = 'MAXREPEAT'
                print_(op, skip, min, max, to=i+skip)
                dis_(i+3, i+skip)
                i += skip
            elif op is GROUPREF_EXISTS:
                arg, skip = code[i: i+2]
                print_(op, arg, skip, to=i+skip)
                i += 2
            elif op in (ASSERT, ASSERT_NOT):
                skip, arg = code[i: i+2]
                print_(op, skip, arg, to=i+skip)
                dis_(i+2, i+skip)
                i += skip
            elif op is INFO:
                skip, flags, min, max = code[i: i+4]
                if max == MAXREPEAT:
                    max = 'MAXREPEAT'
                print_(op, skip, bin(flags), min, max, to=i+skip)
                start = i+4
                if flags & SRE_INFO_PREFIX:
                    # Prefix layout: length, skip, code points, overlap table.
                    prefix_len, prefix_skip = code[i+4: i+6]
                    print_2('  prefix_skip', prefix_skip)
                    start = i + 6
                    prefix = code[start: start+prefix_len]
                    print_2('  prefix',
                            '[%s]' % ', '.join('%#02x' % x for x in prefix),
                            '(%r)' % ''.join(map(chr, prefix)))
                    start += prefix_len
                    print_2('  overlap', code[start: start+prefix_len])
                    start += prefix_len
                if flags & SRE_INFO_CHARSET:
                    level += 1
                    print_2('in')
                    dis_(start, i+skip)
                    level -= 1
                i += skip
            else:
                raise ValueError(op)

        level -= 1

    dis_(0, len(code))
|
| 781 |
+
|
| 782 |
+
|
| 783 |
+
def compile(p, flags=0):
    # internal: convert pattern list to internal format
    #
    # *p* may be a pattern string/bytes (parsed here) or an already
    # parsed SubPattern.  Returns a compiled _sre pattern object.

    if isstring(p):
        pattern = p
        p = sre_parse.parse(p, flags)
    else:
        # Pre-parsed input: the original source text is unavailable.
        pattern = None

    code = _code(p, flags)

    if flags & SRE_FLAG_DEBUG:
        print()
        dis(code)

    # map in either direction
    groupindex = p.state.groupdict
    indexgroup = [None] * p.state.groups
    for k, i in groupindex.items():
        indexgroup[i] = k

    return _sre.compile(
        pattern, flags | p.state.flags, code,
        p.state.groups-1,
        groupindex, tuple(indexgroup)
        )
|
llava/lib/python3.10/struct.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Thin pure-Python facade over the C accelerator module `_struct`.
__all__ = [
    # Functions
    'calcsize', 'pack', 'pack_into', 'unpack', 'unpack_from',
    'iter_unpack',

    # Classes
    'Struct',

    # Exceptions
    'error'
    ]

from _struct import *
# Private helper and module docstring are not pulled in by `import *`.
from _struct import _clearcache
from _struct import __doc__
|
llava/lib/python3.10/symtable.py
ADDED
|
@@ -0,0 +1,322 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Interface to the compiler's internal symbol tables"""
|
| 2 |
+
|
| 3 |
+
import _symtable
|
| 4 |
+
from _symtable import (USE, DEF_GLOBAL, DEF_NONLOCAL, DEF_LOCAL, DEF_PARAM,
|
| 5 |
+
DEF_IMPORT, DEF_BOUND, DEF_ANNOT, SCOPE_OFF, SCOPE_MASK, FREE,
|
| 6 |
+
LOCAL, GLOBAL_IMPLICIT, GLOBAL_EXPLICIT, CELL)
|
| 7 |
+
|
| 8 |
+
import weakref
|
| 9 |
+
|
| 10 |
+
__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"]
|
| 11 |
+
|
| 12 |
+
def symtable(code, filename, compile_type):
    """ Return the toplevel *SymbolTable* for the source code.

    *filename* is the name of the file with the code
    and *compile_type* is the *compile()* mode argument.
    """
    top = _symtable.symtable(code, filename, compile_type)
    # Wrap the raw table via the memoizing factory so repeated lookups
    # return the same wrapper objects.
    return _newSymbolTable(top, filename)
|
| 20 |
+
|
| 21 |
+
class SymbolTableFactory:
    """Memoizing factory wrapping raw _symtable tables in the matching
    high-level class (Function, Class or plain SymbolTable)."""

    def __init__(self):
        # Weak values: cached wrappers vanish with their raw tables.
        self.__memo = weakref.WeakValueDictionary()

    def new(self, table, filename):
        # Pick the wrapper class from the raw table's type.
        if table.type == _symtable.TYPE_FUNCTION:
            return Function(table, filename)
        if table.type == _symtable.TYPE_CLASS:
            return Class(table, filename)
        return SymbolTable(table, filename)

    def __call__(self, table, filename):
        # Return the cached wrapper for (table, filename), creating on demand.
        key = table, filename
        obj = self.__memo.get(key, None)
        if obj is None:
            obj = self.__memo[key] = self.new(table, filename)
        return obj

# Module-level singleton used by symtable() and get_children().
_newSymbolTable = SymbolTableFactory()
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class SymbolTable:
    """High-level wrapper around one raw _symtable scope (a module,
    function or class block)."""

    def __init__(self, raw_table, filename):
        self._table = raw_table
        self._filename = filename
        self._symbols = {}  # name -> Symbol cache, filled lazily by lookup()

    def __repr__(self):
        if self.__class__ == SymbolTable:
            kind = ""
        else:
            # Subclass name prefixes the repr, e.g. "Function SymbolTable".
            kind = "%s " % self.__class__.__name__

        if self._table.name == "top":
            return "<{0}SymbolTable for module {1}>".format(kind, self._filename)
        else:
            return "<{0}SymbolTable for {1} in {2}>".format(kind,
                                                            self._table.name,
                                                            self._filename)

    def get_type(self):
        """Return the type of the symbol table.

        The values returned are 'class', 'module' and
        'function'.
        """
        if self._table.type == _symtable.TYPE_MODULE:
            return "module"
        if self._table.type == _symtable.TYPE_FUNCTION:
            return "function"
        if self._table.type == _symtable.TYPE_CLASS:
            return "class"
        # Unreachable for well-formed tables; returns None if the assert
        # passes without matching above (defensive).
        assert self._table.type in (1, 2, 3), \
               "unexpected type: {0}".format(self._table.type)

    def get_id(self):
        """Return an identifier for the table.
        """
        return self._table.id

    def get_name(self):
        """Return the table's name.

        This corresponds to the name of the class, function
        or 'top' if the table is for a class, function or
        global respectively.
        """
        return self._table.name

    def get_lineno(self):
        """Return the number of the first line in the
        block for the table.
        """
        return self._table.lineno

    def is_optimized(self):
        """Return *True* if the locals in the table
        are optimizable.
        """
        return bool(self._table.type == _symtable.TYPE_FUNCTION)

    def is_nested(self):
        """Return *True* if the block is a nested class
        or function."""
        return bool(self._table.nested)

    def has_children(self):
        """Return *True* if the block has nested namespaces.
        """
        return bool(self._table.children)

    def get_identifiers(self):
        """Return a view object containing the names of symbols in the table.
        """
        return self._table.symbols.keys()

    def lookup(self, name):
        """Lookup a *name* in the table.

        Returns a *Symbol* instance.
        """
        sym = self._symbols.get(name)
        if sym is None:
            flags = self._table.symbols[name]
            namespaces = self.__check_children(name)
            # Module-scope symbols get special global/local treatment.
            module_scope = (self._table.name == "top")
            sym = self._symbols[name] = Symbol(name, flags, namespaces,
                                               module_scope=module_scope)
        return sym

    def get_symbols(self):
        """Return a list of *Symbol* instances for
        names in the table.
        """
        return [self.lookup(ident) for ident in self.get_identifiers()]

    def __check_children(self, name):
        # Child tables whose name matches: the namespaces bound to `name`.
        return [_newSymbolTable(st, self._filename)
                for st in self._table.children
                if st.name == name]

    def get_children(self):
        """Return a list of the nested symbol tables.
        """
        return [_newSymbolTable(st, self._filename)
                for st in self._table.children]
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class Function(SymbolTable):
    """Symbol table for a function scope; adds classified identifier
    accessors (parameters, locals, globals, nonlocals, frees)."""

    # Default values for instance variables (lazy caches; name-mangled).
    __params = None
    __locals = None
    __frees = None
    __globals = None
    __nonlocals = None

    def __idents_matching(self, test_func):
        # Tuple of identifiers whose raw symbol flags satisfy test_func.
        return tuple(ident for ident in self.get_identifiers()
                     if test_func(self._table.symbols[ident]))

    def get_parameters(self):
        """Return a tuple of parameters to the function.
        """
        if self.__params is None:
            self.__params = self.__idents_matching(lambda x:x & DEF_PARAM)
        return self.__params

    def get_locals(self):
        """Return a tuple of locals in the function.
        """
        if self.__locals is None:
            # CELL counts as local: bound here but captured by inner scopes.
            locs = (LOCAL, CELL)
            test = lambda x: ((x >> SCOPE_OFF) & SCOPE_MASK) in locs
            self.__locals = self.__idents_matching(test)
        return self.__locals

    def get_globals(self):
        """Return a tuple of globals in the function.
        """
        if self.__globals is None:
            glob = (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
            test = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) in glob
            self.__globals = self.__idents_matching(test)
        return self.__globals

    def get_nonlocals(self):
        """Return a tuple of nonlocals in the function.
        """
        if self.__nonlocals is None:
            self.__nonlocals = self.__idents_matching(lambda x:x & DEF_NONLOCAL)
        return self.__nonlocals

    def get_frees(self):
        """Return a tuple of free variables in the function.
        """
        if self.__frees is None:
            is_free = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) == FREE
            self.__frees = self.__idents_matching(is_free)
        return self.__frees
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
class Class(SymbolTable):
    """Symbol table for a class scope."""

    # Lazily computed cache of method names (name-mangled).
    __methods = None

    def get_methods(self):
        """Return a tuple of methods declared in the class.
        """
        if self.__methods is None:
            # Child tables correspond to nested namespaces (methods);
            # dict.fromkeys de-duplicates while keeping first-seen order.
            self.__methods = tuple(
                dict.fromkeys(child.name for child in self._table.children))
        return self.__methods
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class Symbol:
    """One identifier in a symbol table, with its binding flags and the
    namespaces (child tables) bound to it."""

    def __init__(self, name, flags, namespaces=None, *, module_scope=False):
        self.__name = name
        self.__flags = flags
        self.__scope = (flags >> SCOPE_OFF) & SCOPE_MASK # like PyST_GetScope()
        self.__namespaces = namespaces or ()
        # True when this symbol lives in the module ("top") table.
        self.__module_scope = module_scope

    def __repr__(self):
        return "<symbol {0!r}>".format(self.__name)

    def get_name(self):
        """Return a name of a symbol.
        """
        return self.__name

    def is_referenced(self):
        """Return *True* if the symbol is used in
        its block.
        """
        return bool(self.__flags & _symtable.USE)

    def is_parameter(self):
        """Return *True* if the symbol is a parameter.
        """
        return bool(self.__flags & DEF_PARAM)

    def is_global(self):
        """Return *True* if the symbol is global.
        """
        # Bound names at module scope are both global and local.
        return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
                    or (self.__module_scope and self.__flags & DEF_BOUND))

    def is_nonlocal(self):
        """Return *True* if the symbol is nonlocal."""
        return bool(self.__flags & DEF_NONLOCAL)

    def is_declared_global(self):
        """Return *True* if the symbol is declared global
        with a global statement."""
        return bool(self.__scope == GLOBAL_EXPLICIT)

    def is_local(self):
        """Return *True* if the symbol is local.
        """
        # Bound names at module scope are both global and local.
        return bool(self.__scope in (LOCAL, CELL)
                    or (self.__module_scope and self.__flags & DEF_BOUND))

    def is_annotated(self):
        """Return *True* if the symbol is annotated.
        """
        return bool(self.__flags & DEF_ANNOT)

    def is_free(self):
        """Return *True* if a referenced symbol is
        not assigned to.
        """
        return bool(self.__scope == FREE)

    def is_imported(self):
        """Return *True* if the symbol is created from
        an import statement.
        """
        return bool(self.__flags & DEF_IMPORT)

    def is_assigned(self):
        """Return *True* if a symbol is assigned to."""
        return bool(self.__flags & DEF_LOCAL)

    def is_namespace(self):
        """Returns *True* if name binding introduces new namespace.

        If the name is used as the target of a function or class
        statement, this will be true.

        Note that a single name can be bound to multiple objects. If
        is_namespace() is true, the name may also be bound to other
        objects, like an int or list, that does not introduce a new
        namespace.
        """
        return bool(self.__namespaces)

    def get_namespaces(self):
        """Return a list of namespaces bound to this name"""
        return self.__namespaces

    def get_namespace(self):
        """Return the single namespace bound to this name.

        Raises ValueError if the name is bound to multiple namespaces.
        """
        if len(self.__namespaces) != 1:
            raise ValueError("name is bound to multiple namespaces")
        return self.__namespaces[0]
|
| 314 |
+
|
| 315 |
+
if __name__ == "__main__":
|
| 316 |
+
import os, sys
|
| 317 |
+
with open(sys.argv[0]) as f:
|
| 318 |
+
src = f.read()
|
| 319 |
+
mod = symtable(src, os.path.split(sys.argv[0])[1], "exec")
|
| 320 |
+
for ident in mod.get_identifiers():
|
| 321 |
+
info = mod.lookup(ident)
|
| 322 |
+
print(info, info.is_local(), info.is_namespace())
|
llava/lib/python3.10/turtle.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
llava/lib/python3.10/uuid.py
ADDED
|
@@ -0,0 +1,733 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""UUID objects (universally unique identifiers) according to RFC 4122.
|
| 2 |
+
|
| 3 |
+
This module provides immutable UUID objects (class UUID) and the functions
|
| 4 |
+
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
|
| 5 |
+
UUIDs as specified in RFC 4122.
|
| 6 |
+
|
| 7 |
+
If all you want is a unique ID, you should probably call uuid1() or uuid4().
|
| 8 |
+
Note that uuid1() may compromise privacy since it creates a UUID containing
|
| 9 |
+
the computer's network address. uuid4() creates a random UUID.
|
| 10 |
+
|
| 11 |
+
Typical usage:
|
| 12 |
+
|
| 13 |
+
>>> import uuid
|
| 14 |
+
|
| 15 |
+
# make a UUID based on the host ID and current time
|
| 16 |
+
>>> uuid.uuid1() # doctest: +SKIP
|
| 17 |
+
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
|
| 18 |
+
|
| 19 |
+
# make a UUID using an MD5 hash of a namespace UUID and a name
|
| 20 |
+
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
|
| 21 |
+
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
|
| 22 |
+
|
| 23 |
+
# make a random UUID
|
| 24 |
+
>>> uuid.uuid4() # doctest: +SKIP
|
| 25 |
+
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
|
| 26 |
+
|
| 27 |
+
# make a UUID using a SHA-1 hash of a namespace UUID and a name
|
| 28 |
+
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
|
| 29 |
+
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
|
| 30 |
+
|
| 31 |
+
# make a UUID from a string of hex digits (braces and hyphens ignored)
|
| 32 |
+
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
|
| 33 |
+
|
| 34 |
+
# convert a UUID to a string of hex digits in standard form
|
| 35 |
+
>>> str(x)
|
| 36 |
+
'00010203-0405-0607-0809-0a0b0c0d0e0f'
|
| 37 |
+
|
| 38 |
+
# get the raw 16 bytes of the UUID
|
| 39 |
+
>>> x.bytes
|
| 40 |
+
b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
|
| 41 |
+
|
| 42 |
+
# make a UUID from a 16-byte string
|
| 43 |
+
>>> uuid.UUID(bytes=x.bytes)
|
| 44 |
+
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
import os
|
| 48 |
+
import sys
|
| 49 |
+
|
| 50 |
+
from enum import Enum
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
|
| 54 |
+
|
| 55 |
+
# The recognized platforms - known behaviors
|
| 56 |
+
if sys.platform in ('win32', 'darwin'):
|
| 57 |
+
_AIX = _LINUX = False
|
| 58 |
+
else:
|
| 59 |
+
import platform
|
| 60 |
+
_platform_system = platform.system()
|
| 61 |
+
_AIX = _platform_system == 'AIX'
|
| 62 |
+
_LINUX = _platform_system == 'Linux'
|
| 63 |
+
|
| 64 |
+
_MAC_DELIM = b':'
|
| 65 |
+
_MAC_OMITS_LEADING_ZEROES = False
|
| 66 |
+
if _AIX:
|
| 67 |
+
_MAC_DELIM = b'.'
|
| 68 |
+
_MAC_OMITS_LEADING_ZEROES = True
|
| 69 |
+
|
| 70 |
+
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
|
| 71 |
+
'reserved for NCS compatibility', 'specified in RFC 4122',
|
| 72 |
+
'reserved for Microsoft compatibility', 'reserved for future definition']
|
| 73 |
+
|
| 74 |
+
int_ = int # The built-in int type
|
| 75 |
+
bytes_ = bytes # The built-in bytes type
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class SafeUUID(Enum):
|
| 79 |
+
safe = 0
|
| 80 |
+
unsafe = -1
|
| 81 |
+
unknown = None
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class UUID:
|
| 85 |
+
"""Instances of the UUID class represent UUIDs as specified in RFC 4122.
|
| 86 |
+
UUID objects are immutable, hashable, and usable as dictionary keys.
|
| 87 |
+
Converting a UUID to a string with str() yields something in the form
|
| 88 |
+
'12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts
|
| 89 |
+
five possible forms: a similar string of hexadecimal digits, or a tuple
|
| 90 |
+
of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
|
| 91 |
+
48-bit values respectively) as an argument named 'fields', or a string
|
| 92 |
+
of 16 bytes (with all the integer fields in big-endian order) as an
|
| 93 |
+
argument named 'bytes', or a string of 16 bytes (with the first three
|
| 94 |
+
fields in little-endian order) as an argument named 'bytes_le', or a
|
| 95 |
+
single 128-bit integer as an argument named 'int'.
|
| 96 |
+
|
| 97 |
+
UUIDs have these read-only attributes:
|
| 98 |
+
|
| 99 |
+
bytes the UUID as a 16-byte string (containing the six
|
| 100 |
+
integer fields in big-endian byte order)
|
| 101 |
+
|
| 102 |
+
bytes_le the UUID as a 16-byte string (with time_low, time_mid,
|
| 103 |
+
and time_hi_version in little-endian byte order)
|
| 104 |
+
|
| 105 |
+
fields a tuple of the six integer fields of the UUID,
|
| 106 |
+
which are also available as six individual attributes
|
| 107 |
+
and two derived attributes:
|
| 108 |
+
|
| 109 |
+
time_low the first 32 bits of the UUID
|
| 110 |
+
time_mid the next 16 bits of the UUID
|
| 111 |
+
time_hi_version the next 16 bits of the UUID
|
| 112 |
+
clock_seq_hi_variant the next 8 bits of the UUID
|
| 113 |
+
clock_seq_low the next 8 bits of the UUID
|
| 114 |
+
node the last 48 bits of the UUID
|
| 115 |
+
|
| 116 |
+
time the 60-bit timestamp
|
| 117 |
+
clock_seq the 14-bit sequence number
|
| 118 |
+
|
| 119 |
+
hex the UUID as a 32-character hexadecimal string
|
| 120 |
+
|
| 121 |
+
int the UUID as a 128-bit integer
|
| 122 |
+
|
| 123 |
+
urn the UUID as a URN as specified in RFC 4122
|
| 124 |
+
|
| 125 |
+
variant the UUID variant (one of the constants RESERVED_NCS,
|
| 126 |
+
RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
|
| 127 |
+
|
| 128 |
+
version the UUID version number (1 through 5, meaningful only
|
| 129 |
+
when the variant is RFC_4122)
|
| 130 |
+
|
| 131 |
+
is_safe An enum indicating whether the UUID has been generated in
|
| 132 |
+
a way that is safe for multiprocessing applications, via
|
| 133 |
+
uuid_generate_time_safe(3).
|
| 134 |
+
"""
|
| 135 |
+
|
| 136 |
+
__slots__ = ('int', 'is_safe', '__weakref__')
|
| 137 |
+
|
| 138 |
+
def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
|
| 139 |
+
int=None, version=None,
|
| 140 |
+
*, is_safe=SafeUUID.unknown):
|
| 141 |
+
r"""Create a UUID from either a string of 32 hexadecimal digits,
|
| 142 |
+
a string of 16 bytes as the 'bytes' argument, a string of 16 bytes
|
| 143 |
+
in little-endian order as the 'bytes_le' argument, a tuple of six
|
| 144 |
+
integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
|
| 145 |
+
8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
|
| 146 |
+
the 'fields' argument, or a single 128-bit integer as the 'int'
|
| 147 |
+
argument. When a string of hex digits is given, curly braces,
|
| 148 |
+
hyphens, and a URN prefix are all optional. For example, these
|
| 149 |
+
expressions all yield the same UUID:
|
| 150 |
+
|
| 151 |
+
UUID('{12345678-1234-5678-1234-567812345678}')
|
| 152 |
+
UUID('12345678123456781234567812345678')
|
| 153 |
+
UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
|
| 154 |
+
UUID(bytes='\x12\x34\x56\x78'*4)
|
| 155 |
+
UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' +
|
| 156 |
+
'\x12\x34\x56\x78\x12\x34\x56\x78')
|
| 157 |
+
UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
|
| 158 |
+
UUID(int=0x12345678123456781234567812345678)
|
| 159 |
+
|
| 160 |
+
Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must
|
| 161 |
+
be given. The 'version' argument is optional; if given, the resulting
|
| 162 |
+
UUID will have its variant and version set according to RFC 4122,
|
| 163 |
+
overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'.
|
| 164 |
+
|
| 165 |
+
is_safe is an enum exposed as an attribute on the instance. It
|
| 166 |
+
indicates whether the UUID has been generated in a way that is safe
|
| 167 |
+
for multiprocessing applications, via uuid_generate_time_safe(3).
|
| 168 |
+
"""
|
| 169 |
+
|
| 170 |
+
if [hex, bytes, bytes_le, fields, int].count(None) != 4:
|
| 171 |
+
raise TypeError('one of the hex, bytes, bytes_le, fields, '
|
| 172 |
+
'or int arguments must be given')
|
| 173 |
+
if hex is not None:
|
| 174 |
+
hex = hex.replace('urn:', '').replace('uuid:', '')
|
| 175 |
+
hex = hex.strip('{}').replace('-', '')
|
| 176 |
+
if len(hex) != 32:
|
| 177 |
+
raise ValueError('badly formed hexadecimal UUID string')
|
| 178 |
+
int = int_(hex, 16)
|
| 179 |
+
if bytes_le is not None:
|
| 180 |
+
if len(bytes_le) != 16:
|
| 181 |
+
raise ValueError('bytes_le is not a 16-char string')
|
| 182 |
+
bytes = (bytes_le[4-1::-1] + bytes_le[6-1:4-1:-1] +
|
| 183 |
+
bytes_le[8-1:6-1:-1] + bytes_le[8:])
|
| 184 |
+
if bytes is not None:
|
| 185 |
+
if len(bytes) != 16:
|
| 186 |
+
raise ValueError('bytes is not a 16-char string')
|
| 187 |
+
assert isinstance(bytes, bytes_), repr(bytes)
|
| 188 |
+
int = int_.from_bytes(bytes, byteorder='big')
|
| 189 |
+
if fields is not None:
|
| 190 |
+
if len(fields) != 6:
|
| 191 |
+
raise ValueError('fields is not a 6-tuple')
|
| 192 |
+
(time_low, time_mid, time_hi_version,
|
| 193 |
+
clock_seq_hi_variant, clock_seq_low, node) = fields
|
| 194 |
+
if not 0 <= time_low < 1<<32:
|
| 195 |
+
raise ValueError('field 1 out of range (need a 32-bit value)')
|
| 196 |
+
if not 0 <= time_mid < 1<<16:
|
| 197 |
+
raise ValueError('field 2 out of range (need a 16-bit value)')
|
| 198 |
+
if not 0 <= time_hi_version < 1<<16:
|
| 199 |
+
raise ValueError('field 3 out of range (need a 16-bit value)')
|
| 200 |
+
if not 0 <= clock_seq_hi_variant < 1<<8:
|
| 201 |
+
raise ValueError('field 4 out of range (need an 8-bit value)')
|
| 202 |
+
if not 0 <= clock_seq_low < 1<<8:
|
| 203 |
+
raise ValueError('field 5 out of range (need an 8-bit value)')
|
| 204 |
+
if not 0 <= node < 1<<48:
|
| 205 |
+
raise ValueError('field 6 out of range (need a 48-bit value)')
|
| 206 |
+
clock_seq = (clock_seq_hi_variant << 8) | clock_seq_low
|
| 207 |
+
int = ((time_low << 96) | (time_mid << 80) |
|
| 208 |
+
(time_hi_version << 64) | (clock_seq << 48) | node)
|
| 209 |
+
if int is not None:
|
| 210 |
+
if not 0 <= int < 1<<128:
|
| 211 |
+
raise ValueError('int is out of range (need a 128-bit value)')
|
| 212 |
+
if version is not None:
|
| 213 |
+
if not 1 <= version <= 5:
|
| 214 |
+
raise ValueError('illegal version number')
|
| 215 |
+
# Set the variant to RFC 4122.
|
| 216 |
+
int &= ~(0xc000 << 48)
|
| 217 |
+
int |= 0x8000 << 48
|
| 218 |
+
# Set the version number.
|
| 219 |
+
int &= ~(0xf000 << 64)
|
| 220 |
+
int |= version << 76
|
| 221 |
+
object.__setattr__(self, 'int', int)
|
| 222 |
+
object.__setattr__(self, 'is_safe', is_safe)
|
| 223 |
+
|
| 224 |
+
def __getstate__(self):
|
| 225 |
+
d = {'int': self.int}
|
| 226 |
+
if self.is_safe != SafeUUID.unknown:
|
| 227 |
+
# is_safe is a SafeUUID instance. Return just its value, so that
|
| 228 |
+
# it can be un-pickled in older Python versions without SafeUUID.
|
| 229 |
+
d['is_safe'] = self.is_safe.value
|
| 230 |
+
return d
|
| 231 |
+
|
| 232 |
+
def __setstate__(self, state):
|
| 233 |
+
object.__setattr__(self, 'int', state['int'])
|
| 234 |
+
# is_safe was added in 3.7; it is also omitted when it is "unknown"
|
| 235 |
+
object.__setattr__(self, 'is_safe',
|
| 236 |
+
SafeUUID(state['is_safe'])
|
| 237 |
+
if 'is_safe' in state else SafeUUID.unknown)
|
| 238 |
+
|
| 239 |
+
def __eq__(self, other):
|
| 240 |
+
if isinstance(other, UUID):
|
| 241 |
+
return self.int == other.int
|
| 242 |
+
return NotImplemented
|
| 243 |
+
|
| 244 |
+
# Q. What's the value of being able to sort UUIDs?
|
| 245 |
+
# A. Use them as keys in a B-Tree or similar mapping.
|
| 246 |
+
|
| 247 |
+
def __lt__(self, other):
|
| 248 |
+
if isinstance(other, UUID):
|
| 249 |
+
return self.int < other.int
|
| 250 |
+
return NotImplemented
|
| 251 |
+
|
| 252 |
+
def __gt__(self, other):
|
| 253 |
+
if isinstance(other, UUID):
|
| 254 |
+
return self.int > other.int
|
| 255 |
+
return NotImplemented
|
| 256 |
+
|
| 257 |
+
def __le__(self, other):
|
| 258 |
+
if isinstance(other, UUID):
|
| 259 |
+
return self.int <= other.int
|
| 260 |
+
return NotImplemented
|
| 261 |
+
|
| 262 |
+
def __ge__(self, other):
|
| 263 |
+
if isinstance(other, UUID):
|
| 264 |
+
return self.int >= other.int
|
| 265 |
+
return NotImplemented
|
| 266 |
+
|
| 267 |
+
def __hash__(self):
|
| 268 |
+
return hash(self.int)
|
| 269 |
+
|
| 270 |
+
def __int__(self):
|
| 271 |
+
return self.int
|
| 272 |
+
|
| 273 |
+
def __repr__(self):
|
| 274 |
+
return '%s(%r)' % (self.__class__.__name__, str(self))
|
| 275 |
+
|
| 276 |
+
def __setattr__(self, name, value):
|
| 277 |
+
raise TypeError('UUID objects are immutable')
|
| 278 |
+
|
| 279 |
+
def __str__(self):
|
| 280 |
+
hex = '%032x' % self.int
|
| 281 |
+
return '%s-%s-%s-%s-%s' % (
|
| 282 |
+
hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])
|
| 283 |
+
|
| 284 |
+
@property
|
| 285 |
+
def bytes(self):
|
| 286 |
+
return self.int.to_bytes(16, 'big')
|
| 287 |
+
|
| 288 |
+
@property
|
| 289 |
+
def bytes_le(self):
|
| 290 |
+
bytes = self.bytes
|
| 291 |
+
return (bytes[4-1::-1] + bytes[6-1:4-1:-1] + bytes[8-1:6-1:-1] +
|
| 292 |
+
bytes[8:])
|
| 293 |
+
|
| 294 |
+
@property
|
| 295 |
+
def fields(self):
|
| 296 |
+
return (self.time_low, self.time_mid, self.time_hi_version,
|
| 297 |
+
self.clock_seq_hi_variant, self.clock_seq_low, self.node)
|
| 298 |
+
|
| 299 |
+
@property
|
| 300 |
+
def time_low(self):
|
| 301 |
+
return self.int >> 96
|
| 302 |
+
|
| 303 |
+
@property
|
| 304 |
+
def time_mid(self):
|
| 305 |
+
return (self.int >> 80) & 0xffff
|
| 306 |
+
|
| 307 |
+
@property
|
| 308 |
+
def time_hi_version(self):
|
| 309 |
+
return (self.int >> 64) & 0xffff
|
| 310 |
+
|
| 311 |
+
@property
|
| 312 |
+
def clock_seq_hi_variant(self):
|
| 313 |
+
return (self.int >> 56) & 0xff
|
| 314 |
+
|
| 315 |
+
@property
|
| 316 |
+
def clock_seq_low(self):
|
| 317 |
+
return (self.int >> 48) & 0xff
|
| 318 |
+
|
| 319 |
+
@property
|
| 320 |
+
def time(self):
|
| 321 |
+
return (((self.time_hi_version & 0x0fff) << 48) |
|
| 322 |
+
(self.time_mid << 32) | self.time_low)
|
| 323 |
+
|
| 324 |
+
@property
|
| 325 |
+
def clock_seq(self):
|
| 326 |
+
return (((self.clock_seq_hi_variant & 0x3f) << 8) |
|
| 327 |
+
self.clock_seq_low)
|
| 328 |
+
|
| 329 |
+
@property
|
| 330 |
+
def node(self):
|
| 331 |
+
return self.int & 0xffffffffffff
|
| 332 |
+
|
| 333 |
+
@property
|
| 334 |
+
def hex(self):
|
| 335 |
+
return '%032x' % self.int
|
| 336 |
+
|
| 337 |
+
@property
|
| 338 |
+
def urn(self):
|
| 339 |
+
return 'urn:uuid:' + str(self)
|
| 340 |
+
|
| 341 |
+
@property
|
| 342 |
+
def variant(self):
|
| 343 |
+
if not self.int & (0x8000 << 48):
|
| 344 |
+
return RESERVED_NCS
|
| 345 |
+
elif not self.int & (0x4000 << 48):
|
| 346 |
+
return RFC_4122
|
| 347 |
+
elif not self.int & (0x2000 << 48):
|
| 348 |
+
return RESERVED_MICROSOFT
|
| 349 |
+
else:
|
| 350 |
+
return RESERVED_FUTURE
|
| 351 |
+
|
| 352 |
+
@property
|
| 353 |
+
def version(self):
|
| 354 |
+
# The version bits are only meaningful for RFC 4122 UUIDs.
|
| 355 |
+
if self.variant == RFC_4122:
|
| 356 |
+
return int((self.int >> 76) & 0xf)
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
def _get_command_stdout(command, *args):
|
| 360 |
+
import io, os, shutil, subprocess
|
| 361 |
+
|
| 362 |
+
try:
|
| 363 |
+
path_dirs = os.environ.get('PATH', os.defpath).split(os.pathsep)
|
| 364 |
+
path_dirs.extend(['/sbin', '/usr/sbin'])
|
| 365 |
+
executable = shutil.which(command, path=os.pathsep.join(path_dirs))
|
| 366 |
+
if executable is None:
|
| 367 |
+
return None
|
| 368 |
+
# LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
|
| 369 |
+
# on stderr (Note: we don't have an example where the words we search
|
| 370 |
+
# for are actually localized, but in theory some system could do so.)
|
| 371 |
+
env = dict(os.environ)
|
| 372 |
+
env['LC_ALL'] = 'C'
|
| 373 |
+
# Empty strings will be quoted by popen so we should just ommit it
|
| 374 |
+
if args != ('',):
|
| 375 |
+
command = (executable, *args)
|
| 376 |
+
else:
|
| 377 |
+
command = (executable,)
|
| 378 |
+
proc = subprocess.Popen(command,
|
| 379 |
+
stdout=subprocess.PIPE,
|
| 380 |
+
stderr=subprocess.DEVNULL,
|
| 381 |
+
env=env)
|
| 382 |
+
if not proc:
|
| 383 |
+
return None
|
| 384 |
+
stdout, stderr = proc.communicate()
|
| 385 |
+
return io.BytesIO(stdout)
|
| 386 |
+
except (OSError, subprocess.SubprocessError):
|
| 387 |
+
return None
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
# For MAC (a.k.a. IEEE 802, or EUI-48) addresses, the second least significant
|
| 391 |
+
# bit of the first octet signifies whether the MAC address is universally (0)
|
| 392 |
+
# or locally (1) administered. Network cards from hardware manufacturers will
|
| 393 |
+
# always be universally administered to guarantee global uniqueness of the MAC
|
| 394 |
+
# address, but any particular machine may have other interfaces which are
|
| 395 |
+
# locally administered. An example of the latter is the bridge interface to
|
| 396 |
+
# the Touch Bar on MacBook Pros.
|
| 397 |
+
#
|
| 398 |
+
# This bit works out to be the 42nd bit counting from 1 being the least
|
| 399 |
+
# significant, or 1<<41. We'll prefer universally administered MAC addresses
|
| 400 |
+
# over locally administered ones since the former are globally unique, but
|
| 401 |
+
# we'll return the first of the latter found if that's all the machine has.
|
| 402 |
+
#
|
| 403 |
+
# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local
|
| 404 |
+
|
| 405 |
+
def _is_universal(mac):
|
| 406 |
+
return not (mac & (1 << 41))
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
def _find_mac_near_keyword(command, args, keywords, get_word_index):
|
| 410 |
+
"""Searches a command's output for a MAC address near a keyword.
|
| 411 |
+
|
| 412 |
+
Each line of words in the output is case-insensitively searched for
|
| 413 |
+
any of the given keywords. Upon a match, get_word_index is invoked
|
| 414 |
+
to pick a word from the line, given the index of the match. For
|
| 415 |
+
example, lambda i: 0 would get the first word on the line, while
|
| 416 |
+
lambda i: i - 1 would get the word preceding the keyword.
|
| 417 |
+
"""
|
| 418 |
+
stdout = _get_command_stdout(command, args)
|
| 419 |
+
if stdout is None:
|
| 420 |
+
return None
|
| 421 |
+
|
| 422 |
+
first_local_mac = None
|
| 423 |
+
for line in stdout:
|
| 424 |
+
words = line.lower().rstrip().split()
|
| 425 |
+
for i in range(len(words)):
|
| 426 |
+
if words[i] in keywords:
|
| 427 |
+
try:
|
| 428 |
+
word = words[get_word_index(i)]
|
| 429 |
+
mac = int(word.replace(_MAC_DELIM, b''), 16)
|
| 430 |
+
except (ValueError, IndexError):
|
| 431 |
+
# Virtual interfaces, such as those provided by
|
| 432 |
+
# VPNs, do not have a colon-delimited MAC address
|
| 433 |
+
# as expected, but a 16-byte HWAddr separated by
|
| 434 |
+
# dashes. These should be ignored in favor of a
|
| 435 |
+
# real MAC address
|
| 436 |
+
pass
|
| 437 |
+
else:
|
| 438 |
+
if _is_universal(mac):
|
| 439 |
+
return mac
|
| 440 |
+
first_local_mac = first_local_mac or mac
|
| 441 |
+
return first_local_mac or None
|
| 442 |
+
|
| 443 |
+
|
| 444 |
+
def _parse_mac(word):
|
| 445 |
+
# Accept 'HH:HH:HH:HH:HH:HH' MAC address (ex: '52:54:00:9d:0e:67'),
|
| 446 |
+
# but reject IPv6 address (ex: 'fe80::5054:ff:fe9' or '123:2:3:4:5:6:7:8').
|
| 447 |
+
#
|
| 448 |
+
# Virtual interfaces, such as those provided by VPNs, do not have a
|
| 449 |
+
# colon-delimited MAC address as expected, but a 16-byte HWAddr separated
|
| 450 |
+
# by dashes. These should be ignored in favor of a real MAC address
|
| 451 |
+
parts = word.split(_MAC_DELIM)
|
| 452 |
+
if len(parts) != 6:
|
| 453 |
+
return
|
| 454 |
+
if _MAC_OMITS_LEADING_ZEROES:
|
| 455 |
+
# (Only) on AIX the macaddr value given is not prefixed by 0, e.g.
|
| 456 |
+
# en0 1500 link#2 fa.bc.de.f7.62.4 110854824 0 160133733 0 0
|
| 457 |
+
# not
|
| 458 |
+
# en0 1500 link#2 fa.bc.de.f7.62.04 110854824 0 160133733 0 0
|
| 459 |
+
if not all(1 <= len(part) <= 2 for part in parts):
|
| 460 |
+
return
|
| 461 |
+
hexstr = b''.join(part.rjust(2, b'0') for part in parts)
|
| 462 |
+
else:
|
| 463 |
+
if not all(len(part) == 2 for part in parts):
|
| 464 |
+
return
|
| 465 |
+
hexstr = b''.join(parts)
|
| 466 |
+
try:
|
| 467 |
+
return int(hexstr, 16)
|
| 468 |
+
except ValueError:
|
| 469 |
+
return
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
def _find_mac_under_heading(command, args, heading):
|
| 473 |
+
"""Looks for a MAC address under a heading in a command's output.
|
| 474 |
+
|
| 475 |
+
The first line of words in the output is searched for the given
|
| 476 |
+
heading. Words at the same word index as the heading in subsequent
|
| 477 |
+
lines are then examined to see if they look like MAC addresses.
|
| 478 |
+
"""
|
| 479 |
+
stdout = _get_command_stdout(command, args)
|
| 480 |
+
if stdout is None:
|
| 481 |
+
return None
|
| 482 |
+
|
| 483 |
+
keywords = stdout.readline().rstrip().split()
|
| 484 |
+
try:
|
| 485 |
+
column_index = keywords.index(heading)
|
| 486 |
+
except ValueError:
|
| 487 |
+
return None
|
| 488 |
+
|
| 489 |
+
first_local_mac = None
|
| 490 |
+
for line in stdout:
|
| 491 |
+
words = line.rstrip().split()
|
| 492 |
+
try:
|
| 493 |
+
word = words[column_index]
|
| 494 |
+
except IndexError:
|
| 495 |
+
continue
|
| 496 |
+
|
| 497 |
+
mac = _parse_mac(word)
|
| 498 |
+
if mac is None:
|
| 499 |
+
continue
|
| 500 |
+
if _is_universal(mac):
|
| 501 |
+
return mac
|
| 502 |
+
if first_local_mac is None:
|
| 503 |
+
first_local_mac = mac
|
| 504 |
+
|
| 505 |
+
return first_local_mac
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
# The following functions call external programs to 'get' a macaddr value to
|
| 509 |
+
# be used as basis for an uuid
|
| 510 |
+
def _ifconfig_getnode():
|
| 511 |
+
"""Get the hardware address on Unix by running ifconfig."""
|
| 512 |
+
# This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
|
| 513 |
+
keywords = (b'hwaddr', b'ether', b'address:', b'lladdr')
|
| 514 |
+
for args in ('', '-a', '-av'):
|
| 515 |
+
mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1)
|
| 516 |
+
if mac:
|
| 517 |
+
return mac
|
| 518 |
+
return None
|
| 519 |
+
|
| 520 |
+
def _ip_getnode():
|
| 521 |
+
"""Get the hardware address on Unix by running ip."""
|
| 522 |
+
# This works on Linux with iproute2.
|
| 523 |
+
mac = _find_mac_near_keyword('ip', 'link', [b'link/ether'], lambda i: i+1)
|
| 524 |
+
if mac:
|
| 525 |
+
return mac
|
| 526 |
+
return None
|
| 527 |
+
|
| 528 |
+
def _arp_getnode():
|
| 529 |
+
"""Get the hardware address on Unix by running arp."""
|
| 530 |
+
import os, socket
|
| 531 |
+
try:
|
| 532 |
+
ip_addr = socket.gethostbyname(socket.gethostname())
|
| 533 |
+
except OSError:
|
| 534 |
+
return None
|
| 535 |
+
|
| 536 |
+
# Try getting the MAC addr from arp based on our IP address (Solaris).
|
| 537 |
+
mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
|
| 538 |
+
if mac:
|
| 539 |
+
return mac
|
| 540 |
+
|
| 541 |
+
# This works on OpenBSD
|
| 542 |
+
mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
|
| 543 |
+
if mac:
|
| 544 |
+
return mac
|
| 545 |
+
|
| 546 |
+
# This works on Linux, FreeBSD and NetBSD
|
| 547 |
+
mac = _find_mac_near_keyword('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
|
| 548 |
+
lambda i: i+2)
|
| 549 |
+
# Return None instead of 0.
|
| 550 |
+
if mac:
|
| 551 |
+
return mac
|
| 552 |
+
return None
|
| 553 |
+
|
| 554 |
+
def _lanscan_getnode():
|
| 555 |
+
"""Get the hardware address on Unix by running lanscan."""
|
| 556 |
+
# This might work on HP-UX.
|
| 557 |
+
return _find_mac_near_keyword('lanscan', '-ai', [b'lan0'], lambda i: 0)
|
| 558 |
+
|
| 559 |
+
def _netstat_getnode():
|
| 560 |
+
"""Get the hardware address on Unix by running netstat."""
|
| 561 |
+
# This works on AIX and might work on Tru64 UNIX.
|
| 562 |
+
return _find_mac_under_heading('netstat', '-ian', b'Address')
|
| 563 |
+
|
| 564 |
+
def _ipconfig_getnode():
|
| 565 |
+
"""[DEPRECATED] Get the hardware address on Windows."""
|
| 566 |
+
# bpo-40501: UuidCreateSequential() is now the only supported approach
|
| 567 |
+
return _windll_getnode()
|
| 568 |
+
|
| 569 |
+
def _netbios_getnode():
|
| 570 |
+
"""[DEPRECATED] Get the hardware address on Windows."""
|
| 571 |
+
# bpo-40501: UuidCreateSequential() is now the only supported approach
|
| 572 |
+
return _windll_getnode()
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
# Import optional C extension at toplevel, to help disabling it when testing
|
| 576 |
+
try:
|
| 577 |
+
import _uuid
|
| 578 |
+
_generate_time_safe = getattr(_uuid, "generate_time_safe", None)
|
| 579 |
+
_UuidCreate = getattr(_uuid, "UuidCreate", None)
|
| 580 |
+
_has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
|
| 581 |
+
except ImportError:
|
| 582 |
+
_uuid = None
|
| 583 |
+
_generate_time_safe = None
|
| 584 |
+
_UuidCreate = None
|
| 585 |
+
_has_uuid_generate_time_safe = None
|
| 586 |
+
|
| 587 |
+
|
| 588 |
+
def _load_system_functions():
|
| 589 |
+
"""[DEPRECATED] Platform-specific functions loaded at import time"""
|
| 590 |
+
|
| 591 |
+
|
| 592 |
+
def _unix_getnode():
|
| 593 |
+
"""Get the hardware address on Unix using the _uuid extension module."""
|
| 594 |
+
if _generate_time_safe:
|
| 595 |
+
uuid_time, _ = _generate_time_safe()
|
| 596 |
+
return UUID(bytes=uuid_time).node
|
| 597 |
+
|
| 598 |
+
def _windll_getnode():
|
| 599 |
+
"""Get the hardware address on Windows using the _uuid extension module."""
|
| 600 |
+
if _UuidCreate:
|
| 601 |
+
uuid_bytes = _UuidCreate()
|
| 602 |
+
return UUID(bytes_le=uuid_bytes).node
|
| 603 |
+
|
| 604 |
+
def _random_getnode():
|
| 605 |
+
"""Get a random node ID."""
|
| 606 |
+
# RFC 4122, $4.1.6 says "For systems with no IEEE address, a randomly or
|
| 607 |
+
# pseudo-randomly generated value may be used; see Section 4.5. The
|
| 608 |
+
# multicast bit must be set in such addresses, in order that they will
|
| 609 |
+
# never conflict with addresses obtained from network cards."
|
| 610 |
+
#
|
| 611 |
+
# The "multicast bit" of a MAC address is defined to be "the least
|
| 612 |
+
# significant bit of the first octet". This works out to be the 41st bit
|
| 613 |
+
# counting from 1 being the least significant bit, or 1<<40.
|
| 614 |
+
#
|
| 615 |
+
# See https://en.wikipedia.org/wiki/MAC_address#Unicast_vs._multicast
|
| 616 |
+
import random
|
| 617 |
+
return random.getrandbits(48) | (1 << 40)
|
| 618 |
+
|
| 619 |
+
|
| 620 |
+
# _OS_GETTERS, when known, are targeted for a specific OS or platform.
|
| 621 |
+
# The order is by 'common practice' on the specified platform.
|
| 622 |
+
# Note: 'posix' and 'windows' _OS_GETTERS are prefixed by a dll/dlload() method
|
| 623 |
+
# which, when successful, means none of these "external" methods are called.
|
| 624 |
+
# _GETTERS is (also) used by test_uuid.py to SkipUnless(), e.g.,
|
| 625 |
+
# @unittest.skipUnless(_uuid._ifconfig_getnode in _uuid._GETTERS, ...)
|
| 626 |
+
if _LINUX:
|
| 627 |
+
_OS_GETTERS = [_ip_getnode, _ifconfig_getnode]
|
| 628 |
+
elif sys.platform == 'darwin':
|
| 629 |
+
_OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode]
|
| 630 |
+
elif sys.platform == 'win32':
|
| 631 |
+
# bpo-40201: _windll_getnode will always succeed, so these are not needed
|
| 632 |
+
_OS_GETTERS = []
|
| 633 |
+
elif _AIX:
|
| 634 |
+
_OS_GETTERS = [_netstat_getnode]
|
| 635 |
+
else:
|
| 636 |
+
_OS_GETTERS = [_ifconfig_getnode, _ip_getnode, _arp_getnode,
|
| 637 |
+
_netstat_getnode, _lanscan_getnode]
|
| 638 |
+
if os.name == 'posix':
|
| 639 |
+
_GETTERS = [_unix_getnode] + _OS_GETTERS
|
| 640 |
+
elif os.name == 'nt':
|
| 641 |
+
_GETTERS = [_windll_getnode] + _OS_GETTERS
|
| 642 |
+
else:
|
| 643 |
+
_GETTERS = _OS_GETTERS
|
| 644 |
+
|
| 645 |
+
_node = None
|
| 646 |
+
|
| 647 |
+
def getnode():
    """Get the hardware address as a 48-bit positive integer.

    The first time this runs, it may launch a separate program, which
    could be quite slow.  If all attempts to obtain the hardware address
    fail, we choose a random 48-bit number with its eighth bit set to 1
    as recommended in RFC 4122.  The result is cached in the module
    global ``_node``.
    """
    global _node
    if _node is not None:
        return _node

    # _random_getnode() is appended as a last resort and always succeeds.
    for candidate in _GETTERS + [_random_getnode]:
        try:
            _node = candidate()
        except:
            continue
        if _node is not None and 0 <= _node < (1 << 48):
            return _node
    assert False, '_random_getnode() returned invalid value: {}'.format(_node)
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
_last_timestamp = None
|
| 670 |
+
|
| 671 |
+
def uuid1(node=None, clock_seq=None):
    """Generate a UUID from a host ID, sequence number, and the current time.

    If 'node' is not given, getnode() is used to obtain the hardware
    address.  If 'clock_seq' is given, it is used as the sequence number;
    otherwise a random 14-bit sequence number is chosen.
    """
    # When the system provides a version-1 UUID generator, use it (but don't
    # use UuidCreate here because its UUIDs don't conform to RFC 4122).
    if _generate_time_safe is not None and node is clock_seq is None:
        raw_bytes, safety_value = _generate_time_safe()
        try:
            safety = SafeUUID(safety_value)
        except ValueError:
            safety = SafeUUID.unknown
        return UUID(bytes=raw_bytes, is_safe=safety)

    global _last_timestamp
    import time
    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
    timestamp = time.time_ns() // 100 + 0x01b21dd213814000
    # Guarantee strictly increasing timestamps within this process.
    if _last_timestamp is not None and timestamp <= _last_timestamp:
        timestamp = _last_timestamp + 1
    _last_timestamp = timestamp
    if clock_seq is None:
        import random
        clock_seq = random.getrandbits(14)  # instead of stable storage
    if node is None:
        node = getnode()
    # Split the 60-bit timestamp and 14-bit clock sequence into the
    # RFC 4122 field layout.
    return UUID(fields=(timestamp & 0xffffffff,            # time_low
                        (timestamp >> 32) & 0xffff,        # time_mid
                        (timestamp >> 48) & 0x0fff,        # time_hi_version
                        (clock_seq >> 8) & 0x3f,           # clock_seq_hi_variant
                        clock_seq & 0xff,                  # clock_seq_low
                        node), version=1)
|
| 708 |
+
|
| 709 |
+
def uuid3(namespace, name):
    """Generate a UUID from the MD5 hash of a namespace UUID and a name."""
    from hashlib import md5
    material = namespace.bytes + bytes(name, "utf-8")
    # usedforsecurity=False: MD5 here is a name-hashing convention
    # (RFC 4122), not a security primitive.
    fingerprint = md5(material, usedforsecurity=False).digest()
    return UUID(bytes=fingerprint[:16], version=3)
|
| 717 |
+
|
| 718 |
+
def uuid4():
    """Generate a random UUID from 16 bytes of os.urandom()."""
    return UUID(bytes=os.urandom(16), version=4)
|
| 721 |
+
|
| 722 |
+
def uuid5(namespace, name):
    """Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
    from hashlib import sha1
    digest = sha1(namespace.bytes + bytes(name, "utf-8")).digest()
    # Only the first 16 bytes of the 20-byte SHA-1 digest are used.
    return UUID(bytes=digest[:16], version=5)
|
| 727 |
+
|
| 728 |
+
# The following standard UUIDs are for use with uuid3() or uuid5().
|
| 729 |
+
|
| 730 |
+
NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
|
| 731 |
+
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')
|
| 732 |
+
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')
|
| 733 |
+
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')
|
llava/lib/python3.10/xdrlib.py
ADDED
|
@@ -0,0 +1,241 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Implements (a subset of) Sun XDR -- eXternal Data Representation.
|
| 2 |
+
|
| 3 |
+
See: RFC 1014
|
| 4 |
+
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import struct
|
| 8 |
+
from io import BytesIO
|
| 9 |
+
from functools import wraps
|
| 10 |
+
|
| 11 |
+
__all__ = ["Error", "Packer", "Unpacker", "ConversionError"]
|
| 12 |
+
|
| 13 |
+
# exceptions
|
| 14 |
+
class Error(Exception):
    """Exception class for this module. Use:

    except xdrlib.Error as var:
        # var has the Error instance for the exception

    Public ivars:
        msg -- contains the message
    """

    def __init__(self, msg):
        # Note: intentionally does not call Exception.__init__, matching
        # the historical xdrlib behavior (args stays empty).
        self.msg = msg

    def __repr__(self):
        return repr(self.msg)

    def __str__(self):
        return str(self.msg)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class ConversionError(Error):
    """Raised when a value cannot be packed/unpacked in its XDR form."""
    pass
|
| 34 |
+
|
| 35 |
+
def raise_conversion_error(function):
    """Decorator: re-raise any struct.error from *function* as ConversionError.

    Preserves the wrapped function's metadata via functools.wraps; the
    original struct.error is suppressed ('from None').
    """

    @wraps(function)
    def result(self, value):
        try:
            return function(self, value)
        except struct.error as exc:
            raise ConversionError(exc.args[0]) from None
    return result
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class Packer:
    """Pack various data representations into a buffer."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Start over with an empty output buffer.
        self.__buf = BytesIO()

    def get_buffer(self):
        """Return everything packed so far as a bytes object."""
        return self.__buf.getvalue()

    # backwards compatibility
    get_buf = get_buffer

    @raise_conversion_error
    def pack_uint(self, x):
        self.__buf.write(struct.pack('>L', x))

    @raise_conversion_error
    def pack_int(self, x):
        self.__buf.write(struct.pack('>l', x))

    pack_enum = pack_int

    def pack_bool(self, x):
        # XDR booleans are 4-byte big-endian 0 or 1.
        self.__buf.write(b'\0\0\0\1' if x else b'\0\0\0\0')

    def pack_uhyper(self, x):
        # A 64-bit quantity is emitted as two 32-bit halves, high first.
        try:
            self.pack_uint(x >> 32 & 0xffffffff)
        except (TypeError, struct.error) as exc:
            raise ConversionError(exc.args[0]) from None
        try:
            self.pack_uint(x & 0xffffffff)
        except (TypeError, struct.error) as exc:
            raise ConversionError(exc.args[0]) from None

    pack_hyper = pack_uhyper

    @raise_conversion_error
    def pack_float(self, x):
        self.__buf.write(struct.pack('>f', x))

    @raise_conversion_error
    def pack_double(self, x):
        self.__buf.write(struct.pack('>d', x))

    def pack_fstring(self, n, s):
        """Pack a fixed-size (n-byte) string, zero-padded to a 4-byte boundary."""
        if n < 0:
            raise ValueError('fstring size must be nonnegative')
        data = s[:n]
        padded_len = ((n + 3) // 4) * 4
        self.__buf.write(data + (padded_len - len(data)) * b'\0')

    pack_fopaque = pack_fstring

    def pack_string(self, s):
        """Pack a variable-length string: 4-byte length, then padded bytes."""
        length = len(s)
        self.pack_uint(length)
        self.pack_fstring(length, s)

    pack_opaque = pack_string
    pack_bytes = pack_string

    def pack_list(self, list, pack_item):
        # Each element is preceded by a 1 flag; a trailing 0 ends the list.
        for item in list:
            self.pack_uint(1)
            pack_item(item)
        self.pack_uint(0)

    def pack_farray(self, n, list, pack_item):
        """Pack a fixed-length array; *n* must equal len(list)."""
        if len(list) != n:
            raise ValueError('wrong array size')
        for item in list:
            pack_item(item)

    def pack_array(self, list, pack_item):
        """Pack a counted array: 4-byte element count, then the elements."""
        count = len(list)
        self.pack_uint(count)
        self.pack_farray(count, list, pack_item)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class Unpacker:
    """Unpacks various data representations from the given buffer."""

    def __init__(self, data):
        self.reset(data)

    def reset(self, data):
        """Start over, decoding from the beginning of *data*."""
        self.__buf = data
        self.__pos = 0

    def get_position(self):
        return self.__pos

    def set_position(self, position):
        self.__pos = position

    def get_buffer(self):
        return self.__buf

    def done(self):
        """Raise Error if any input bytes remain unconsumed."""
        if self.__pos < len(self.__buf):
            raise Error('unextracted data remains')

    def __take(self, count):
        # Advance the cursor by *count* bytes and return them; the cursor
        # is advanced even on EOF, matching historical xdrlib behavior.
        start = self.__pos
        self.__pos = end = start + count
        chunk = self.__buf[start:end]
        if len(chunk) < count:
            raise EOFError
        return chunk

    def unpack_uint(self):
        return struct.unpack('>L', self.__take(4))[0]

    def unpack_int(self):
        return struct.unpack('>l', self.__take(4))[0]

    unpack_enum = unpack_int

    def unpack_bool(self):
        return bool(self.unpack_int())

    def unpack_uhyper(self):
        # Two 32-bit halves, high word first.
        high = self.unpack_uint()
        low = self.unpack_uint()
        return int(high) << 32 | low

    def unpack_hyper(self):
        value = self.unpack_uhyper()
        if value >= 0x8000000000000000:
            value -= 0x10000000000000000
        return value

    def unpack_float(self):
        return struct.unpack('>f', self.__take(4))[0]

    def unpack_double(self):
        return struct.unpack('>d', self.__take(8))[0]

    def unpack_fstring(self, n):
        """Unpack an n-byte string, consuming its 4-byte-aligned padding."""
        if n < 0:
            raise ValueError('fstring size must be nonnegative')
        start = self.__pos
        end = start + (n + 3) // 4 * 4
        # Unlike the fixed-size readers, the cursor only advances when
        # enough data is available.
        if end > len(self.__buf):
            raise EOFError
        self.__pos = end
        return self.__buf[start:start + n]

    unpack_fopaque = unpack_fstring

    def unpack_string(self):
        return self.unpack_fstring(self.unpack_uint())

    unpack_opaque = unpack_string
    unpack_bytes = unpack_string

    def unpack_list(self, unpack_item):
        """Unpack a 1-flag-prefixed list terminated by a 0 flag."""
        items = []
        while True:
            flag = self.unpack_uint()
            if flag == 0:
                break
            if flag != 1:
                raise ConversionError('0 or 1 expected, got %r' % (flag,))
            items.append(unpack_item())
        return items

    def unpack_farray(self, n, unpack_item):
        return [unpack_item() for _ in range(n)]

    def unpack_array(self, unpack_item):
        return self.unpack_farray(self.unpack_uint(), unpack_item)
|
llava/lib/python3.10/zipfile.py
ADDED
|
@@ -0,0 +1,2510 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Read and write ZIP files.
|
| 3 |
+
|
| 4 |
+
XXX references to utf-8 need further investigation.
|
| 5 |
+
"""
|
| 6 |
+
import binascii
|
| 7 |
+
import importlib.util
|
| 8 |
+
import io
|
| 9 |
+
import itertools
|
| 10 |
+
import os
|
| 11 |
+
import posixpath
|
| 12 |
+
import re
|
| 13 |
+
import shutil
|
| 14 |
+
import stat
|
| 15 |
+
import struct
|
| 16 |
+
import sys
|
| 17 |
+
import threading
|
| 18 |
+
import time
|
| 19 |
+
import contextlib
|
| 20 |
+
import pathlib
|
| 21 |
+
|
| 22 |
+
try:
|
| 23 |
+
import zlib # We may need its compression method
|
| 24 |
+
crc32 = zlib.crc32
|
| 25 |
+
except ImportError:
|
| 26 |
+
zlib = None
|
| 27 |
+
crc32 = binascii.crc32
|
| 28 |
+
|
| 29 |
+
try:
|
| 30 |
+
import bz2 # We may need its compression method
|
| 31 |
+
except ImportError:
|
| 32 |
+
bz2 = None
|
| 33 |
+
|
| 34 |
+
try:
|
| 35 |
+
import lzma # We may need its compression method
|
| 36 |
+
except ImportError:
|
| 37 |
+
lzma = None
|
| 38 |
+
|
| 39 |
+
__all__ = ["BadZipFile", "BadZipfile", "error",
|
| 40 |
+
"ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA",
|
| 41 |
+
"is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile",
|
| 42 |
+
"Path"]
|
| 43 |
+
|
| 44 |
+
class BadZipFile(Exception):
    """Raised when a file is not a valid ZIP archive or is corrupted."""
    pass
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class LargeZipFile(Exception):
    """Raised while writing an archive that would need ZIP64 extensions
    when those extensions have been disabled."""
|
| 53 |
+
|
| 54 |
+
error = BadZipfile = BadZipFile # Pre-3.2 compatibility names


# Values past these limits require the ZIP64 extensions: 32-bit
# sizes/offsets and 16-bit entry counts overflow beyond them.
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = (1 << 16) - 1
ZIP_MAX_COMMENT = (1 << 16) - 1

# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
ZIP_BZIP2 = 12
ZIP_LZMA = 14
# Other ZIP compression methods not supported

# "Version needed to extract" values stamped into headers depending on
# which features an archive member uses (see ZipInfo.FileHeader).
DEFAULT_VERSION = 20
ZIP64_VERSION = 45
BZIP2_VERSION = 46
LZMA_VERSION = 63
# we recognize (but not necessarily support) all features up to that version
MAX_EXTRACT_VERSION = 63

# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)

# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = b"<4s4H2LH"
stringEndArchive = b"PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)

# Indices into the tuple unpacked with structEndArchive.
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9

# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = b"PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)

# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18

# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = b"PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)

# Indices into the tuple unpacked with structFileHeader.
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11

# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = b"PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)

# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = b"PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)

# Indices into the tuple unpacked with structEndArchive64.
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9

# Signature of the optional "data descriptor" that may follow a member's data.
_DD_SIGNATURE = 0x08074b50
|
| 169 |
+
|
| 170 |
+
_EXTRA_FIELD_STRUCT = struct.Struct('<HH')
|
| 171 |
+
|
| 172 |
+
def _strip_extra(extra, xids):
|
| 173 |
+
# Remove Extra Fields with specified IDs.
|
| 174 |
+
unpack = _EXTRA_FIELD_STRUCT.unpack
|
| 175 |
+
modified = False
|
| 176 |
+
buffer = []
|
| 177 |
+
start = i = 0
|
| 178 |
+
while i + 4 <= len(extra):
|
| 179 |
+
xid, xlen = unpack(extra[i : i + 4])
|
| 180 |
+
j = i + 4 + xlen
|
| 181 |
+
if xid in xids:
|
| 182 |
+
if i != start:
|
| 183 |
+
buffer.append(extra[start : i])
|
| 184 |
+
start = j
|
| 185 |
+
modified = True
|
| 186 |
+
i = j
|
| 187 |
+
if not modified:
|
| 188 |
+
return extra
|
| 189 |
+
if start != len(extra):
|
| 190 |
+
buffer.append(extra[start:])
|
| 191 |
+
return b''.join(buffer)
|
| 192 |
+
|
| 193 |
+
def _check_zipfile(fp):
    """Return True when *fp* yields a readable End-of-Central-Directory
    record, i.e. carries the ZIP magic number."""
    try:
        endrec = _EndRecData(fp)
    except OSError:
        return False
    return bool(endrec)
|
| 200 |
+
|
| 201 |
+
def is_zipfile(filename):
    """Quickly see if a file is a ZIP file by checking the magic number.

    The filename argument may be a file or file-like object too.
    """
    verdict = False
    try:
        if hasattr(filename, "read"):
            # Already an open file-like object; check it in place.
            verdict = _check_zipfile(fp=filename)
        else:
            with open(filename, "rb") as stream:
                verdict = _check_zipfile(stream)
    except OSError:
        # Unreadable or missing paths simply are not ZIP files.
        pass
    return verdict
|
| 216 |
+
|
| 217 |
+
def _EndRecData64(fpin, offset, endrec):
    """
    Read the ZIP64 end-of-archive records and use that to update endrec
    """
    try:
        fpin.seek(offset - sizeEndCentDir64Locator, 2)
    except OSError:
        # If the seek fails, the file is not large enough to contain a ZIP64
        # end-of-archive record, so just return the end record we were given.
        return endrec

    data = fpin.read(sizeEndCentDir64Locator)
    if len(data) != sizeEndCentDir64Locator:
        return endrec
    sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
    if sig != stringEndArchive64Locator:
        # No ZIP64 locator present; the plain end record stands as-is.
        return endrec

    if diskno != 0 or disks > 1:
        raise BadZipFile("zipfiles that span multiple disks are not supported")

    # Assume no 'zip64 extensible data'
    fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
    data = fpin.read(sizeEndCentDir64)
    if len(data) != sizeEndCentDir64:
        return endrec
    sig, sz, create_version, read_version, disk_num, disk_dir, \
        dircount, dircount2, dirsize, diroffset = \
        struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        return endrec

    # Update the original endrec using data from the ZIP64 record
    endrec[_ECD_SIGNATURE] = sig
    endrec[_ECD_DISK_NUMBER] = disk_num
    endrec[_ECD_DISK_START] = disk_dir
    endrec[_ECD_ENTRIES_THIS_DISK] = dircount
    endrec[_ECD_ENTRIES_TOTAL] = dircount2
    endrec[_ECD_SIZE] = dirsize
    endrec[_ECD_OFFSET] = diroffset
    return endrec
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.

    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the file seek offset of this record."""

    # Determine file size
    fpin.seek(0, 2)
    filesize = fpin.tell()

    # Check to see if this is ZIP file with no archive comment (the
    # "end of central directory" structure should be the last item in the
    # file if this is the case).
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except OSError:
        return None
    data = fpin.read()
    if (len(data) == sizeEndCentDir and
        data[0:4] == stringEndArchive and
        data[-2:] == b"\000\000"):
        # the signature is correct and there's no comment, unpack structure
        endrec = struct.unpack(structEndArchive, data)
        endrec=list(endrec)

        # Append a blank comment and record start offset
        endrec.append(b"")
        endrec.append(filesize - sizeEndCentDir)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)

    # Either this is not a ZIP file, or it is a ZIP file with an archive
    # comment.  Search the end of the file for the "end of central directory"
    # record signature. The comment is the last item in the ZIP file and may be
    # up to 64K long.  It is assumed that the "end of central directory" magic
    # number does not appear in the comment.
    maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read()
    start = data.rfind(stringEndArchive)
    if start >= 0:
        # found the magic number; attempt to unpack and interpret
        recData = data[start:start+sizeEndCentDir]
        if len(recData) != sizeEndCentDir:
            # Zip file is corrupted.
            return None
        endrec = list(struct.unpack(structEndArchive, recData))
        commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
        comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
        endrec.append(comment)
        endrec.append(maxCommentStart + start)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, maxCommentStart + start - filesize,
                             endrec)

    # Unable to find a valid end of central directory structure
    return None
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
class ZipInfo (object):
    """Class with attributes describing each file in the ZIP archive."""

    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        '_compresslevel',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
        '_end_offset',
    )

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename   # Original file name in archive

        # Terminate the file name at the first null byte.  Null bytes in file
        # names are used as tricks by viruses in archives.
        null_byte = filename.find(chr(0))
        if null_byte >= 0:
            filename = filename[0:null_byte]
        # This is used to ensure paths in generated ZIP files always use
        # forward slashes as the directory separator, as required by the
        # ZIP format specification.
        if os.sep != "/" and os.sep in filename:
            filename = filename.replace(os.sep, "/")

        self.filename = filename        # Normalized file name
        self.date_time = date_time      # year, month, day, hour, min, sec

        if date_time[0] < 1980:
            raise ValueError('ZIP does not support timestamps before 1980')

        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self._compresslevel = None      # Level for the compressor
        self.comment = b""              # Comment for each file
        self.extra = b""                # ZIP extra data
        if sys.platform == 'win32':
            self.create_system = 0          # System which created ZIP archive
        else:
            # Assume everything else is unix-y
            self.create_system = 3          # System which created ZIP archive
        self.create_version = DEFAULT_VERSION  # Version which created ZIP archive
        self.extract_version = DEFAULT_VERSION # Version needed to extract archive
        self.reserved = 0               # Must be zero
        self.flag_bits = 0              # ZIP flag bits
        self.volume = 0                 # Volume number of file header
        self.internal_attr = 0          # Internal attributes
        self.external_attr = 0          # External file attributes
        self.compress_size = 0          # Size of the compressed file
        self.file_size = 0              # Size of the uncompressed file
        self._end_offset = None         # Start of the next local header or central directory
        # Other attributes are set by class ZipFile:
        # header_offset         Byte offset to the file header
        # CRC                   CRC-32 of the uncompressed file

    def __repr__(self):
        result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)]
        if self.compress_type != ZIP_STORED:
            result.append(' compress_type=%s' %
                          compressor_names.get(self.compress_type,
                                               self.compress_type))
        hi = self.external_attr >> 16
        lo = self.external_attr & 0xFFFF
        if hi:
            # High 16 bits hold the Unix mode (see from_file below).
            result.append(' filemode=%r' % stat.filemode(hi))
        if lo:
            result.append(' external_attr=%#x' % lo)
        isdir = self.is_dir()
        if not isdir or self.file_size:
            result.append(' file_size=%r' % self.file_size)
        if ((not isdir or self.compress_size) and
            (self.compress_type != ZIP_STORED or
             self.file_size != self.compress_size)):
            result.append(' compress_size=%r' % self.compress_size)
        result.append('>')
        return ''.join(result)

    def FileHeader(self, zip64=None):
        """Return the per-file header as a bytes object."""
        # Pack the modification time into the 16-bit DOS date/time fields
        # (two-second resolution for the seconds part).
        dt = self.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        extra = self.extra

        min_version = 0
        if zip64 is None:
            zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
        if zip64:
            # Append the ZIP64 extra-field record (ID 0x0001) holding the
            # 64-bit sizes.
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                                        1, struct.calcsize(fmt)-4, file_size, compress_size)
        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            if not zip64:
                raise LargeZipFile("Filesize would require ZIP64 extensions")
            # File is larger than what fits into a 4 byte integer,
            # fall back to the ZIP64 extension
            file_size = 0xffffffff
            compress_size = 0xffffffff
            min_version = ZIP64_VERSION

        if self.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif self.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        self.extract_version = max(min_version, self.extract_version)
        self.create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                             self.extract_version, self.reserved, flag_bits,
                             self.compress_type, dostime, dosdate, CRC,
                             compress_size, file_size,
                             len(filename), len(extra))
        return header + filename + extra

    def _encodeFilenameFlags(self):
        # ASCII names are stored as-is; anything else is stored as UTF-8
        # with the UTF-8 general-purpose flag bit (0x800) set.
        try:
            return self.filename.encode('ascii'), self.flag_bits
        except UnicodeEncodeError:
            return self.filename.encode('utf-8'), self.flag_bits | 0x800

    def _decodeExtra(self):
        # Try to decode the extra field.
        extra = self.extra
        unpack = struct.unpack
        while len(extra) >= 4:
            tp, ln = unpack('<HH', extra[:4])
            if ln+4 > len(extra):
                raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
            if tp == 0x0001:
                data = extra[4:ln+4]
                # ZIP64 extension (large files and/or large archives)
                # A 32-bit field set to the 0xFFFFFFFF sentinel means the
                # real 64-bit value follows here, in this fixed order.
                try:
                    if self.file_size in (0xFFFF_FFFF_FFFF_FFFF, 0xFFFF_FFFF):
                        field = "File size"
                        self.file_size, = unpack('<Q', data[:8])
                        data = data[8:]
                    if self.compress_size == 0xFFFF_FFFF:
                        field = "Compress size"
                        self.compress_size, = unpack('<Q', data[:8])
                        data = data[8:]
                    if self.header_offset == 0xFFFF_FFFF:
                        field = "Header offset"
                        self.header_offset, = unpack('<Q', data[:8])
                except struct.error:
                    raise BadZipFile(f"Corrupt zip64 extra field. "
                                     f"{field} not found.") from None

            extra = extra[ln+4:]

    @classmethod
    def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
        """Construct an appropriate ZipInfo for a file on the filesystem.

        filename should be the path to a file or directory on the filesystem.

        arcname is the name which it will have within the archive (by default,
        this will be the same as filename, but without a drive letter and with
        leading path separators removed).

        With strict_timestamps=False, modification times outside the
        DOS-representable range are clamped to its 1980..2107 window.
        """
        if isinstance(filename, os.PathLike):
            filename = os.fspath(filename)
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        if not strict_timestamps and date_time[0] < 1980:
            date_time = (1980, 1, 1, 0, 0, 0)
        elif not strict_timestamps and date_time[0] > 2107:
            date_time = (2107, 12, 31, 23, 59, 59)
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = cls(arcname, date_time)
        zinfo.external_attr = (st.st_mode & 0xFFFF) << 16  # Unix attributes
        if isdir:
            zinfo.file_size = 0
            zinfo.external_attr |= 0x10  # MS-DOS directory flag
        else:
            zinfo.file_size = st.st_size

        return zinfo

    def is_dir(self):
        """Return True if this archive member is a directory."""
        return self.filename[-1] == '/'
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
# ZIP encryption uses the CRC32 one-byte primitive for scrambling some
|
| 539 |
+
# internal keys. We noticed that a direct implementation is faster than
|
| 540 |
+
# relying on binascii.crc32().
|
| 541 |
+
|
| 542 |
+
_crctable = None
|
| 543 |
+
def _gen_crc(crc):
|
| 544 |
+
for j in range(8):
|
| 545 |
+
if crc & 1:
|
| 546 |
+
crc = (crc >> 1) ^ 0xEDB88320
|
| 547 |
+
else:
|
| 548 |
+
crc >>= 1
|
| 549 |
+
return crc
|
| 550 |
+
|
| 551 |
+
# ZIP supports a password-based form of encryption. Even though known
|
| 552 |
+
# plaintext attacks have been found against it, it is still useful
|
| 553 |
+
# to be able to get data out of such a file.
|
| 554 |
+
#
|
| 555 |
+
# Usage:
|
| 556 |
+
# zd = _ZipDecrypter(mypwd)
|
| 557 |
+
# plain_bytes = zd(cypher_bytes)
|
| 558 |
+
|
| 559 |
+
def _ZipDecrypter(pwd):
    """Return a decrypter callable for ZIP's legacy password encryption.

    pwd is the password as bytes.  The returned callable maps cipher bytes
    to plain bytes while advancing a three-key internal state (the key
    updates are order-critical).
    """
    key0 = 305419896
    key1 = 591751049
    key2 = 878082192

    global _crctable
    if _crctable is None:
        # Build the 256-entry CRC table lazily, once per process.
        _crctable = list(map(_gen_crc, range(256)))
    crctable = _crctable

    def crc32(ch, crc):
        """Compute the CRC32 primitive on one byte."""
        return (crc >> 8) ^ crctable[(crc ^ ch) & 0xFF]

    def update_keys(c):
        nonlocal key0, key1, key2
        key0 = crc32(c, key0)
        key1 = (key1 + (key0 & 0xFF)) & 0xFFFFFFFF
        key1 = (key1 * 134775813 + 1) & 0xFFFFFFFF
        key2 = crc32(key1 >> 24, key2)

    # Prime the key state with the password bytes.
    for p in pwd:
        update_keys(p)

    def decrypter(data):
        """Decrypt a bytes object."""
        result = bytearray()
        append = result.append
        for c in data:
            k = key2 | 2
            c ^= ((k * (k^1)) >> 8) & 0xFF
            # Key state advances with each decrypted (plain) byte.
            update_keys(c)
            append(c)
        return bytes(result)

    return decrypter
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
class LZMACompressor:
|
| 598 |
+
|
| 599 |
+
def __init__(self):
|
| 600 |
+
self._comp = None
|
| 601 |
+
|
| 602 |
+
def _init(self):
|
| 603 |
+
props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1})
|
| 604 |
+
self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[
|
| 605 |
+
lzma._decode_filter_properties(lzma.FILTER_LZMA1, props)
|
| 606 |
+
])
|
| 607 |
+
return struct.pack('<BBH', 9, 4, len(props)) + props
|
| 608 |
+
|
| 609 |
+
def compress(self, data):
|
| 610 |
+
if self._comp is None:
|
| 611 |
+
return self._init() + self._comp.compress(data)
|
| 612 |
+
return self._comp.compress(data)
|
| 613 |
+
|
| 614 |
+
def flush(self):
|
| 615 |
+
if self._comp is None:
|
| 616 |
+
return self._init() + self._comp.flush()
|
| 617 |
+
return self._comp.flush()
|
| 618 |
+
|
| 619 |
+
|
| 620 |
+
class LZMADecompressor:
    """Decompressor for the ZIP flavour of raw LZMA (properties header
    followed by the raw LZMA1 stream)."""

    def __init__(self):
        self._decomp = None        # created once the header has arrived
        self._unconsumed = b''     # buffered bytes preceding the stream
        self.eof = False

    def decompress(self, data):
        if self._decomp is None:
            # Buffer input until the 4-byte header and the filter
            # properties it announces have fully arrived.
            self._unconsumed += data
            if len(self._unconsumed) <= 4:
                return b''
            psize, = struct.unpack('<H', self._unconsumed[2:4])
            if len(self._unconsumed) <= 4 + psize:
                return b''

            raw_props = self._unconsumed[4:4 + psize]
            filters = [lzma._decode_filter_properties(lzma.FILTER_LZMA1,
                                                      raw_props)]
            self._decomp = lzma.LZMADecompressor(lzma.FORMAT_RAW,
                                                 filters=filters)
            data = self._unconsumed[4 + psize:]
            del self._unconsumed

        out = self._decomp.decompress(data)
        self.eof = self._decomp.eof
        return out
|
| 646 |
+
|
| 647 |
+
|
| 648 |
+
# Human-readable names for compression-method IDs; used in reprs and in
# _get_decompressor's NotImplementedError messages.  Most of these methods
# are recognized here but not supported by this module.
compressor_names = {
    0: 'store',
    1: 'shrink',
    2: 'reduce',
    3: 'reduce',
    4: 'reduce',
    5: 'reduce',
    6: 'implode',
    7: 'tokenize',
    8: 'deflate',
    9: 'deflate64',
    10: 'implode',
    12: 'bzip2',
    14: 'lzma',
    18: 'terse',
    19: 'lz77',
    97: 'wavpack',
    98: 'ppmd',
}
|
| 667 |
+
|
| 668 |
+
def _check_compression(compression):
    """Validate that *compression* is usable.

    Raises RuntimeError when the backing module failed to import, and
    NotImplementedError for methods this module does not handle.
    ZIP_STORED always passes.
    """
    if compression == ZIP_STORED:
        return
    backing = {
        ZIP_DEFLATED: (zlib, "zlib"),
        ZIP_BZIP2: (bz2, "bz2"),
        ZIP_LZMA: (lzma, "lzma"),
    }
    if compression not in backing:
        raise NotImplementedError("That compression method is not supported")
    module, modname = backing[compression]
    if not module:
        raise RuntimeError(
            "Compression requires the (missing) %s module" % modname)
|
| 685 |
+
|
| 686 |
+
|
| 687 |
+
def _get_compressor(compress_type, compresslevel=None):
    """Return a fresh compressor object for *compress_type*.

    Returns None for methods that need no compressor (e.g. ZIP_STORED or
    anything unrecognized).  compresslevel, when given, is forwarded to
    zlib/bz2; it is ignored for ZIP_LZMA.
    """
    if compress_type == ZIP_DEFLATED:
        level = zlib.Z_DEFAULT_COMPRESSION if compresslevel is None else compresslevel
        return zlib.compressobj(level, zlib.DEFLATED, -15)
    if compress_type == ZIP_BZIP2:
        if compresslevel is None:
            return bz2.BZ2Compressor()
        return bz2.BZ2Compressor(compresslevel)
    if compress_type == ZIP_LZMA:
        # compresslevel is ignored for ZIP_LZMA
        return LZMACompressor()
    return None
|
| 701 |
+
|
| 702 |
+
|
| 703 |
+
def _get_decompressor(compress_type):
    """Return a fresh decompressor for *compress_type* (None for stored).

    Raises via _check_compression when the method is unusable, and
    NotImplementedError (naming the method when known) otherwise.
    """
    _check_compression(compress_type)
    if compress_type == ZIP_STORED:
        return None
    if compress_type == ZIP_DEFLATED:
        return zlib.decompressobj(-15)
    if compress_type == ZIP_BZIP2:
        return bz2.BZ2Decompressor()
    if compress_type == ZIP_LZMA:
        return LZMADecompressor()
    descr = compressor_names.get(compress_type)
    if descr:
        raise NotImplementedError("compression type %d (%s)" % (compress_type, descr))
    raise NotImplementedError("compression type %d" % (compress_type,))
|
| 719 |
+
|
| 720 |
+
|
| 721 |
+
class _SharedFile:
|
| 722 |
+
def __init__(self, file, pos, close, lock, writing):
|
| 723 |
+
self._file = file
|
| 724 |
+
self._pos = pos
|
| 725 |
+
self._close = close
|
| 726 |
+
self._lock = lock
|
| 727 |
+
self._writing = writing
|
| 728 |
+
self.seekable = file.seekable
|
| 729 |
+
|
| 730 |
+
def tell(self):
|
| 731 |
+
return self._pos
|
| 732 |
+
|
| 733 |
+
def seek(self, offset, whence=0):
|
| 734 |
+
with self._lock:
|
| 735 |
+
if self._writing():
|
| 736 |
+
raise ValueError("Can't reposition in the ZIP file while "
|
| 737 |
+
"there is an open writing handle on it. "
|
| 738 |
+
"Close the writing handle before trying to read.")
|
| 739 |
+
self._file.seek(offset, whence)
|
| 740 |
+
self._pos = self._file.tell()
|
| 741 |
+
return self._pos
|
| 742 |
+
|
| 743 |
+
def read(self, n=-1):
|
| 744 |
+
with self._lock:
|
| 745 |
+
if self._writing():
|
| 746 |
+
raise ValueError("Can't read from the ZIP file while there "
|
| 747 |
+
"is an open writing handle on it. "
|
| 748 |
+
"Close the writing handle before trying to read.")
|
| 749 |
+
self._file.seek(self._pos)
|
| 750 |
+
data = self._file.read(n)
|
| 751 |
+
self._pos = self._file.tell()
|
| 752 |
+
return data
|
| 753 |
+
|
| 754 |
+
def close(self):
|
| 755 |
+
if self._file is not None:
|
| 756 |
+
fileobj = self._file
|
| 757 |
+
self._file = None
|
| 758 |
+
self._close(fileobj)
|
| 759 |
+
|
| 760 |
+
# Provide the tell method for unseekable stream
|
| 761 |
+
class _Tellable:
|
| 762 |
+
def __init__(self, fp):
|
| 763 |
+
self.fp = fp
|
| 764 |
+
self.offset = 0
|
| 765 |
+
|
| 766 |
+
def write(self, data):
|
| 767 |
+
n = self.fp.write(data)
|
| 768 |
+
self.offset += n
|
| 769 |
+
return n
|
| 770 |
+
|
| 771 |
+
def tell(self):
|
| 772 |
+
return self.offset
|
| 773 |
+
|
| 774 |
+
def flush(self):
|
| 775 |
+
self.fp.flush()
|
| 776 |
+
|
| 777 |
+
def close(self):
|
| 778 |
+
self.fp.close()
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
class ZipExtFile(io.BufferedIOBase):
|
| 782 |
+
"""File-like object for reading an archive member.
|
| 783 |
+
Is returned by ZipFile.open().
|
| 784 |
+
"""
|
| 785 |
+
|
| 786 |
+
# Max size supported by decompressor.
|
| 787 |
+
MAX_N = 1 << 31 - 1
|
| 788 |
+
|
| 789 |
+
# Read from compressed files in 4k blocks.
|
| 790 |
+
MIN_READ_SIZE = 4096
|
| 791 |
+
|
| 792 |
+
# Chunk size to read during seek
|
| 793 |
+
MAX_SEEK_READ = 1 << 24
|
| 794 |
+
|
| 795 |
+
def __init__(self, fileobj, mode, zipinfo, pwd=None,
|
| 796 |
+
close_fileobj=False):
|
| 797 |
+
self._fileobj = fileobj
|
| 798 |
+
self._pwd = pwd
|
| 799 |
+
self._close_fileobj = close_fileobj
|
| 800 |
+
|
| 801 |
+
self._compress_type = zipinfo.compress_type
|
| 802 |
+
self._compress_left = zipinfo.compress_size
|
| 803 |
+
self._left = zipinfo.file_size
|
| 804 |
+
|
| 805 |
+
self._decompressor = _get_decompressor(self._compress_type)
|
| 806 |
+
|
| 807 |
+
self._eof = False
|
| 808 |
+
self._readbuffer = b''
|
| 809 |
+
self._offset = 0
|
| 810 |
+
|
| 811 |
+
self.newlines = None
|
| 812 |
+
|
| 813 |
+
self.mode = mode
|
| 814 |
+
self.name = zipinfo.filename
|
| 815 |
+
|
| 816 |
+
if hasattr(zipinfo, 'CRC'):
|
| 817 |
+
self._expected_crc = zipinfo.CRC
|
| 818 |
+
self._running_crc = crc32(b'')
|
| 819 |
+
else:
|
| 820 |
+
self._expected_crc = None
|
| 821 |
+
|
| 822 |
+
self._seekable = False
|
| 823 |
+
try:
|
| 824 |
+
if fileobj.seekable():
|
| 825 |
+
self._orig_compress_start = fileobj.tell()
|
| 826 |
+
self._orig_compress_size = zipinfo.compress_size
|
| 827 |
+
self._orig_file_size = zipinfo.file_size
|
| 828 |
+
self._orig_start_crc = self._running_crc
|
| 829 |
+
self._seekable = True
|
| 830 |
+
except AttributeError:
|
| 831 |
+
pass
|
| 832 |
+
|
| 833 |
+
self._decrypter = None
|
| 834 |
+
if pwd:
|
| 835 |
+
if zipinfo.flag_bits & 0x8:
|
| 836 |
+
# compare against the file type from extended local headers
|
| 837 |
+
check_byte = (zipinfo._raw_time >> 8) & 0xff
|
| 838 |
+
else:
|
| 839 |
+
# compare against the CRC otherwise
|
| 840 |
+
check_byte = (zipinfo.CRC >> 24) & 0xff
|
| 841 |
+
h = self._init_decrypter()
|
| 842 |
+
if h != check_byte:
|
| 843 |
+
raise RuntimeError("Bad password for file %r" % zipinfo.orig_filename)
|
| 844 |
+
|
| 845 |
+
|
| 846 |
+
def _init_decrypter(self):
|
| 847 |
+
self._decrypter = _ZipDecrypter(self._pwd)
|
| 848 |
+
# The first 12 bytes in the cypher stream is an encryption header
|
| 849 |
+
# used to strengthen the algorithm. The first 11 bytes are
|
| 850 |
+
# completely random, while the 12th contains the MSB of the CRC,
|
| 851 |
+
# or the MSB of the file time depending on the header type
|
| 852 |
+
# and is used to check the correctness of the password.
|
| 853 |
+
header = self._fileobj.read(12)
|
| 854 |
+
self._compress_left -= 12
|
| 855 |
+
return self._decrypter(header)[11]
|
| 856 |
+
|
| 857 |
+
def __repr__(self):
|
| 858 |
+
result = ['<%s.%s' % (self.__class__.__module__,
|
| 859 |
+
self.__class__.__qualname__)]
|
| 860 |
+
if not self.closed:
|
| 861 |
+
result.append(' name=%r mode=%r' % (self.name, self.mode))
|
| 862 |
+
if self._compress_type != ZIP_STORED:
|
| 863 |
+
result.append(' compress_type=%s' %
|
| 864 |
+
compressor_names.get(self._compress_type,
|
| 865 |
+
self._compress_type))
|
| 866 |
+
else:
|
| 867 |
+
result.append(' [closed]')
|
| 868 |
+
result.append('>')
|
| 869 |
+
return ''.join(result)
|
| 870 |
+
|
| 871 |
+
    def readline(self, limit=-1):
        """Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.
        """

        if limit < 0:
            # Shortcut common case - newline found in buffer.
            i = self._readbuffer.find(b'\n', self._offset) + 1
            if i > 0:
                line = self._readbuffer[self._offset: i]
                self._offset = i
                return line

        # Limited read (or no newline buffered): fall back to the generic
        # implementation, which calls back into peek()/read().
        return io.BufferedIOBase.readline(self, limit)
    def peek(self, n=1):
        """Returns buffered bytes without advancing the position."""
        if n > len(self._readbuffer) - self._offset:
            # Not enough buffered data: read more, then splice what was
            # consumed back onto the front of the buffer so the stream
            # position is unchanged.
            chunk = self.read(n)
            if len(chunk) > self._offset:
                self._readbuffer = chunk + self._readbuffer[self._offset:]
                self._offset = 0
            else:
                self._offset -= len(chunk)

        # Return up to 512 bytes to reduce allocation overhead for tight loops.
        return self._readbuffer[self._offset: self._offset + 512]
    def readable(self):
        """Archive-member streams are always readable while open."""
        if self.closed:
            raise ValueError("I/O operation on closed file.")
        return True
    def read(self, n=-1):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
        """
        if self.closed:
            raise ValueError("read from closed file.")
        if n is None or n < 0:
            # Unbounded read: drain the buffer, then decompress to EOF.
            buf = self._readbuffer[self._offset:]
            self._readbuffer = b''
            self._offset = 0
            while not self._eof:
                buf += self._read1(self.MAX_N)
            return buf

        end = n + self._offset
        if end < len(self._readbuffer):
            # Fast path: request fully satisfied from the buffer.
            buf = self._readbuffer[self._offset:end]
            self._offset = end
            return buf

        # Take everything buffered, then read the remainder from the file.
        n = end - len(self._readbuffer)
        buf = self._readbuffer[self._offset:]
        self._readbuffer = b''
        self._offset = 0
        while n > 0 and not self._eof:
            data = self._read1(n)
            if n < len(data):
                # _read1 can overshoot (decompression granularity);
                # keep the excess buffered for the next call.
                self._readbuffer = data
                self._offset = n
                buf += data[:n]
                break
            buf += data
            n -= len(data)
        return buf
    def _update_crc(self, newdata):
        # Update the CRC using the given data.
        if self._expected_crc is None:
            # No need to compute the CRC if we don't have a reference value
            return
        self._running_crc = crc32(newdata, self._running_crc)
        # Check the CRC if we're at the end of the file
        if self._eof and self._running_crc != self._expected_crc:
            raise BadZipFile("Bad CRC-32 for file %r" % self.name)
    def read1(self, n):
        """Read up to n bytes with at most one read() system call."""

        if n is None or n < 0:
            # Unbounded: return the buffer plus the first non-empty chunk.
            buf = self._readbuffer[self._offset:]
            self._readbuffer = b''
            self._offset = 0
            while not self._eof:
                data = self._read1(self.MAX_N)
                if data:
                    buf += data
                    break
            return buf

        end = n + self._offset
        if end < len(self._readbuffer):
            # Fast path: request fully satisfied from the buffer.
            buf = self._readbuffer[self._offset:end]
            self._offset = end
            return buf

        n = end - len(self._readbuffer)
        buf = self._readbuffer[self._offset:]
        self._readbuffer = b''
        self._offset = 0
        if n > 0:
            # Unlike read(), stop after the first chunk that yields data.
            while not self._eof:
                data = self._read1(n)
                if n < len(data):
                    # Keep any excess decompressed bytes buffered.
                    self._readbuffer = data
                    self._offset = n
                    buf += data[:n]
                    break
                if data:
                    buf += data
                    break
        return buf
    def _read1(self, n):
        # Read up to n compressed bytes with at most one read() system call,
        # decrypt and decompress them.
        if self._eof or n <= 0:
            return b''

        # Read from file.
        if self._compress_type == ZIP_DEFLATED:
            ## Handle unconsumed data.
            data = self._decompressor.unconsumed_tail
            if n > len(data):
                data += self._read2(n - len(data))
        else:
            data = self._read2(n)

        if self._compress_type == ZIP_STORED:
            # Stored member: EOF once all compressed bytes are consumed.
            self._eof = self._compress_left <= 0
        elif self._compress_type == ZIP_DEFLATED:
            n = max(n, self.MIN_READ_SIZE)
            data = self._decompressor.decompress(data, n)
            # EOF when zlib signals end-of-stream, or when the raw bytes are
            # exhausted and nothing remains in the unconsumed tail.
            self._eof = (self._decompressor.eof or
                         self._compress_left <= 0 and
                         not self._decompressor.unconsumed_tail)
            if self._eof:
                data += self._decompressor.flush()
        else:
            data = self._decompressor.decompress(data)
            self._eof = self._decompressor.eof or self._compress_left <= 0

        # Never hand out more than the declared uncompressed size.
        data = data[:self._left]
        self._left -= len(data)
        if self._left <= 0:
            self._eof = True
        self._update_crc(data)
        return data
    def _read2(self, n):
        # Read up to n raw (still compressed, possibly encrypted) bytes from
        # the underlying file, clamped to what remains of this member.
        if self._compress_left <= 0:
            return b''

        n = max(n, self.MIN_READ_SIZE)
        n = min(n, self._compress_left)

        data = self._fileobj.read(n)
        self._compress_left -= len(data)
        if not data:
            # Underlying stream ended before the declared compressed size.
            raise EOFError

        if self._decrypter is not None:
            data = self._decrypter(data)
        return data
    def close(self):
        """Close this stream; release the shared archive handle if owned."""
        try:
            if self._close_fileobj:
                self._fileobj.close()
        finally:
            # Always mark the BufferedIOBase as closed, even if the
            # underlying close raised.
            super().close()
    def seekable(self):
        """True only when the backing file object was seekable at open time."""
        if self.closed:
            raise ValueError("I/O operation on closed file.")
        return self._seekable
    def seek(self, offset, whence=0):
        """Seek within the *uncompressed* data.

        Seeking backwards rewinds the backing file to the start of the
        member and re-decompresses; seeking forwards reads and discards.
        """
        if self.closed:
            raise ValueError("seek on closed file.")
        if not self._seekable:
            raise io.UnsupportedOperation("underlying stream is not seekable")
        curr_pos = self.tell()
        if whence == 0: # Seek from start of file
            new_pos = offset
        elif whence == 1: # Seek from current position
            new_pos = curr_pos + offset
        elif whence == 2: # Seek from EOF
            new_pos = self._orig_file_size + offset
        else:
            raise ValueError("whence must be os.SEEK_SET (0), "
                             "os.SEEK_CUR (1), or os.SEEK_END (2)")

        # Clamp the target to [0, file_size].
        if new_pos > self._orig_file_size:
            new_pos = self._orig_file_size

        if new_pos < 0:
            new_pos = 0

        read_offset = new_pos - curr_pos
        buff_offset = read_offset + self._offset

        if buff_offset >= 0 and buff_offset < len(self._readbuffer):
            # Just move the _offset index if the new position is in the _readbuffer
            self._offset = buff_offset
            read_offset = 0
        elif read_offset < 0:
            # Position is before the current position. Reset the ZipExtFile
            self._fileobj.seek(self._orig_compress_start)
            self._running_crc = self._orig_start_crc
            self._compress_left = self._orig_compress_size
            self._left = self._orig_file_size
            self._readbuffer = b''
            self._offset = 0
            self._decompressor = _get_decompressor(self._compress_type)
            self._eof = False
            read_offset = new_pos
            if self._decrypter is not None:
                # Re-consume the 12-byte encryption header after rewinding.
                self._init_decrypter()

        # Read forward in bounded chunks, discarding the data.
        while read_offset > 0:
            read_len = min(self.MAX_SEEK_READ, read_offset)
            self.read(read_len)
            read_offset -= read_len

        return self.tell()
    def tell(self):
        """Return the current position within the uncompressed data."""
        if self.closed:
            raise ValueError("tell on closed file.")
        if not self._seekable:
            raise io.UnsupportedOperation("underlying stream is not seekable")
        # Position = bytes decompressed so far, minus what is still buffered.
        filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset
        return filepos
class _ZipWriteFile(io.BufferedIOBase):
    """Writable stream returned by ZipFile.open(..., mode='w').

    Compresses written data on the fly, tracks size/CRC, and on close()
    writes the member's final header information back into the archive.
    """

    def __init__(self, zf, zinfo, zip64):
        self._zinfo = zinfo
        self._zip64 = zip64
        self._zipfile = zf
        self._compressor = _get_compressor(zinfo.compress_type,
                                           zinfo._compresslevel)
        self._file_size = 0       # uncompressed bytes written
        self._compress_size = 0   # compressed bytes written
        self._crc = 0             # running CRC-32 of uncompressed data

    @property
    def _fileobj(self):
        # Always go through the owning ZipFile so we track its current fp.
        return self._zipfile.fp

    def writable(self):
        return True

    def write(self, data):
        """Write data into the member; return the number of uncompressed
        bytes consumed."""
        if self.closed:
            raise ValueError('I/O operation on closed file.')

        # Accept any data that supports the buffer protocol
        if isinstance(data, (bytes, bytearray)):
            nbytes = len(data)
        else:
            data = memoryview(data)
            nbytes = data.nbytes
        self._file_size += nbytes

        self._crc = crc32(data, self._crc)
        if self._compressor:
            data = self._compressor.compress(data)
            self._compress_size += len(data)
        self._fileobj.write(data)
        return nbytes

    def close(self):
        """Flush the compressor and finalize the member's CRC and sizes."""
        if self.closed:
            return
        try:
            super().close()
            # Flush any data from the compressor, and update header info
            if self._compressor:
                buf = self._compressor.flush()
                self._compress_size += len(buf)
                self._fileobj.write(buf)
                self._zinfo.compress_size = self._compress_size
            else:
                self._zinfo.compress_size = self._file_size
            self._zinfo.CRC = self._crc
            self._zinfo.file_size = self._file_size

            # Write updated header info
            if self._zinfo.flag_bits & 0x08:
                # Write CRC and file sizes after the file data
                fmt = '<LLQQ' if self._zip64 else '<LLLL'
                self._fileobj.write(struct.pack(fmt, _DD_SIGNATURE, self._zinfo.CRC,
                    self._zinfo.compress_size, self._zinfo.file_size))
                self._zipfile.start_dir = self._fileobj.tell()
            else:
                if not self._zip64:
                    if self._file_size > ZIP64_LIMIT:
                        raise RuntimeError(
                            'File size unexpectedly exceeded ZIP64 limit')
                    if self._compress_size > ZIP64_LIMIT:
                        raise RuntimeError(
                            'Compressed size unexpectedly exceeded ZIP64 limit')
                # Seek backwards and write file header (which will now include
                # correct CRC and file sizes)

                # Preserve current position in file
                self._zipfile.start_dir = self._fileobj.tell()
                self._fileobj.seek(self._zinfo.header_offset)
                self._fileobj.write(self._zinfo.FileHeader(self._zip64))
                self._fileobj.seek(self._zipfile.start_dir)

            # Successfully written: Add file to our caches
            self._zipfile.filelist.append(self._zinfo)
            self._zipfile.NameToInfo[self._zinfo.filename] = self._zinfo
        finally:
            # Release the archive's single-writer lock even on failure.
            self._zipfile._writing = False
class ZipFile:
|
| 1196 |
+
""" Class with methods to open, read, write, close, list zip files.
|
| 1197 |
+
|
| 1198 |
+
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True,
|
| 1199 |
+
compresslevel=None)
|
| 1200 |
+
|
| 1201 |
+
file: Either the path to the file, or a file-like object.
|
| 1202 |
+
If it is a path, the file will be opened and closed by ZipFile.
|
| 1203 |
+
mode: The mode can be either read 'r', write 'w', exclusive create 'x',
|
| 1204 |
+
or append 'a'.
|
| 1205 |
+
compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
|
| 1206 |
+
ZIP_BZIP2 (requires bz2) or ZIP_LZMA (requires lzma).
|
| 1207 |
+
allowZip64: if True ZipFile will create files with ZIP64 extensions when
|
| 1208 |
+
needed, otherwise it will raise an exception when this would
|
| 1209 |
+
be necessary.
|
| 1210 |
+
compresslevel: None (default for the given compression type) or an integer
|
| 1211 |
+
specifying the level to pass to the compressor.
|
| 1212 |
+
When using ZIP_STORED or ZIP_LZMA this keyword has no effect.
|
| 1213 |
+
When using ZIP_DEFLATED integers 0 through 9 are accepted.
|
| 1214 |
+
When using ZIP_BZIP2 integers 1 through 9 are accepted.
|
| 1215 |
+
|
| 1216 |
+
"""
|
| 1217 |
+
|
| 1218 |
+
fp = None # Set here since __del__ checks it
|
| 1219 |
+
_windows_illegal_name_trans_table = None
|
| 1220 |
+
|
| 1221 |
+
    def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True,
                 compresslevel=None, *, strict_timestamps=True):
        """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',
        or append 'a'."""
        if mode not in ('r', 'w', 'x', 'a'):
            raise ValueError("ZipFile requires mode 'r', 'w', 'x', or 'a'")

        _check_compression(compression)

        self._allowZip64 = allowZip64
        self._didModify = False
        self.debug = 0  # Level of printing: 0 through 3
        self.NameToInfo = {}    # Find file info given name
        self.filelist = []      # List of ZipInfo instances for archive
        self.compression = compression  # Method of compression
        self.compresslevel = compresslevel
        self.mode = mode
        self.pwd = None
        self._comment = b''
        self._strict_timestamps = strict_timestamps

        # Check if we were passed a file-like object
        if isinstance(file, os.PathLike):
            file = os.fspath(file)
        if isinstance(file, str):
            # No, it's a filename
            self._filePassed = 0
            self.filename = file
            modeDict = {'r' : 'rb', 'w': 'w+b', 'x': 'x+b', 'a' : 'r+b',
                        'r+b': 'w+b', 'w+b': 'wb', 'x+b': 'xb'}
            filemode = modeDict[mode]
            while True:
                try:
                    self.fp = io.open(file, filemode)
                except OSError:
                    # Walk the fallback chain in modeDict (e.g. 'a' opens
                    # 'r+b', falling back to 'w+b' if the file is missing).
                    if filemode in modeDict:
                        filemode = modeDict[filemode]
                        continue
                    raise
                break
        else:
            self._filePassed = 1
            self.fp = file
            self.filename = getattr(file, 'name', None)
        self._fileRefCnt = 1
        self._lock = threading.RLock()
        self._seekable = True
        self._writing = False

        try:
            if mode == 'r':
                self._RealGetContents()
            elif mode in ('w', 'x'):
                # set the modified flag so central directory gets written
                # even if no files are added to the archive
                self._didModify = True
                try:
                    self.start_dir = self.fp.tell()
                except (AttributeError, OSError):
                    self.fp = _Tellable(self.fp)
                    self.start_dir = 0
                    self._seekable = False
                else:
                    # Some file-like objects can provide tell() but not seek()
                    try:
                        self.fp.seek(self.start_dir)
                    except (AttributeError, OSError):
                        self._seekable = False
            elif mode == 'a':
                try:
                    # See if file is a zip file
                    self._RealGetContents()
                    # seek to start of directory and overwrite
                    self.fp.seek(self.start_dir)
                except BadZipFile:
                    # file is not a zip file, just append
                    self.fp.seek(0, 2)

                    # set the modified flag so central directory gets written
                    # even if no files are added to the archive
                    self._didModify = True
                    self.start_dir = self.fp.tell()
            else:
                raise ValueError("Mode must be 'r', 'w', 'x', or 'a'")
        except:
            # Any setup failure must not leak the (possibly owned) handle.
            fp = self.fp
            self.fp = None
            self._fpclose(fp)
            raise
    def __enter__(self):
        # Context-manager protocol: the archive itself is the resource.
        return self
    def __exit__(self, type, value, traceback):
        # Close on context exit; any exception propagates unchanged.
        self.close()
def __repr__(self):
|
| 1318 |
+
result = ['<%s.%s' % (self.__class__.__module__,
|
| 1319 |
+
self.__class__.__qualname__)]
|
| 1320 |
+
if self.fp is not None:
|
| 1321 |
+
if self._filePassed:
|
| 1322 |
+
result.append(' file=%r' % self.fp)
|
| 1323 |
+
elif self.filename is not None:
|
| 1324 |
+
result.append(' filename=%r' % self.filename)
|
| 1325 |
+
result.append(' mode=%r' % self.mode)
|
| 1326 |
+
else:
|
| 1327 |
+
result.append(' [closed]')
|
| 1328 |
+
result.append('>')
|
| 1329 |
+
return ''.join(result)
|
| 1330 |
+
|
| 1331 |
+
    def _RealGetContents(self):
        """Read in the table of contents for the ZIP file."""
        fp = self.fp
        try:
            endrec = _EndRecData(fp)
        except OSError:
            raise BadZipFile("File is not a zip file")
        if not endrec:
            raise BadZipFile("File is not a zip file")
        if self.debug > 1:
            print(endrec)
        size_cd = endrec[_ECD_SIZE]             # bytes in central directory
        offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
        self._comment = endrec[_ECD_COMMENT]    # archive comment

        # "concat" is zero, unless zip was concatenated to another file
        concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
        if endrec[_ECD_SIGNATURE] == stringEndArchive64:
            # If Zip64 extension structures are present, account for them
            concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

        if self.debug > 2:
            inferred = concat + offset_cd
            print("given, inferred, offset", offset_cd, inferred, concat)
        # self.start_dir:  Position of start of central directory
        self.start_dir = offset_cd + concat
        if self.start_dir < 0:
            raise BadZipFile("Bad offset for central directory")
        fp.seek(self.start_dir, 0)
        # Read the whole central directory into memory and parse from there.
        data = fp.read(size_cd)
        fp = io.BytesIO(data)
        total = 0
        while total < size_cd:
            centdir = fp.read(sizeCentralDir)
            if len(centdir) != sizeCentralDir:
                raise BadZipFile("Truncated central directory")
            centdir = struct.unpack(structCentralDir, centdir)
            if centdir[_CD_SIGNATURE] != stringCentralDir:
                raise BadZipFile("Bad magic number for central directory")
            if self.debug > 2:
                print(centdir)
            filename = fp.read(centdir[_CD_FILENAME_LENGTH])
            flags = centdir[5]
            if flags & 0x800:
                # UTF-8 file names extension
                filename = filename.decode('utf-8')
            else:
                # Historical ZIP filename encoding
                filename = filename.decode('cp437')
            # Create ZipInfo instance to store file information
            x = ZipInfo(filename)
            x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
            (x.create_version, x.create_system, x.extract_version, x.reserved,
                x.flag_bits, x.compress_type, t, d,
                x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            if x.extract_version > MAX_EXTRACT_VERSION:
                raise NotImplementedError("zip file version %.1f" %
                                          (x.extract_version / 10))
            x.volume, x.internal_attr, x.external_attr = centdir[15:18]
            # Convert date/time code to (year, month, day, hour, min, sec)
            x._raw_time = t
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                            t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

            x._decodeExtra()
            x.header_offset = x.header_offset + concat
            self.filelist.append(x)
            self.NameToInfo[x.filename] = x

            # update total bytes read from central directory
            total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                     + centdir[_CD_EXTRA_FIELD_LENGTH]
                     + centdir[_CD_COMMENT_LENGTH])

        if self.debug > 2:
            print("total", total)

        # Record each member's end offset (the next member's start, walking
        # backwards from the central directory) so open() can detect
        # overlapping entries (zip-bomb mitigation).
        end_offset = self.start_dir
        for zinfo in sorted(self.filelist,
                            key=lambda zinfo: zinfo.header_offset,
                            reverse=True):
            zinfo._end_offset = end_offset
            end_offset = zinfo.header_offset
def namelist(self):
|
| 1418 |
+
"""Return a list of file names in the archive."""
|
| 1419 |
+
return [data.filename for data in self.filelist]
|
| 1420 |
+
|
| 1421 |
+
    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive."""
        # Note: this is the live internal list, not a copy.
        return self.filelist
    def printdir(self, file=None):
        """Print a table of contents for the zip file."""
        print("%-46s %19s %12s" % ("File Name", "Modified ", "Size"),
              file=file)
        for zinfo in self.filelist:
            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
            print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size),
                  file=file)
    def testzip(self):
        """Read all the files and check the CRC.

        Returns the name of the first bad file, or None if all are OK.
        """
        chunk_size = 2 ** 20
        for zinfo in self.filelist:
            try:
                # Read by chunks, to avoid an OverflowError or a
                # MemoryError with very large embedded files.
                with self.open(zinfo.filename, "r") as f:
                    while f.read(chunk_size):     # Check CRC-32
                        pass
            except BadZipFile:
                return zinfo.filename
def getinfo(self, name):
|
| 1449 |
+
"""Return the instance of ZipInfo given 'name'."""
|
| 1450 |
+
info = self.NameToInfo.get(name)
|
| 1451 |
+
if info is None:
|
| 1452 |
+
raise KeyError(
|
| 1453 |
+
'There is no item named %r in the archive' % name)
|
| 1454 |
+
|
| 1455 |
+
return info
|
| 1456 |
+
|
| 1457 |
+
def setpassword(self, pwd):
|
| 1458 |
+
"""Set default password for encrypted files."""
|
| 1459 |
+
if pwd and not isinstance(pwd, bytes):
|
| 1460 |
+
raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
|
| 1461 |
+
if pwd:
|
| 1462 |
+
self.pwd = pwd
|
| 1463 |
+
else:
|
| 1464 |
+
self.pwd = None
|
| 1465 |
+
|
| 1466 |
+
    @property
    def comment(self):
        """The comment text associated with the ZIP file."""
        # Raw bytes, as stored in the end-of-central-directory record.
        return self._comment
    @comment.setter
    def comment(self, comment):
        """Set the archive comment (bytes, at most ZIP_MAX_COMMENT long)."""
        if not isinstance(comment, bytes):
            raise TypeError("comment: expected bytes, got %s" % type(comment).__name__)
        # check for valid comment length
        if len(comment) > ZIP_MAX_COMMENT:
            # Over-long comments are truncated with a warning, not rejected.
            import warnings
            warnings.warn('Archive comment is too long; truncating to %d bytes'
                          % ZIP_MAX_COMMENT, stacklevel=2)
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
        # Mark the archive dirty so the new comment is written on close().
        self._didModify = True
    def read(self, name, pwd=None):
        """Return file bytes for name."""
        # Delegate to open(); the with-block releases the shared handle.
        with self.open(name, "r", pwd) as fp:
            return fp.read()
    def open(self, name, mode="r", pwd=None, *, force_zip64=False):
        """Return file-like object for 'name'.

        name is a string for the file name within the ZIP file, or a ZipInfo
        object.

        mode should be 'r' to read a file already in the ZIP file, or 'w' to
        write to a file newly added to the archive.

        pwd is the password to decrypt files (only used for reading).

        When writing, if the file size is not known in advance but may exceed
        2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large
        files.  If the size is known in advance, it is best to pass a ZipInfo
        instance for name, with zinfo.file_size set.
        """
        if mode not in {"r", "w"}:
            raise ValueError('open() requires mode "r" or "w"')
        if pwd and not isinstance(pwd, bytes):
            raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
        if pwd and (mode == "w"):
            raise ValueError("pwd is only supported for reading files")
        if not self.fp:
            raise ValueError(
                "Attempt to use ZIP archive that was already closed")

        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        elif mode == 'w':
            zinfo = ZipInfo(name)
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
        else:
            # Get info object for name
            zinfo = self.getinfo(name)

        if mode == 'w':
            return self._open_to_write(zinfo, force_zip64=force_zip64)

        if self._writing:
            raise ValueError("Can't read from the ZIP file while there "
                    "is an open writing handle on it. "
                    "Close the writing handle before trying to read.")

        # Open for reading:
        self._fileRefCnt += 1
        zef_file = _SharedFile(self.fp, zinfo.header_offset,
                               self._fpclose, self._lock, lambda: self._writing)
        try:
            # Skip the file header:
            fheader = zef_file.read(sizeFileHeader)
            if len(fheader) != sizeFileHeader:
                raise BadZipFile("Truncated file header")
            fheader = struct.unpack(structFileHeader, fheader)
            if fheader[_FH_SIGNATURE] != stringFileHeader:
                raise BadZipFile("Bad magic number for file header")

            fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
            if fheader[_FH_EXTRA_FIELD_LENGTH]:
                zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])

            if zinfo.flag_bits & 0x20:
                # Zip 2.7: compressed patched data
                raise NotImplementedError("compressed patched data (flag bit 5)")

            if zinfo.flag_bits & 0x40:
                # strong encryption
                raise NotImplementedError("strong encryption (flag bit 6)")

            if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & 0x800:
                # UTF-8 filename
                fname_str = fname.decode("utf-8")
            else:
                fname_str = fname.decode("cp437")

            # The local header must agree with the central directory entry.
            if fname_str != zinfo.orig_filename:
                raise BadZipFile(
                    'File name in directory %r and header %r differ.'
                    % (zinfo.orig_filename, fname))

            if (zinfo._end_offset is not None and
                zef_file.tell() + zinfo.compress_size > zinfo._end_offset):
                raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)")

            # check for encrypted flag & handle password
            is_encrypted = zinfo.flag_bits & 0x1
            if is_encrypted:
                if not pwd:
                    pwd = self.pwd
                if not pwd:
                    raise RuntimeError("File %r is encrypted, password "
                                       "required for extraction" % name)
            else:
                pwd = None

            return ZipExtFile(zef_file, mode, zinfo, pwd, True)
        except:
            # On any failure, release the shared handle before re-raising.
            zef_file.close()
            raise
    def _open_to_write(self, zinfo, force_zip64=False):
        """Return a _ZipWriteFile for appending 'zinfo' to the archive."""
        if force_zip64 and not self._allowZip64:
            raise ValueError(
                "force_zip64 is True, but allowZip64 was False when opening "
                "the ZIP file."
            )
        if self._writing:
            raise ValueError("Can't write to the ZIP file while there is "
                             "another write handle open on it. "
                             "Close the first handle before opening another.")

        # Size and CRC are overwritten with correct data after processing the file
        zinfo.compress_size = 0
        zinfo.CRC = 0

        zinfo.flag_bits = 0x00
        if zinfo.compress_type == ZIP_LZMA:
            # Compressed data includes an end-of-stream (EOS) marker
            zinfo.flag_bits |= 0x02
        if not self._seekable:
            # Non-seekable output: sizes/CRC go in a trailing data descriptor.
            zinfo.flag_bits |= 0x08

        if not zinfo.external_attr:
            zinfo.external_attr = 0o600 << 16  # permissions: ?rw-------

        # Compressed size can be larger than uncompressed size
        zip64 = self._allowZip64 and \
                (force_zip64 or zinfo.file_size * 1.05 > ZIP64_LIMIT)

        if self._seekable:
            self.fp.seek(self.start_dir)
        zinfo.header_offset = self.fp.tell()

        self._writecheck(zinfo)
        self._didModify = True

        self.fp.write(zinfo.FileHeader(zip64))

        # Single-writer flag; cleared by _ZipWriteFile.close().
        self._writing = True
        return _ZipWriteFile(self, zinfo, zip64)
def extract(self, member, path=None, pwd=None):
|
| 1633 |
+
"""Extract a member from the archive to the current working directory,
|
| 1634 |
+
using its full name. Its file information is extracted as accurately
|
| 1635 |
+
as possible. `member' may be a filename or a ZipInfo object. You can
|
| 1636 |
+
specify a different directory using `path'.
|
| 1637 |
+
"""
|
| 1638 |
+
if path is None:
|
| 1639 |
+
path = os.getcwd()
|
| 1640 |
+
else:
|
| 1641 |
+
path = os.fspath(path)
|
| 1642 |
+
|
| 1643 |
+
return self._extract_member(member, path, pwd)
|
| 1644 |
+
|
| 1645 |
+
def extractall(self, path=None, members=None, pwd=None):
    """Extract all members from the archive to the current working
    directory.  `path' specifies a different directory to extract to.
    `members' is optional and must be a subset of the list returned
    by namelist().
    """
    if members is None:
        members = self.namelist()

    # Default to the process CWD; otherwise accept any path-like object.
    destination = os.getcwd() if path is None else os.fspath(path)

    for member in members:
        self._extract_member(member, destination, pwd)
|
| 1661 |
+
|
| 1662 |
+
@classmethod
def _sanitize_windows_name(cls, arcname, pathsep):
    """Replace bad characters and remove trailing dots from parts."""
    # The translation table is built once and cached on the class.
    trans = cls._windows_illegal_name_trans_table
    if not trans:
        bad_chars = ':<>|"?*'
        trans = str.maketrans(bad_chars, '_' * len(bad_chars))
        cls._windows_illegal_name_trans_table = trans
    cleaned = arcname.translate(trans)
    # Windows refuses trailing dots on path components; strip them,
    # then rejoin while dropping components that became empty.
    stripped = (part.rstrip('.') for part in cleaned.split(pathsep))
    return pathsep.join(part for part in stripped if part)
|
| 1676 |
+
|
| 1677 |
+
def _extract_member(self, member, targetpath, pwd):
    """Extract the ZipInfo object 'member' to a physical
    file on the path targetpath.
    """
    if not isinstance(member, ZipInfo):
        member = self.getinfo(member)

    # Map the archive name onto this platform's path separator.
    arcname = member.filename.replace('/', os.path.sep)
    if os.path.altsep:
        arcname = arcname.replace(os.path.altsep, os.path.sep)
    # Treat absolute names as relative: drop the drive/UNC prefix,
    # redundant separators, and "." / ".." components.
    arcname = os.path.splitdrive(arcname)[1]
    unwanted = ('', os.path.curdir, os.path.pardir)
    arcname = os.path.sep.join(
        part for part in arcname.split(os.path.sep) if part not in unwanted)
    if os.path.sep == '\\':
        # On Windows, scrub characters the filesystem rejects.
        arcname = self._sanitize_windows_name(arcname, os.path.sep)

    targetpath = os.path.normpath(os.path.join(targetpath, arcname))

    # Create any missing parent directories first.
    parent = os.path.dirname(targetpath)
    if parent and not os.path.exists(parent):
        os.makedirs(parent)

    if member.is_dir():
        if not os.path.isdir(targetpath):
            os.mkdir(targetpath)
        return targetpath

    # Stream the member's bytes straight to disk.
    with self.open(member, pwd=pwd) as source, \
         open(targetpath, "wb") as target:
        shutil.copyfileobj(source, target)

    return targetpath
|
| 1718 |
+
|
| 1719 |
+
def _writecheck(self, zinfo):
    """Check for errors before writing a file to the archive."""
    if zinfo.filename in self.NameToInfo:
        import warnings
        warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)
    if self.mode not in ('w', 'x', 'a'):
        raise ValueError("write() requires mode 'w', 'x', or 'a'")
    if not self.fp:
        raise ValueError(
            "Attempt to write ZIP archive that was already closed")
    _check_compression(zinfo.compress_type)
    if not self._allowZip64:
        # Without ZIP64, each of these quantities is hard-limited.
        reason = None
        if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:
            reason = "Files count"
        elif zinfo.file_size > ZIP64_LIMIT:
            reason = "Filesize"
        elif zinfo.header_offset > ZIP64_LIMIT:
            reason = "Zipfile size"
        if reason:
            raise LargeZipFile(reason + " would require ZIP64 extensions")
|
| 1741 |
+
|
| 1742 |
+
def write(self, filename, arcname=None,
          compress_type=None, compresslevel=None):
    """Put the bytes from filename into the archive under the name
    arcname."""
    if not self.fp:
        raise ValueError(
            "Attempt to write to ZIP archive that was already closed")
    if self._writing:
        raise ValueError(
            "Can't write to ZIP archive while an open writing handle exists"
        )

    zinfo = ZipInfo.from_file(filename, arcname,
                              strict_timestamps=self._strict_timestamps)

    if zinfo.is_dir():
        # Directories carry no payload.
        zinfo.compress_size = 0
        zinfo.CRC = 0
    else:
        # Explicit arguments win over the archive-level defaults.
        zinfo.compress_type = (compress_type if compress_type is not None
                               else self.compression)
        zinfo._compresslevel = (compresslevel if compresslevel is not None
                                else self.compresslevel)

    if zinfo.is_dir():
        with self._lock:
            if self._seekable:
                self.fp.seek(self.start_dir)
            zinfo.header_offset = self.fp.tell()  # Start of header bytes
            if zinfo.compress_type == ZIP_LZMA:
                # Compressed data includes an end-of-stream (EOS) marker
                zinfo.flag_bits |= 0x02

            self._writecheck(zinfo)
            self._didModify = True

            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader(False))
            self.start_dir = self.fp.tell()
    else:
        # Stream the file contents through an open writing handle.
        with open(filename, "rb") as src, self.open(zinfo, 'w') as dest:
            shutil.copyfileobj(src, dest, 1024*8)
|
| 1790 |
+
|
| 1791 |
+
def writestr(self, zinfo_or_arcname, data,
             compress_type=None, compresslevel=None):
    """Write a file into the archive.  The contents is 'data', which
    may be either a 'str' or a 'bytes' instance; if it is a 'str',
    it is encoded as UTF-8 first.
    'zinfo_or_arcname' is either a ZipInfo instance or
    the name of the file in the archive."""
    if isinstance(data, str):
        data = data.encode("utf-8")
    if isinstance(zinfo_or_arcname, ZipInfo):
        zinfo = zinfo_or_arcname
    else:
        # Build a ZipInfo from the bare archive name, stamped "now".
        zinfo = ZipInfo(filename=zinfo_or_arcname,
                        date_time=time.localtime(time.time())[:6])
        zinfo.compress_type = self.compression
        zinfo._compresslevel = self.compresslevel
        if zinfo.filename[-1] == '/':
            zinfo.external_attr = 0o40775 << 16   # drwxrwxr-x
            zinfo.external_attr |= 0x10           # MS-DOS directory flag
        else:
            zinfo.external_attr = 0o600 << 16     # ?rw-------

    if not self.fp:
        raise ValueError(
            "Attempt to write to ZIP archive that was already closed")
    if self._writing:
        raise ValueError(
            "Can't write to ZIP archive while an open writing handle exists."
        )

    # Explicit arguments override whatever the ZipInfo carried.
    if compress_type is not None:
        zinfo.compress_type = compress_type
    if compresslevel is not None:
        zinfo._compresslevel = compresslevel

    zinfo.file_size = len(data)  # Uncompressed size
    with self._lock:
        with self.open(zinfo, mode='w') as dest:
            dest.write(data)
|
| 1831 |
+
|
| 1832 |
+
def __del__(self):
    """Call the "close()" method in case the user forgot."""
    # Safety net: flush the ending records even without an explicit close().
    self.close()
|
| 1835 |
+
|
| 1836 |
+
def close(self):
    """Close the file, and for mode 'w', 'x' and 'a' write the ending
    records."""
    if self.fp is None:
        return

    if self._writing:
        raise ValueError("Can't close the ZIP file while there is "
                         "an open writing handle on it. "
                         "Close the writing handle before closing the zip.")

    try:
        if self.mode in ('w', 'x', 'a') and self._didModify:
            # Flush the central directory and end-of-archive records.
            with self._lock:
                if self._seekable:
                    self.fp.seek(self.start_dir)
                self._write_end_record()
    finally:
        # Always release the underlying file, even if writing failed.
        fp, self.fp = self.fp, None
        self._fpclose(fp)
|
| 1857 |
+
|
| 1858 |
+
def _write_end_record(self):
    """Write the central directory and the end-of-archive record(s)."""
    for zinfo in self.filelist:  # write central directory
        # Encode the timestamp in MS-DOS date/time format.
        dt = zinfo.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)

        # Fields that overflow 32 bits move into a ZIP64 extra record.
        zip64_fields = []
        if zinfo.file_size > ZIP64_LIMIT \
           or zinfo.compress_size > ZIP64_LIMIT:
            zip64_fields.append(zinfo.file_size)
            zip64_fields.append(zinfo.compress_size)
            file_size = 0xffffffff
            compress_size = 0xffffffff
        else:
            file_size = zinfo.file_size
            compress_size = zinfo.compress_size

        if zinfo.header_offset > ZIP64_LIMIT:
            zip64_fields.append(zinfo.header_offset)
            header_offset = 0xffffffff
        else:
            header_offset = zinfo.header_offset

        extra_data = zinfo.extra
        min_version = 0
        if zip64_fields:
            # Prepend a ZIP64 field (tag 1) ahead of any existing extras.
            extra_data = _strip_extra(extra_data, (1,))
            extra_data = struct.pack(
                '<HH' + 'Q'*len(zip64_fields),
                1, 8*len(zip64_fields), *zip64_fields) + extra_data

            min_version = ZIP64_VERSION

        if zinfo.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif zinfo.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        extract_version = max(min_version, zinfo.extract_version)
        create_version = max(min_version, zinfo.create_version)
        filename, flag_bits = zinfo._encodeFilenameFlags()
        centdir = struct.pack(structCentralDir,
                              stringCentralDir, create_version,
                              zinfo.create_system, extract_version, zinfo.reserved,
                              flag_bits, zinfo.compress_type, dostime, dosdate,
                              zinfo.CRC, compress_size, file_size,
                              len(filename), len(extra_data), len(zinfo.comment),
                              0, zinfo.internal_attr, zinfo.external_attr,
                              header_offset)
        self.fp.write(centdir)
        self.fp.write(filename)
        self.fp.write(extra_data)
        self.fp.write(zinfo.comment)

    pos2 = self.fp.tell()
    # Write end-of-zip-archive record
    centDirCount = len(self.filelist)
    centDirSize = pos2 - self.start_dir
    centDirOffset = self.start_dir
    requires_zip64 = None
    if centDirCount > ZIP_FILECOUNT_LIMIT:
        requires_zip64 = "Files count"
    elif centDirOffset > ZIP64_LIMIT:
        requires_zip64 = "Central directory offset"
    elif centDirSize > ZIP64_LIMIT:
        requires_zip64 = "Central directory size"
    if requires_zip64:
        # Need to write the ZIP64 end-of-archive records
        if not self._allowZip64:
            raise LargeZipFile(requires_zip64 +
                               " would require ZIP64 extensions")
        zip64endrec = struct.pack(
            structEndArchive64, stringEndArchive64,
            44, 45, 45, 0, 0, centDirCount, centDirCount,
            centDirSize, centDirOffset)
        self.fp.write(zip64endrec)

        zip64locrec = struct.pack(
            structEndArchive64Locator,
            stringEndArchive64Locator, 0, pos2, 1)
        self.fp.write(zip64locrec)
        # The classic record then stores saturated 16/32-bit values.
        centDirCount = min(centDirCount, 0xFFFF)
        centDirSize = min(centDirSize, 0xFFFFFFFF)
        centDirOffset = min(centDirOffset, 0xFFFFFFFF)

    endrec = struct.pack(structEndArchive, stringEndArchive,
                         0, 0, centDirCount, centDirCount,
                         centDirSize, centDirOffset, len(self._comment))
    self.fp.write(endrec)
    self.fp.write(self._comment)
    if self.mode == "a":
        # Appending may leave stale bytes past the new end record.
        self.fp.truncate()
    self.fp.flush()
|
| 1951 |
+
|
| 1952 |
+
def _fpclose(self, fp):
    # Reference-counted close: only the last user of a shared file
    # object actually closes it, and never if the caller passed it in.
    assert self._fileRefCnt > 0
    self._fileRefCnt -= 1
    if not self._fileRefCnt and not self._filePassed:
        fp.close()
|
| 1957 |
+
|
| 1958 |
+
|
| 1959 |
+
class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""

    def __init__(self, file, mode="r", compression=ZIP_STORED,
                 allowZip64=True, optimize=-1):
        ZipFile.__init__(self, file, mode=mode, compression=compression,
                         allowZip64=allowZip64)
        self._optimize = optimize

    def writepy(self, pathname, basename="", filterfunc=None):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive.  If pathname is a plain
        directory, listdir *.py and enter all modules.  Else, pathname
        must be a Python *.py file and the module will be put into the
        archive.  Added modules are always module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
        If filterfunc(pathname) is given, it is called with every argument.
        When it is False, the file or directory is skipped.
        """
        pathname = os.fspath(pathname)
        if filterfunc and not filterfunc(pathname):
            if self.debug:
                label = 'path' if os.path.isdir(pathname) else 'file'
                print('%s %r skipped by filterfunc' % (label, pathname))
            return
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # Package directory: add __init__ then recurse into contents.
                basename = "%s/%s" % (basename, name) if basename else name
                if self.debug:
                    print("Adding package in", pathname, "as", basename)
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print("Adding", arcname)
                self.write(fname, arcname)
                dirlist = sorted(os.listdir(pathname))
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # Nested package: recursive call.
                            self.writepy(path, basename,
                                         filterfunc=filterfunc)
                    elif ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
            else:
                # NOT a package directory: add its modules at top level.
                if self.debug:
                    print("Adding files from directory", pathname)
                for filename in sorted(os.listdir(pathname)):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
        else:
            # A single module file.
            if pathname[-3:] != ".py":
                raise RuntimeError(
                    'Files added with writepy() must end with ".py"')
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print("Adding file", arcname)
            self.write(fname, arcname)

    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        def _compile(file, optimize=-1):
            # One-shot byte-compilation; reports failure instead of raising.
            import py_compile
            if self.debug:
                print("Compiling", file)
            try:
                py_compile.compile(file, doraise=True, optimize=optimize)
            except py_compile.PyCompileError as err:
                print(err.msg)
                return False
            return True

        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        pycache_opt0 = importlib.util.cache_from_source(file_py, optimization='')
        pycache_opt1 = importlib.util.cache_from_source(file_py, optimization=1)
        pycache_opt2 = importlib.util.cache_from_source(file_py, optimization=2)

        def _usable(candidate):
            # A compiled file is usable if it exists and is at least as
            # fresh as the source.
            return (os.path.isfile(candidate) and
                    os.stat(candidate).st_mtime >= os.stat(file_py).st_mtime)

        if self._optimize == -1:
            # legacy mode: use whatever file is present
            if _usable(file_pyc):
                # Use .pyc file.
                arcname = fname = file_pyc
            elif _usable(pycache_opt0):
                # Use the __pycache__/*.pyc file, but write it to the legacy
                # pyc file name in the archive.
                fname = pycache_opt0
                arcname = file_pyc
            elif _usable(pycache_opt1):
                fname = pycache_opt1
                arcname = file_pyc
            elif _usable(pycache_opt2):
                fname = pycache_opt2
                arcname = file_pyc
            else:
                # Compile py into PEP 3147 pyc file.
                if _compile(file_py):
                    if sys.flags.optimize == 0:
                        fname = pycache_opt0
                    elif sys.flags.optimize == 1:
                        fname = pycache_opt1
                    else:
                        fname = pycache_opt2
                    arcname = file_pyc
                else:
                    fname = arcname = file_py
        else:
            # new mode: use given optimization level
            if self._optimize == 0:
                fname = pycache_opt0
                arcname = file_pyc
            else:
                arcname = file_pyc
                if self._optimize == 1:
                    fname = pycache_opt1
                elif self._optimize == 2:
                    fname = pycache_opt2
                else:
                    msg = "invalid value for 'optimize': {!r}".format(self._optimize)
                    raise ValueError(msg)
            if not _usable(fname):
                if not _compile(file_py, optimize=self._optimize):
                    fname = arcname = file_py

        archivename = os.path.split(arcname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
|
| 2131 |
+
|
| 2132 |
+
|
| 2133 |
+
def _parents(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all parents of that path.

    >>> list(_parents('b/d'))
    ['b']
    >>> list(_parents('/b/d/'))
    ['/b']
    >>> list(_parents('b/d/f/'))
    ['b/d', 'b']
    >>> list(_parents('b'))
    []
    >>> list(_parents(''))
    []
    """
    # The ancestry starts with the path itself; skip that first element.
    lineage = _ancestry(path)
    return itertools.islice(lineage, 1, None)
|
| 2150 |
+
|
| 2151 |
+
|
| 2152 |
+
def _ancestry(path):
|
| 2153 |
+
"""
|
| 2154 |
+
Given a path with elements separated by
|
| 2155 |
+
posixpath.sep, generate all elements of that path.
|
| 2156 |
+
|
| 2157 |
+
>>> list(_ancestry('b/d'))
|
| 2158 |
+
['b/d', 'b']
|
| 2159 |
+
>>> list(_ancestry('/b/d/'))
|
| 2160 |
+
['/b/d', '/b']
|
| 2161 |
+
>>> list(_ancestry('b/d/f/'))
|
| 2162 |
+
['b/d/f', 'b/d', 'b']
|
| 2163 |
+
>>> list(_ancestry('b'))
|
| 2164 |
+
['b']
|
| 2165 |
+
>>> list(_ancestry(''))
|
| 2166 |
+
[]
|
| 2167 |
+
|
| 2168 |
+
Multiple separators are treated like a single.
|
| 2169 |
+
|
| 2170 |
+
>>> list(_ancestry('//b//d///f//'))
|
| 2171 |
+
['//b//d///f', '//b//d', '//b']
|
| 2172 |
+
"""
|
| 2173 |
+
path = path.rstrip(posixpath.sep)
|
| 2174 |
+
while path.rstrip(posixpath.sep):
|
| 2175 |
+
yield path
|
| 2176 |
+
path, tail = posixpath.split(path)
|
| 2177 |
+
|
| 2178 |
+
|
| 2179 |
+
_dedupe = dict.fromkeys
|
| 2180 |
+
"""Deduplicate an iterable in original order"""
|
| 2181 |
+
|
| 2182 |
+
|
| 2183 |
+
def _difference(minuend, subtrahend):
|
| 2184 |
+
"""
|
| 2185 |
+
Return items in minuend not in subtrahend, retaining order
|
| 2186 |
+
with O(1) lookup.
|
| 2187 |
+
"""
|
| 2188 |
+
return itertools.filterfalse(set(subtrahend).__contains__, minuend)
|
| 2189 |
+
|
| 2190 |
+
|
| 2191 |
+
class CompleteDirs(ZipFile):
    """
    A ZipFile subclass that ensures that implied directories
    are always included in the namelist.
    """

    @staticmethod
    def _implied_dirs(names):
        # Every ancestor of every member, rendered with a trailing slash,
        # minus those already listed explicitly.
        ancestors = itertools.chain.from_iterable(map(_parents, names))
        as_dirs = (ancestor + posixpath.sep for ancestor in ancestors)
        return _dedupe(_difference(as_dirs, names))

    def namelist(self):
        names = super().namelist()
        return names + list(self._implied_dirs(names))

    def _name_set(self):
        return set(self.namelist())

    def resolve_dir(self, name):
        """
        If the name represents a directory, return that name
        as a directory (with the trailing slash).
        """
        names = self._name_set()
        dirname = name + '/'
        if name not in names and dirname in names:
            return dirname
        return name

    def getinfo(self, name):
        """
        Supplement getinfo for implied dirs.
        """
        try:
            return super().getinfo(name)
        except KeyError:
            # Synthesize an entry only for names we report as implied dirs.
            if name.endswith('/') and name in self._name_set():
                return ZipInfo(filename=name)
            raise

    @classmethod
    def make(cls, source):
        """
        Given a source (filename or zipfile), return an
        appropriate CompleteDirs subclass.
        """
        if isinstance(source, CompleteDirs):
            return source

        if not isinstance(source, ZipFile):
            return cls(source)

        # Only allow for FastLookup when supplied zipfile is read-only
        if 'r' not in source.mode:
            cls = CompleteDirs

        source.__class__ = cls
        return source
|
| 2249 |
+
|
| 2250 |
+
|
| 2251 |
+
class FastLookup(CompleteDirs):
    """
    ZipFile subclass to ensure implicit
    dirs exist and are resolved rapidly.
    """

    def namelist(self):
        # Compute the name list once and cache it on the instance.
        try:
            return self.__names
        except AttributeError:
            self.__names = super().namelist()
            return self.__names

    def _name_set(self):
        # Compute the name set once and cache it on the instance.
        try:
            return self.__lookup
        except AttributeError:
            self.__lookup = super()._name_set()
            return self.__lookup
|
| 2268 |
+
|
| 2269 |
+
|
| 2270 |
+
def _extract_text_encoding(encoding=None, *args, **kwargs):
|
| 2271 |
+
# stacklevel=3 so that the caller of the caller see any warning.
|
| 2272 |
+
return io.text_encoding(encoding, 3), args, kwargs
|
| 2273 |
+
|
| 2274 |
+
|
| 2275 |
+
class Path:
    """
    A pathlib-compatible interface for zip files.

    Consider a zip file with this structure::

        .
        ├── a.txt
        └── b
            ├── c.txt
            └── d
                └── e.txt

    >>> data = io.BytesIO()
    >>> zf = ZipFile(data, 'w')
    >>> zf.writestr('a.txt', 'content of a')
    >>> zf.writestr('b/c.txt', 'content of c')
    >>> zf.writestr('b/d/e.txt', 'content of e')
    >>> zf.filename = 'mem/abcde.zip'

    Path accepts the zipfile object itself or a filename

    >>> root = Path(zf)

    From there, several path operations are available.

    Directory iteration (including the zip file itself):

    >>> a, b = root.iterdir()
    >>> a
    Path('mem/abcde.zip', 'a.txt')
    >>> b
    Path('mem/abcde.zip', 'b/')

    name property:

    >>> b.name
    'b'

    join with divide operator:

    >>> c = b / 'c.txt'
    >>> c
    Path('mem/abcde.zip', 'b/c.txt')
    >>> c.name
    'c.txt'

    Read text:

    >>> c.read_text()
    'content of c'

    existence:

    >>> c.exists()
    True
    >>> (b / 'missing.txt').exists()
    False

    Coercion to string:

    >>> import os
    >>> str(c).replace(os.sep, posixpath.sep)
    'mem/abcde.zip/b/c.txt'

    At the root, ``name``, ``filename``, and ``parent``
    resolve to the zipfile.  Note these attributes are not
    valid and will raise a ``ValueError`` if the zipfile
    has no filename.

    >>> root.name
    'abcde.zip'
    >>> str(root.filename).replace(os.sep, posixpath.sep)
    'mem/abcde.zip'
    >>> str(root.parent)
    'mem'
    """

    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"

    def __init__(self, root, at=""):
        """
        Construct a Path from a ZipFile or filename.

        Note: When the source is an existing ZipFile object,
        its type (__class__) will be mutated to a
        specialized type. If the caller wishes to retain the
        original type, the caller should either create a
        separate ZipFile object or pass a filename.
        """
        self.root = FastLookup.make(root)
        self.at = at

    def open(self, mode='r', *args, pwd=None, **kwargs):
        """
        Open this entry as text or binary following the semantics
        of ``pathlib.Path.open()`` by passing arguments through
        to io.TextIOWrapper().
        """
        if self.is_dir():
            raise IsADirectoryError(self)
        zip_mode = mode[0]
        if zip_mode == 'r' and not self.exists():
            raise FileNotFoundError(self)
        stream = self.root.open(self.at, zip_mode, pwd=pwd)
        if 'b' in mode:
            # Binary mode: text-only arguments make no sense here.
            if args or kwargs:
                raise ValueError("encoding args invalid for binary operation")
            return stream
        # Text mode: wrap the raw stream with the resolved encoding.
        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
        return io.TextIOWrapper(stream, encoding, *args, **kwargs)

    @property
    def name(self):
        # Fall back to the zipfile's own name at the root.
        return pathlib.PurePosixPath(self.at).name or self.filename.name

    @property
    def filename(self):
        return pathlib.Path(self.root.filename).joinpath(self.at)

    def read_text(self, *args, **kwargs):
        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
        with self.open('r', encoding, *args, **kwargs) as strm:
            return strm.read()

    def read_bytes(self):
        with self.open('rb') as strm:
            return strm.read()

    def _is_child(self, path):
        # A direct child's parent directory (sans slash) equals our `at`.
        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")

    def _next(self, at):
        return self.__class__(self.root, at)

    def is_dir(self):
        # The root ("") and names ending in "/" are directories.
        return not self.at or self.at.endswith("/")

    def is_file(self):
        return self.exists() and not self.is_dir()

    def exists(self):
        return self.at in self.root._name_set()

    def iterdir(self):
        if not self.is_dir():
            raise ValueError("Can't listdir a file")
        candidates = map(self._next, self.root.namelist())
        return filter(self._is_child, candidates)

    def __str__(self):
        return posixpath.join(self.root.filename, self.at)

    def __repr__(self):
        return self.__repr.format(self=self)

    def joinpath(self, *other):
        combined = posixpath.join(self.at, *other)
        return self._next(self.root.resolve_dir(combined))

    __truediv__ = joinpath

    @property
    def parent(self):
        if not self.at:
            # Root of the archive: defer to the filesystem parent.
            return self.filename.parent
        parent_at = posixpath.dirname(self.at.rstrip('/'))
        if parent_at:
            parent_at += '/'
        return self._next(parent_at)
|
| 2446 |
+
|
| 2447 |
+
|
| 2448 |
+
def main(args=None):
|
| 2449 |
+
import argparse
|
| 2450 |
+
|
| 2451 |
+
description = 'A simple command-line interface for zipfile module.'
|
| 2452 |
+
parser = argparse.ArgumentParser(description=description)
|
| 2453 |
+
group = parser.add_mutually_exclusive_group(required=True)
|
| 2454 |
+
group.add_argument('-l', '--list', metavar='<zipfile>',
|
| 2455 |
+
help='Show listing of a zipfile')
|
| 2456 |
+
group.add_argument('-e', '--extract', nargs=2,
|
| 2457 |
+
metavar=('<zipfile>', '<output_dir>'),
|
| 2458 |
+
help='Extract zipfile into target dir')
|
| 2459 |
+
group.add_argument('-c', '--create', nargs='+',
|
| 2460 |
+
metavar=('<name>', '<file>'),
|
| 2461 |
+
help='Create zipfile from sources')
|
| 2462 |
+
group.add_argument('-t', '--test', metavar='<zipfile>',
|
| 2463 |
+
help='Test if a zipfile is valid')
|
| 2464 |
+
args = parser.parse_args(args)
|
| 2465 |
+
|
| 2466 |
+
if args.test is not None:
|
| 2467 |
+
src = args.test
|
| 2468 |
+
with ZipFile(src, 'r') as zf:
|
| 2469 |
+
badfile = zf.testzip()
|
| 2470 |
+
if badfile:
|
| 2471 |
+
print("The following enclosed file is corrupted: {!r}".format(badfile))
|
| 2472 |
+
print("Done testing")
|
| 2473 |
+
|
| 2474 |
+
elif args.list is not None:
|
| 2475 |
+
src = args.list
|
| 2476 |
+
with ZipFile(src, 'r') as zf:
|
| 2477 |
+
zf.printdir()
|
| 2478 |
+
|
| 2479 |
+
elif args.extract is not None:
|
| 2480 |
+
src, curdir = args.extract
|
| 2481 |
+
with ZipFile(src, 'r') as zf:
|
| 2482 |
+
zf.extractall(curdir)
|
| 2483 |
+
|
| 2484 |
+
elif args.create is not None:
|
| 2485 |
+
zip_name = args.create.pop(0)
|
| 2486 |
+
files = args.create
|
| 2487 |
+
|
| 2488 |
+
def addToZip(zf, path, zippath):
|
| 2489 |
+
if os.path.isfile(path):
|
| 2490 |
+
zf.write(path, zippath, ZIP_DEFLATED)
|
| 2491 |
+
elif os.path.isdir(path):
|
| 2492 |
+
if zippath:
|
| 2493 |
+
zf.write(path, zippath)
|
| 2494 |
+
for nm in sorted(os.listdir(path)):
|
| 2495 |
+
addToZip(zf,
|
| 2496 |
+
os.path.join(path, nm), os.path.join(zippath, nm))
|
| 2497 |
+
# else: ignore
|
| 2498 |
+
|
| 2499 |
+
with ZipFile(zip_name, 'w') as zf:
|
| 2500 |
+
for path in files:
|
| 2501 |
+
zippath = os.path.basename(path)
|
| 2502 |
+
if not zippath:
|
| 2503 |
+
zippath = os.path.basename(os.path.dirname(path))
|
| 2504 |
+
if zippath in ('', os.curdir, os.pardir):
|
| 2505 |
+
zippath = ''
|
| 2506 |
+
addToZip(zf, path, zippath)
|
| 2507 |
+
|
| 2508 |
+
|
| 2509 |
+
if __name__ == "__main__":
|
| 2510 |
+
main()
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_cpu_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cpu {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _embedding_bag_per_sample_weights_backward(const at::Tensor & grad, const at::Tensor & weight, const at::Tensor & indices, const at::Tensor & offsets, const at::Tensor & offset2bag, int64_t mode, int64_t padding_idx=-1);
|
| 21 |
+
|
| 22 |
+
} // namespace cpu
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & _fake_quantize_learnable_per_channel_affine_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & scale, const at::Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max, double grad_factor=1.0);
|
| 21 |
+
TORCH_API at::Tensor & _fake_quantize_learnable_per_channel_affine_outf(const at::Tensor & self, const at::Tensor & scale, const at::Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max, double grad_factor, at::Tensor & out);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautograd
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_sinh_ops.h
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _foreach_sinh {
|
| 18 |
+
using schema = ::std::vector<at::Tensor> (at::TensorList);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_sinh")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_sinh(Tensor[] self) -> Tensor[]")
|
| 24 |
+
static ::std::vector<at::Tensor> call(at::TensorList self);
|
| 25 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _foreach_sinh_ {
|
| 29 |
+
using schema = void (at::TensorList);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_sinh_")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_sinh_(Tensor(a!)[] self) -> ()")
|
| 35 |
+
static void call(at::TensorList self);
|
| 36 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
struct TORCH_API _foreach_sinh_out {
|
| 40 |
+
using schema = void (at::TensorList, at::TensorList);
|
| 41 |
+
using ptr_schema = schema*;
|
| 42 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 43 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_sinh")
|
| 44 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 45 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_sinh.out(Tensor[] self, *, Tensor(a!)[] out) -> ()")
|
| 46 |
+
static void call(at::TensorList self, at::TensorList out);
|
| 47 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, at::TensorList out);
|
| 48 |
+
};
|
| 49 |
+
|
| 50 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_fused_adamw_native.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API ::std::tuple<::std::vector<at::Tensor>,::std::vector<at::Tensor>,::std::vector<at::Tensor>,::std::vector<at::Tensor>,::std::vector<at::Tensor>> _fused_adamw(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, double lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale={}, const ::std::optional<at::Tensor> & found_inf={});
|
| 20 |
+
TORCH_API void _fused_adamw_out(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, double lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale, const ::std::optional<at::Tensor> & found_inf, at::TensorList out);
|
| 21 |
+
TORCH_API void _fused_adamw_kernel_cpu_(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, double lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale={}, const ::std::optional<at::Tensor> & found_inf={});
|
| 22 |
+
TORCH_API void _fused_adamw_kernel_cuda_(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, double lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale={}, const ::std::optional<at::Tensor> & found_inf={});
|
| 23 |
+
TORCH_API ::std::tuple<::std::vector<at::Tensor>,::std::vector<at::Tensor>,::std::vector<at::Tensor>,::std::vector<at::Tensor>,::std::vector<at::Tensor>> _fused_adamw(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, const at::Tensor & lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale={}, const ::std::optional<at::Tensor> & found_inf={});
|
| 24 |
+
TORCH_API void _fused_adamw_tensor_lr_out(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, const at::Tensor & lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale, const ::std::optional<at::Tensor> & found_inf, at::TensorList out);
|
| 25 |
+
TORCH_API void _fused_adamw_kernel_cpu_(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, const at::Tensor & lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale={}, const ::std::optional<at::Tensor> & found_inf={});
|
| 26 |
+
TORCH_API void _fused_adamw_kernel_cuda_(at::TensorList self, at::TensorList grads, at::TensorList exp_avgs, at::TensorList exp_avg_sqs, at::TensorList max_exp_avg_sqs, at::TensorList state_steps, const at::Tensor & lr, double beta1, double beta2, double weight_decay, double eps, bool amsgrad, bool maximize, const ::std::optional<at::Tensor> & grad_scale={}, const ::std::optional<at::Tensor> & found_inf={});
|
| 27 |
+
} // namespace native
|
| 28 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_backward.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_grid_sampler_2d_cpu_fallback_backward_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_grid_sampler_2d_cpu_fallback_backward(Tensor grad_output, Tensor input, Tensor grid, int interpolation_mode, int padding_mode, bool align_corners) -> (Tensor, Tensor)
|
| 26 |
+
inline ::std::tuple<at::Tensor,at::Tensor> _grid_sampler_2d_cpu_fallback_backward(const at::Tensor & grad_output, const at::Tensor & input, const at::Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners) {
|
| 27 |
+
return at::_ops::_grid_sampler_2d_cpu_fallback_backward::call(grad_output, input, grid, interpolation_mode, padding_mode, align_corners);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_backward_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _scaled_dot_product_cudnn_attention_backward(const at::Tensor & grad_out, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const at::Tensor & out, const at::Tensor & logsumexp, const at::Tensor & cum_seq_q, const at::Tensor & cum_seq_k, int64_t max_q, int64_t max_k, double dropout_p, bool is_causal, const at::Tensor & philox_seed, const at::Tensor & philox_offset, ::std::optional<double> scale=::std::nullopt);
|
| 21 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _scaled_dot_product_cudnn_attention_backward_symint(const at::Tensor & grad_out, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const at::Tensor & out, const at::Tensor & logsumexp, const at::Tensor & cum_seq_q, const at::Tensor & cum_seq_k, c10::SymInt max_q, c10::SymInt max_k, double dropout_p, bool is_causal, const at::Tensor & philox_seed, const at::Tensor & philox_offset, ::std::optional<double> scale=::std::nullopt);
|
| 22 |
+
|
| 23 |
+
} // namespace cuda
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_efficient_attention_backward_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _scaled_dot_product_efficient_attention_backward {
|
| 18 |
+
using schema = ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> (const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, double, ::std::array<bool,4>, bool, ::std::optional<double>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_scaled_dot_product_efficient_attention_backward")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_scaled_dot_product_efficient_attention_backward(Tensor grad_out_, Tensor query, Tensor key, Tensor value, Tensor attn_bias, Tensor out, Tensor logsumexp, Tensor philox_seed, Tensor philox_offset, float dropout_p, bool[4] grad_input_mask, bool is_causal=False, *, float? scale=None) -> (Tensor, Tensor, Tensor, Tensor)")
|
| 24 |
+
static ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> call(const at::Tensor & grad_out_, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const at::Tensor & attn_bias, const at::Tensor & out, const at::Tensor & logsumexp, const at::Tensor & philox_seed, const at::Tensor & philox_offset, double dropout_p, ::std::array<bool,4> grad_input_mask, bool is_causal, ::std::optional<double> scale);
|
| 25 |
+
static ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & grad_out_, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const at::Tensor & attn_bias, const at::Tensor & out, const at::Tensor & logsumexp, const at::Tensor & philox_seed, const at::Tensor & philox_offset, double dropout_p, ::std::array<bool,4> grad_input_mask, bool is_causal, ::std::optional<double> scale);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_compressed_tensor_unsafe_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor _sparse_compressed_tensor_unsafe_symint(const at::Tensor & compressed_indices, const at::Tensor & plain_indices, const at::Tensor & values, c10::SymIntArrayRef size, ::std::optional<at::ScalarType> dtype={}, ::std::optional<at::Layout> layout={}, ::std::optional<at::Device> device={}, ::std::optional<bool> pin_memory={});
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_sum_backward.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_sparse_sum_backward_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_sparse_sum_backward(Tensor grad, Tensor self, int[] dim) -> Tensor
|
| 26 |
+
inline at::Tensor _sparse_sum_backward(const at::Tensor & grad, const at::Tensor & self, at::IntArrayRef dim) {
|
| 27 |
+
return at::_ops::_sparse_sum_backward::call(grad, self, dim);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_sparse_sum_backward.out(Tensor grad, Tensor self, int[] dim, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _sparse_sum_backward_out(at::Tensor & out, const at::Tensor & grad, const at::Tensor & self, at::IntArrayRef dim) {
|
| 32 |
+
return at::_ops::_sparse_sum_backward_out::call(grad, self, dim, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::_sparse_sum_backward.out(Tensor grad, Tensor self, int[] dim, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & _sparse_sum_backward_outf(const at::Tensor & grad, const at::Tensor & self, at::IntArrayRef dim, at::Tensor & out) {
|
| 36 |
+
return at::_ops::_sparse_sum_backward_out::call(grad, self, dim, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_standard_gamma_grad.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_standard_gamma_grad_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_standard_gamma_grad(Tensor self, Tensor output) -> Tensor
|
| 26 |
+
inline at::Tensor _standard_gamma_grad(const at::Tensor & self, const at::Tensor & output) {
|
| 27 |
+
return at::_ops::_standard_gamma_grad::call(self, output);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_standard_gamma_grad.out(Tensor self, Tensor output, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _standard_gamma_grad_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & output) {
|
| 32 |
+
return at::_ops::_standard_gamma_grad_out::call(self, output, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::_standard_gamma_grad.out(Tensor self, Tensor output, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & _standard_gamma_grad_outf(const at::Tensor & self, const at::Tensor & output, at::Tensor & out) {
|
| 36 |
+
return at::_ops::_standard_gamma_grad_out::call(self, output, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_to_cpu_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::vector<at::Tensor> _to_cpu(at::TensorList tensors);
|
| 21 |
+
|
| 22 |
+
} // namespace compositeimplicitautograd
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_bicubic2d_aa_meta_dispatch.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace meta {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _upsample_bicubic2d_aa(const at::Tensor & self, at::IntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 21 |
+
TORCH_API at::Tensor _upsample_bicubic2d_aa_symint(const at::Tensor & self, c10::SymIntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 22 |
+
TORCH_API at::Tensor & _upsample_bicubic2d_aa_out(at::Tensor & out, const at::Tensor & self, at::IntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 23 |
+
TORCH_API at::Tensor & _upsample_bicubic2d_aa_outf(const at::Tensor & self, at::IntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & out);
|
| 24 |
+
TORCH_API at::Tensor & _upsample_bicubic2d_aa_symint_out(at::Tensor & out, const at::Tensor & self, c10::SymIntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 25 |
+
TORCH_API at::Tensor & _upsample_bicubic2d_aa_symint_outf(const at::Tensor & self, c10::SymIntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & out);
|
| 26 |
+
|
| 27 |
+
} // namespace meta
|
| 28 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_upsample_bilinear2d_aa_cpu_dispatch.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cpu {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _upsample_bilinear2d_aa(const at::Tensor & self, at::IntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 21 |
+
TORCH_API at::Tensor _upsample_bilinear2d_aa_symint(const at::Tensor & self, c10::SymIntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 22 |
+
TORCH_API at::Tensor & _upsample_bilinear2d_aa_out(at::Tensor & out, const at::Tensor & self, at::IntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 23 |
+
TORCH_API at::Tensor & _upsample_bilinear2d_aa_outf(const at::Tensor & self, at::IntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & out);
|
| 24 |
+
TORCH_API at::Tensor & _upsample_bilinear2d_aa_symint_out(at::Tensor & out, const at::Tensor & self, c10::SymIntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h=::std::nullopt, ::std::optional<double> scales_w=::std::nullopt);
|
| 25 |
+
TORCH_API at::Tensor & _upsample_bilinear2d_aa_symint_outf(const at::Tensor & self, c10::SymIntArrayRef output_size, bool align_corners, ::std::optional<double> scales_h, ::std::optional<double> scales_w, at::Tensor & out);
|
| 26 |
+
|
| 27 |
+
} // namespace cpu
|
| 28 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/acosh_compositeexplicitautogradnonfunctional_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautogradnonfunctional {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor acosh(const at::Tensor & self);
|
| 21 |
+
TORCH_API at::Tensor & acosh_(at::Tensor & self);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautogradnonfunctional
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/adjoint.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/adjoint_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::adjoint(Tensor(a) self) -> Tensor(a)
|
| 26 |
+
inline at::Tensor adjoint(const at::Tensor & self) {
|
| 27 |
+
return at::_ops::adjoint::call(self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/cauchy_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor cauchy(const at::Tensor & self, double median=0, double sigma=1, ::std::optional<at::Generator> generator=::std::nullopt);
|
| 21 |
+
TORCH_API at::Tensor & cauchy_out(at::Tensor & out, const at::Tensor & self, double median=0, double sigma=1, ::std::optional<at::Generator> generator=::std::nullopt);
|
| 22 |
+
TORCH_API at::Tensor & cauchy_outf(const at::Tensor & self, double median, double sigma, ::std::optional<at::Generator> generator, at::Tensor & out);
|
| 23 |
+
|
| 24 |
+
} // namespace compositeexplicitautograd
|
| 25 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/ceil_meta.h
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeMetaFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/TensorIterator.h>
|
| 13 |
+
#include <ATen/TensorMeta.h>
|
| 14 |
+
#include <tuple>
|
| 15 |
+
#include <vector>
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace meta {
|
| 19 |
+
|
| 20 |
+
struct TORCH_API structured_ceil : public TensorIteratorBase {
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
void meta(const at::Tensor & self);
|
| 24 |
+
};
|
| 25 |
+
|
| 26 |
+
} // namespace native
|
| 27 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/cov_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor cov(const at::Tensor & self, int64_t correction=1, const ::std::optional<at::Tensor> & fweights={}, const ::std::optional<at::Tensor> & aweights={});
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|